Gentoo Archives: gentoo-catalyst

From: Brian Dolbec <dolsen@g.o>
To: gentoo-catalyst@l.g.o
Cc: Brian Dolbec <dolsen@g.o>
Subject: [gentoo-catalyst] [PATCH 1/4] Initial rearrangement of the python directories
Date: Sat, 14 Dec 2013 03:20:45
Message-Id: 1386991211-9296-2-git-send-email-dolsen@gentoo.org
In-Reply-To: [gentoo-catalyst] rewrite-on-master patches, round-2 by Brian Dolbec
1 New minimal start script, moving the original catalyst script to catalyst/main.py.
2 Add __init__.py's to modules and arch sub-pkgs.
3 Update the module loading paths for the new locations.
4 Fix catalyst_support import to new location and specify imported modules.
5 ---
6 arch/alpha.py | 75 --
7 arch/amd64.py | 83 --
8 arch/arm.py | 133 ---
9 arch/hppa.py | 40 -
10 arch/ia64.py | 16 -
11 arch/mips.py | 464 --------
12 arch/powerpc.py | 124 ---
13 arch/s390.py | 33 -
14 arch/sh.py | 116 --
15 arch/sparc.py | 42 -
16 arch/x86.py | 153 ---
17 bin/catalyst | 46 +
18 catalyst | 428 --------
19 catalyst/__init__.py | 0
20 catalyst/arch/__init__.py | 1 +
21 catalyst/arch/alpha.py | 75 ++
22 catalyst/arch/amd64.py | 83 ++
23 catalyst/arch/arm.py | 133 +++
24 catalyst/arch/hppa.py | 40 +
25 catalyst/arch/ia64.py | 16 +
26 catalyst/arch/mips.py | 464 ++++++++
27 catalyst/arch/powerpc.py | 124 +++
28 catalyst/arch/s390.py | 33 +
29 catalyst/arch/sh.py | 116 ++
30 catalyst/arch/sparc.py | 42 +
31 catalyst/arch/x86.py | 153 +++
32 catalyst/config.py | 122 +++
33 catalyst/main.py | 435 ++++++++
34 catalyst/modules/__init__.py | 1 +
35 catalyst/modules/builder.py | 20 +
36 catalyst/modules/catalyst_lock.py | 468 +++++++++
37 catalyst/modules/catalyst_support.py | 718 +++++++++++++
38 catalyst/modules/embedded_target.py | 51 +
39 catalyst/modules/generic_stage_target.py | 1691 +++++++++++++++++++++++++++++
40 catalyst/modules/generic_target.py | 11 +
41 catalyst/modules/grp_target.py | 118 +++
42 catalyst/modules/livecd_stage1_target.py | 75 ++
43 catalyst/modules/livecd_stage2_target.py | 146 +++
44 catalyst/modules/netboot2_target.py | 166 +++
45 catalyst/modules/netboot_target.py | 128 +++
46 catalyst/modules/snapshot_target.py | 91 ++
47 catalyst/modules/stage1_target.py | 96 ++
48 catalyst/modules/stage2_target.py | 62 ++
49 catalyst/modules/stage3_target.py | 31 +
50 catalyst/modules/stage4_target.py | 43 +
51 catalyst/modules/tinderbox_target.py | 44 +
52 catalyst/util.py | 14 +
53 modules/builder.py | 20 -
54 modules/catalyst/__init__.py | 0
55 modules/catalyst/config.py | 122 ---
56 modules/catalyst/util.py | 14 -
57 modules/catalyst_lock.py | 468 ---------
58 modules/catalyst_support.py | 718 -------------
59 modules/embedded_target.py | 51 -
60 modules/generic_stage_target.py | 1692 ------------------------------
61 modules/generic_target.py | 11 -
62 modules/grp_target.py | 118 ---
63 modules/livecd_stage1_target.py | 75 --
64 modules/livecd_stage2_target.py | 146 ---
65 modules/netboot2_target.py | 166 ---
66 modules/netboot_target.py | 128 ---
67 modules/snapshot_target.py | 91 --
68 modules/stage1_target.py | 96 --
69 modules/stage2_target.py | 62 --
70 modules/stage3_target.py | 31 -
71 modules/stage4_target.py | 43 -
72 modules/tinderbox_target.py | 44 -
73 67 files changed, 5857 insertions(+), 5803 deletions(-)
74 delete mode 100644 arch/alpha.py
75 delete mode 100644 arch/amd64.py
76 delete mode 100644 arch/arm.py
77 delete mode 100644 arch/hppa.py
78 delete mode 100644 arch/ia64.py
79 delete mode 100644 arch/mips.py
80 delete mode 100644 arch/powerpc.py
81 delete mode 100644 arch/s390.py
82 delete mode 100644 arch/sh.py
83 delete mode 100644 arch/sparc.py
84 delete mode 100644 arch/x86.py
85 create mode 100755 bin/catalyst
86 delete mode 100755 catalyst
87 create mode 100644 catalyst/__init__.py
88 create mode 100644 catalyst/arch/__init__.py
89 create mode 100644 catalyst/arch/alpha.py
90 create mode 100644 catalyst/arch/amd64.py
91 create mode 100644 catalyst/arch/arm.py
92 create mode 100644 catalyst/arch/hppa.py
93 create mode 100644 catalyst/arch/ia64.py
94 create mode 100644 catalyst/arch/mips.py
95 create mode 100644 catalyst/arch/powerpc.py
96 create mode 100644 catalyst/arch/s390.py
97 create mode 100644 catalyst/arch/sh.py
98 create mode 100644 catalyst/arch/sparc.py
99 create mode 100644 catalyst/arch/x86.py
100 create mode 100644 catalyst/config.py
101 create mode 100644 catalyst/main.py
102 create mode 100644 catalyst/modules/__init__.py
103 create mode 100644 catalyst/modules/builder.py
104 create mode 100644 catalyst/modules/catalyst_lock.py
105 create mode 100644 catalyst/modules/catalyst_support.py
106 create mode 100644 catalyst/modules/embedded_target.py
107 create mode 100644 catalyst/modules/generic_stage_target.py
108 create mode 100644 catalyst/modules/generic_target.py
109 create mode 100644 catalyst/modules/grp_target.py
110 create mode 100644 catalyst/modules/livecd_stage1_target.py
111 create mode 100644 catalyst/modules/livecd_stage2_target.py
112 create mode 100644 catalyst/modules/netboot2_target.py
113 create mode 100644 catalyst/modules/netboot_target.py
114 create mode 100644 catalyst/modules/snapshot_target.py
115 create mode 100644 catalyst/modules/stage1_target.py
116 create mode 100644 catalyst/modules/stage2_target.py
117 create mode 100644 catalyst/modules/stage3_target.py
118 create mode 100644 catalyst/modules/stage4_target.py
119 create mode 100644 catalyst/modules/tinderbox_target.py
120 create mode 100644 catalyst/util.py
121 delete mode 100644 modules/builder.py
122 delete mode 100644 modules/catalyst/__init__.py
123 delete mode 100644 modules/catalyst/config.py
124 delete mode 100644 modules/catalyst/util.py
125 delete mode 100644 modules/catalyst_lock.py
126 delete mode 100644 modules/catalyst_support.py
127 delete mode 100644 modules/embedded_target.py
128 delete mode 100644 modules/generic_stage_target.py
129 delete mode 100644 modules/generic_target.py
130 delete mode 100644 modules/grp_target.py
131 delete mode 100644 modules/livecd_stage1_target.py
132 delete mode 100644 modules/livecd_stage2_target.py
133 delete mode 100644 modules/netboot2_target.py
134 delete mode 100644 modules/netboot_target.py
135 delete mode 100644 modules/snapshot_target.py
136 delete mode 100644 modules/stage1_target.py
137 delete mode 100644 modules/stage2_target.py
138 delete mode 100644 modules/stage3_target.py
139 delete mode 100644 modules/stage4_target.py
140 delete mode 100644 modules/tinderbox_target.py
141
142 diff --git a/arch/alpha.py b/arch/alpha.py
143 deleted file mode 100644
144 index f0fc95a..0000000
145 --- a/arch/alpha.py
146 +++ /dev/null
147 @@ -1,75 +0,0 @@
148 -
149 -import builder,os
150 -from catalyst_support import *
151 -
152 -class generic_alpha(builder.generic):
153 - "abstract base class for all alpha builders"
154 - def __init__(self,myspec):
155 - builder.generic.__init__(self,myspec)
156 - self.settings["CHROOT"]="chroot"
157 - self.settings["CFLAGS"]="-mieee -pipe"
158 -
159 -class arch_alpha(generic_alpha):
160 - "builder class for generic alpha (ev4+)"
161 - def __init__(self,myspec):
162 - generic_alpha.__init__(self,myspec)
163 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
164 - self.settings["CHOST"]="alpha-unknown-linux-gnu"
165 -
166 -class arch_ev4(generic_alpha):
167 - "builder class for alpha ev4"
168 - def __init__(self,myspec):
169 - generic_alpha.__init__(self,myspec)
170 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
171 - self.settings["CHOST"]="alphaev4-unknown-linux-gnu"
172 -
173 -class arch_ev45(generic_alpha):
174 - "builder class for alpha ev45"
175 - def __init__(self,myspec):
176 - generic_alpha.__init__(self,myspec)
177 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev45"
178 - self.settings["CHOST"]="alphaev45-unknown-linux-gnu"
179 -
180 -class arch_ev5(generic_alpha):
181 - "builder class for alpha ev5"
182 - def __init__(self,myspec):
183 - generic_alpha.__init__(self,myspec)
184 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev5"
185 - self.settings["CHOST"]="alphaev5-unknown-linux-gnu"
186 -
187 -class arch_ev56(generic_alpha):
188 - "builder class for alpha ev56 (ev5 plus BWX)"
189 - def __init__(self,myspec):
190 - generic_alpha.__init__(self,myspec)
191 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev56"
192 - self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
193 -
194 -class arch_pca56(generic_alpha):
195 - "builder class for alpha pca56 (ev5 plus BWX & MAX)"
196 - def __init__(self,myspec):
197 - generic_alpha.__init__(self,myspec)
198 - self.settings["CFLAGS"]+=" -O2 -mcpu=pca56"
199 - self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
200 -
201 -class arch_ev6(generic_alpha):
202 - "builder class for alpha ev6"
203 - def __init__(self,myspec):
204 - generic_alpha.__init__(self,myspec)
205 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev6"
206 - self.settings["CHOST"]="alphaev6-unknown-linux-gnu"
207 - self.settings["HOSTUSE"]=["ev6"]
208 -
209 -class arch_ev67(generic_alpha):
210 - "builder class for alpha ev67 (ev6 plus CIX)"
211 - def __init__(self,myspec):
212 - generic_alpha.__init__(self,myspec)
213 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev67"
214 - self.settings["CHOST"]="alphaev67-unknown-linux-gnu"
215 - self.settings["HOSTUSE"]=["ev6"]
216 -
217 -def register():
218 - "Inform main catalyst program of the contents of this plugin."
219 - return ({ "alpha":arch_alpha, "ev4":arch_ev4, "ev45":arch_ev45,
220 - "ev5":arch_ev5, "ev56":arch_ev56, "pca56":arch_pca56,
221 - "ev6":arch_ev6, "ev67":arch_ev67 },
222 - ("alpha", ))
223 diff --git a/arch/amd64.py b/arch/amd64.py
224 deleted file mode 100644
225 index 262b55a..0000000
226 --- a/arch/amd64.py
227 +++ /dev/null
228 @@ -1,83 +0,0 @@
229 -
230 -import builder
231 -
232 -class generic_amd64(builder.generic):
233 - "abstract base class for all amd64 builders"
234 - def __init__(self,myspec):
235 - builder.generic.__init__(self,myspec)
236 - self.settings["CHROOT"]="chroot"
237 -
238 -class arch_amd64(generic_amd64):
239 - "builder class for generic amd64 (Intel and AMD)"
240 - def __init__(self,myspec):
241 - generic_amd64.__init__(self,myspec)
242 - self.settings["CFLAGS"]="-O2 -pipe"
243 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
244 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
245 -
246 -class arch_nocona(generic_amd64):
247 - "improved version of Intel Pentium 4 CPU with 64-bit extensions, MMX, SSE, SSE2 and SSE3 support"
248 - def __init__(self,myspec):
249 - generic_amd64.__init__(self,myspec)
250 - self.settings["CFLAGS"]="-O2 -march=nocona -pipe"
251 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
252 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
253 -
254 -# Requires gcc 4.3 to use this class
255 -class arch_core2(generic_amd64):
256 - "Intel Core 2 CPU with 64-bit extensions, MMX, SSE, SSE2, SSE3 and SSSE3 support"
257 - def __init__(self,myspec):
258 - generic_amd64.__init__(self,myspec)
259 - self.settings["CFLAGS"]="-O2 -march=core2 -pipe"
260 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
261 - self.settings["HOSTUSE"]=["mmx","sse","sse2","ssse3"]
262 -
263 -class arch_k8(generic_amd64):
264 - "generic k8, opteron and athlon64 support"
265 - def __init__(self,myspec):
266 - generic_amd64.__init__(self,myspec)
267 - self.settings["CFLAGS"]="-O2 -march=k8 -pipe"
268 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
269 - self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
270 -
271 -class arch_k8_sse3(generic_amd64):
272 - "improved versions of k8, opteron and athlon64 with SSE3 support"
273 - def __init__(self,myspec):
274 - generic_amd64.__init__(self,myspec)
275 - self.settings["CFLAGS"]="-O2 -march=k8-sse3 -pipe"
276 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
277 - self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
278 -
279 -class arch_amdfam10(generic_amd64):
280 - "AMD Family 10h core based CPUs with x86-64 instruction set support"
281 - def __init__(self,myspec):
282 - generic_amd64.__init__(self,myspec)
283 - self.settings["CFLAGS"]="-O2 -march=amdfam10 -pipe"
284 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
285 - self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
286 -
287 -class arch_x32(generic_amd64):
288 - "builder class for generic x32 (Intel and AMD)"
289 - def __init__(self,myspec):
290 - generic_amd64.__init__(self,myspec)
291 - self.settings["CFLAGS"]="-O2 -pipe"
292 - self.settings["CHOST"]="x86_64-pc-linux-gnux32"
293 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
294 -
295 -def register():
296 - "inform main catalyst program of the contents of this plugin"
297 - return ({
298 - "amd64" : arch_amd64,
299 - "k8" : arch_k8,
300 - "opteron" : arch_k8,
301 - "athlon64" : arch_k8,
302 - "athlonfx" : arch_k8,
303 - "nocona" : arch_nocona,
304 - "core2" : arch_core2,
305 - "k8-sse3" : arch_k8_sse3,
306 - "opteron-sse3" : arch_k8_sse3,
307 - "athlon64-sse3" : arch_k8_sse3,
308 - "amdfam10" : arch_amdfam10,
309 - "barcelona" : arch_amdfam10,
310 - "x32" : arch_x32,
311 - }, ("x86_64","amd64","nocona"))
312 diff --git a/arch/arm.py b/arch/arm.py
313 deleted file mode 100644
314 index 2de3942..0000000
315 --- a/arch/arm.py
316 +++ /dev/null
317 @@ -1,133 +0,0 @@
318 -
319 -import builder,os
320 -from catalyst_support import *
321 -
322 -class generic_arm(builder.generic):
323 - "Abstract base class for all arm (little endian) builders"
324 - def __init__(self,myspec):
325 - builder.generic.__init__(self,myspec)
326 - self.settings["CHROOT"]="chroot"
327 - self.settings["CFLAGS"]="-O2 -pipe"
328 -
329 -class generic_armeb(builder.generic):
330 - "Abstract base class for all arm (big endian) builders"
331 - def __init__(self,myspec):
332 - builder.generic.__init__(self,myspec)
333 - self.settings["CHROOT"]="chroot"
334 - self.settings["CFLAGS"]="-O2 -pipe"
335 -
336 -class arch_arm(generic_arm):
337 - "Builder class for arm (little endian) target"
338 - def __init__(self,myspec):
339 - generic_arm.__init__(self,myspec)
340 - self.settings["CHOST"]="arm-unknown-linux-gnu"
341 -
342 -class arch_armeb(generic_armeb):
343 - "Builder class for arm (big endian) target"
344 - def __init__(self,myspec):
345 - generic_armeb.__init__(self,myspec)
346 - self.settings["CHOST"]="armeb-unknown-linux-gnu"
347 -
348 -class arch_armv4l(generic_arm):
349 - "Builder class for armv4l target"
350 - def __init__(self,myspec):
351 - generic_arm.__init__(self,myspec)
352 - self.settings["CHOST"]="armv4l-unknown-linux-gnu"
353 - self.settings["CFLAGS"]+=" -march=armv4"
354 -
355 -class arch_armv4tl(generic_arm):
356 - "Builder class for armv4tl target"
357 - def __init__(self,myspec):
358 - generic_arm.__init__(self,myspec)
359 - self.settings["CHOST"]="armv4tl-softfloat-linux-gnueabi"
360 - self.settings["CFLAGS"]+=" -march=armv4t"
361 -
362 -class arch_armv5tl(generic_arm):
363 - "Builder class for armv5tl target"
364 - def __init__(self,myspec):
365 - generic_arm.__init__(self,myspec)
366 - self.settings["CHOST"]="armv5tl-softfloat-linux-gnueabi"
367 - self.settings["CFLAGS"]+=" -march=armv5t"
368 -
369 -class arch_armv5tel(generic_arm):
370 - "Builder class for armv5tel target"
371 - def __init__(self,myspec):
372 - generic_arm.__init__(self,myspec)
373 - self.settings["CHOST"]="armv5tel-softfloat-linux-gnueabi"
374 - self.settings["CFLAGS"]+=" -march=armv5te"
375 -
376 -class arch_armv5tejl(generic_arm):
377 - "Builder class for armv5tejl target"
378 - def __init__(self,myspec):
379 - generic_arm.__init__(self,myspec)
380 - self.settings["CHOST"]="armv5tejl-softfloat-linux-gnueabi"
381 - self.settings["CFLAGS"]+=" -march=armv5te"
382 -
383 -class arch_armv6j(generic_arm):
384 - "Builder class for armv6j target"
385 - def __init__(self,myspec):
386 - generic_arm.__init__(self,myspec)
387 - self.settings["CHOST"]="armv6j-softfp-linux-gnueabi"
388 - self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=softfp"
389 -
390 -class arch_armv6z(generic_arm):
391 - "Builder class for armv6z target"
392 - def __init__(self,myspec):
393 - generic_arm.__init__(self,myspec)
394 - self.settings["CHOST"]="armv6z-softfp-linux-gnueabi"
395 - self.settings["CFLAGS"]+=" -march=armv6z -mfpu=vfp -mfloat-abi=softfp"
396 -
397 -class arch_armv6zk(generic_arm):
398 - "Builder class for armv6zk target"
399 - def __init__(self,myspec):
400 - generic_arm.__init__(self,myspec)
401 - self.settings["CHOST"]="armv6zk-softfp-linux-gnueabi"
402 - self.settings["CFLAGS"]+=" -march=armv6zk -mfpu=vfp -mfloat-abi=softfp"
403 -
404 -class arch_armv7a(generic_arm):
405 - "Builder class for armv7a target"
406 - def __init__(self,myspec):
407 - generic_arm.__init__(self,myspec)
408 - self.settings["CHOST"]="armv7a-softfp-linux-gnueabi"
409 - self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp"
410 -
411 -class arch_armv6j_hardfp(generic_arm):
412 - "Builder class for armv6j hardfloat target, needs >=gcc-4.5"
413 - def __init__(self,myspec):
414 - generic_arm.__init__(self,myspec)
415 - self.settings["CHOST"]="armv6j-hardfloat-linux-gnueabi"
416 - self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=hard"
417 -
418 -class arch_armv7a_hardfp(generic_arm):
419 - "Builder class for armv7a hardfloat target, needs >=gcc-4.5"
420 - def __init__(self,myspec):
421 - generic_arm.__init__(self,myspec)
422 - self.settings["CHOST"]="armv7a-hardfloat-linux-gnueabi"
423 - self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=hard"
424 -
425 -class arch_armv5teb(generic_armeb):
426 - "Builder class for armv5teb (XScale) target"
427 - def __init__(self,myspec):
428 - generic_armeb.__init__(self,myspec)
429 - self.settings["CFLAGS"]+=" -mcpu=xscale"
430 - self.settings["CHOST"]="armv5teb-softfloat-linux-gnueabi"
431 -
432 -def register():
433 - "Inform main catalyst program of the contents of this plugin."
434 - return ({
435 - "arm" : arch_arm,
436 - "armv4l" : arch_armv4l,
437 - "armv4tl": arch_armv4tl,
438 - "armv5tl": arch_armv5tl,
439 - "armv5tel": arch_armv5tel,
440 - "armv5tejl": arch_armv5tejl,
441 - "armv6j" : arch_armv6j,
442 - "armv6z" : arch_armv6z,
443 - "armv6zk" : arch_armv6zk,
444 - "armv7a" : arch_armv7a,
445 - "armv6j_hardfp" : arch_armv6j_hardfp,
446 - "armv7a_hardfp" : arch_armv7a_hardfp,
447 - "armeb" : arch_armeb,
448 - "armv5teb" : arch_armv5teb
449 - }, ("arm", "armv4l", "armv4tl", "armv5tl", "armv5tel", "armv5tejl", "armv6l",
450 -"armv7l", "armeb", "armv5teb") )
451 diff --git a/arch/hppa.py b/arch/hppa.py
452 deleted file mode 100644
453 index f804398..0000000
454 --- a/arch/hppa.py
455 +++ /dev/null
456 @@ -1,40 +0,0 @@
457 -
458 -import builder,os
459 -from catalyst_support import *
460 -
461 -class generic_hppa(builder.generic):
462 - "Abstract base class for all hppa builders"
463 - def __init__(self,myspec):
464 - builder.generic.__init__(self,myspec)
465 - self.settings["CHROOT"]="chroot"
466 - self.settings["CFLAGS"]="-O2 -pipe"
467 - self.settings["CXXFLAGS"]="-O2 -pipe"
468 -
469 -class arch_hppa(generic_hppa):
470 - "Builder class for hppa systems"
471 - def __init__(self,myspec):
472 - generic_hppa.__init__(self,myspec)
473 - self.settings["CFLAGS"]+=" -march=1.0"
474 - self.settings["CHOST"]="hppa-unknown-linux-gnu"
475 -
476 -class arch_hppa1_1(generic_hppa):
477 - "Builder class for hppa 1.1 systems"
478 - def __init__(self,myspec):
479 - generic_hppa.__init__(self,myspec)
480 - self.settings["CFLAGS"]+=" -march=1.1"
481 - self.settings["CHOST"]="hppa1.1-unknown-linux-gnu"
482 -
483 -class arch_hppa2_0(generic_hppa):
484 - "Builder class for hppa 2.0 systems"
485 - def __init__(self,myspec):
486 - generic_hppa.__init__(self,myspec)
487 - self.settings["CFLAGS"]+=" -march=2.0"
488 - self.settings["CHOST"]="hppa2.0-unknown-linux-gnu"
489 -
490 -def register():
491 - "Inform main catalyst program of the contents of this plugin."
492 - return ({
493 - "hppa": arch_hppa,
494 - "hppa1.1": arch_hppa1_1,
495 - "hppa2.0": arch_hppa2_0
496 - }, ("parisc","parisc64","hppa","hppa64") )
497 diff --git a/arch/ia64.py b/arch/ia64.py
498 deleted file mode 100644
499 index 825af70..0000000
500 --- a/arch/ia64.py
501 +++ /dev/null
502 @@ -1,16 +0,0 @@
503 -
504 -import builder,os
505 -from catalyst_support import *
506 -
507 -class arch_ia64(builder.generic):
508 - "builder class for ia64"
509 - def __init__(self,myspec):
510 - builder.generic.__init__(self,myspec)
511 - self.settings["CHROOT"]="chroot"
512 - self.settings["CFLAGS"]="-O2 -pipe"
513 - self.settings["CFLAGS"]="-O2 -pipe"
514 - self.settings["CHOST"]="ia64-unknown-linux-gnu"
515 -
516 -def register():
517 - "Inform main catalyst program of the contents of this plugin."
518 - return ({ "ia64":arch_ia64 }, ("ia64", ))
519 diff --git a/arch/mips.py b/arch/mips.py
520 deleted file mode 100644
521 index b3730fa..0000000
522 --- a/arch/mips.py
523 +++ /dev/null
524 @@ -1,464 +0,0 @@
525 -
526 -import builder,os
527 -from catalyst_support import *
528 -
529 -class generic_mips(builder.generic):
530 - "Abstract base class for all mips builders [Big-endian]"
531 - def __init__(self,myspec):
532 - builder.generic.__init__(self,myspec)
533 - self.settings["CHROOT"]="chroot"
534 - self.settings["CHOST"]="mips-unknown-linux-gnu"
535 -
536 -class generic_mipsel(builder.generic):
537 - "Abstract base class for all mipsel builders [Little-endian]"
538 - def __init__(self,myspec):
539 - builder.generic.__init__(self,myspec)
540 - self.settings["CHROOT"]="chroot"
541 - self.settings["CHOST"]="mipsel-unknown-linux-gnu"
542 -
543 -class generic_mips64(builder.generic):
544 - "Abstract base class for all mips64 builders [Big-endian]"
545 - def __init__(self,myspec):
546 - builder.generic.__init__(self,myspec)
547 - self.settings["CHROOT"]="chroot"
548 - self.settings["CHOST"]="mips64-unknown-linux-gnu"
549 -
550 -class generic_mips64el(builder.generic):
551 - "Abstract base class for all mips64el builders [Little-endian]"
552 - def __init__(self,myspec):
553 - builder.generic.__init__(self,myspec)
554 - self.settings["CHROOT"]="chroot"
555 - self.settings["CHOST"]="mips64el-unknown-linux-gnu"
556 -
557 -class arch_mips1(generic_mips):
558 - "Builder class for MIPS I [Big-endian]"
559 - def __init__(self,myspec):
560 - generic_mips.__init__(self,myspec)
561 - self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
562 -
563 -class arch_mips32(generic_mips):
564 - "Builder class for MIPS 32 [Big-endian]"
565 - def __init__(self,myspec):
566 - generic_mips.__init__(self,myspec)
567 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
568 -
569 -class arch_mips32_softfloat(generic_mips):
570 - "Builder class for MIPS 32 [Big-endian softfloat]"
571 - def __init__(self,myspec):
572 - generic_mips.__init__(self,myspec)
573 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
574 - self.settings["CHOST"]="mips-softfloat-linux-gnu"
575 -
576 -class arch_mips32r2(generic_mips):
577 - "Builder class for MIPS 32r2 [Big-endian]"
578 - def __init__(self,myspec):
579 - generic_mips.__init__(self,myspec)
580 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
581 -
582 -class arch_mips32r2_softfloat(generic_mips):
583 - "Builder class for MIPS 32r2 [Big-endian softfloat]"
584 - def __init__(self,myspec):
585 - generic_mips.__init__(self,myspec)
586 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
587 - self.settings["CHOST"]="mips-softfloat-linux-gnu"
588 -
589 -class arch_mips3(generic_mips):
590 - "Builder class for MIPS III [Big-endian]"
591 - def __init__(self,myspec):
592 - generic_mips.__init__(self,myspec)
593 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
594 -
595 -class arch_mips3_n32(generic_mips64):
596 - "Builder class for MIPS III [Big-endian N32]"
597 - def __init__(self,myspec):
598 - generic_mips64.__init__(self,myspec)
599 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
600 -
601 -class arch_mips3_n64(generic_mips64):
602 - "Builder class for MIPS III [Big-endian N64]"
603 - def __init__(self,myspec):
604 - generic_mips64.__init__(self,myspec)
605 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -mfix-r4000 -mfix-r4400 -pipe"
606 -
607 -class arch_mips3_multilib(generic_mips64):
608 - "Builder class for MIPS III [Big-endian multilib]"
609 - def __init__(self,myspec):
610 - generic_mips64.__init__(self,myspec)
611 - self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -mfix-r4000 -mfix-r4400 -pipe"
612 -
613 -class arch_mips4(generic_mips):
614 - "Builder class for MIPS IV [Big-endian]"
615 - def __init__(self,myspec):
616 - generic_mips.__init__(self,myspec)
617 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
618 -
619 -class arch_mips4_n32(generic_mips64):
620 - "Builder class for MIPS IV [Big-endian N32]"
621 - def __init__(self,myspec):
622 - generic_mips64.__init__(self,myspec)
623 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
624 -
625 -class arch_mips4_n64(generic_mips64):
626 - "Builder class for MIPS IV [Big-endian N64]"
627 - def __init__(self,myspec):
628 - generic_mips64.__init__(self,myspec)
629 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
630 -
631 -class arch_mips4_multilib(generic_mips64):
632 - "Builder class for MIPS IV [Big-endian multilib]"
633 - def __init__(self,myspec):
634 - generic_mips64.__init__(self,myspec)
635 - self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
636 -
637 -class arch_mips4_r10k(generic_mips):
638 - "Builder class for MIPS IV R10k [Big-endian]"
639 - def __init__(self,myspec):
640 - generic_mips.__init__(self,myspec)
641 - self.settings["CFLAGS"]="-O2 -march=r10k -mabi=32 -mplt -pipe"
642 -
643 -class arch_mips4_r10k_n32(generic_mips64):
644 - "Builder class for MIPS IV R10k [Big-endian N32]"
645 - def __init__(self,myspec):
646 - generic_mips64.__init__(self,myspec)
647 - self.settings["CFLAGS"]="-O2 -march=r10k -mabi=n32 -mplt -pipe"
648 -
649 -class arch_mips4_r10k_n64(generic_mips64):
650 - "Builder class for MIPS IV R10k [Big-endian N64]"
651 - def __init__(self,myspec):
652 - generic_mips64.__init__(self,myspec)
653 - self.settings["CFLAGS"]="-O2 -march=r10k -mabi=64 -pipe"
654 -
655 -class arch_mips4_r10k_multilib(generic_mips64):
656 - "Builder class for MIPS IV R10k [Big-endian multilib]"
657 - def __init__(self,myspec):
658 - generic_mips64.__init__(self,myspec)
659 - self.settings["CFLAGS"]="-O2 -march=r10k -mplt -pipe"
660 -
661 -class arch_mips64(generic_mips):
662 - "Builder class for MIPS 64 [Big-endian]"
663 - def __init__(self,myspec):
664 - generic_mips.__init__(self,myspec)
665 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
666 -
667 -class arch_mips64_n32(generic_mips64):
668 - "Builder class for MIPS 64 [Big-endian N32]"
669 - def __init__(self,myspec):
670 - generic_mips64.__init__(self,myspec)
671 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
672 -
673 -class arch_mips64_n64(generic_mips64):
674 - "Builder class for MIPS 64 [Big-endian N64]"
675 - def __init__(self,myspec):
676 - generic_mips64.__init__(self,myspec)
677 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
678 -
679 -class arch_mips64_multilib(generic_mips64):
680 - "Builder class for MIPS 64 [Big-endian multilib]"
681 - def __init__(self,myspec):
682 - generic_mips64.__init__(self,myspec)
683 - self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
684 -
685 -class arch_mips64r2(generic_mips):
686 - "Builder class for MIPS 64r2 [Big-endian]"
687 - def __init__(self,myspec):
688 - generic_mips.__init__(self,myspec)
689 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
690 -
691 -class arch_mips64r2_n32(generic_mips64):
692 - "Builder class for MIPS 64r2 [Big-endian N32]"
693 - def __init__(self,myspec):
694 - generic_mips64.__init__(self,myspec)
695 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
696 -
697 -class arch_mips64r2_n64(generic_mips64):
698 - "Builder class for MIPS 64r2 [Big-endian N64]"
699 - def __init__(self,myspec):
700 - generic_mips64.__init__(self,myspec)
701 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
702 -
703 -class arch_mips64r2_multilib(generic_mips64):
704 - "Builder class for MIPS 64r2 [Big-endian multilib]"
705 - def __init__(self,myspec):
706 - generic_mips64.__init__(self,myspec)
707 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
708 -
709 -class arch_mipsel1(generic_mipsel):
710 - "Builder class for MIPS I [Little-endian]"
711 - def __init__(self,myspec):
712 - generic_mipsel.__init__(self,myspec)
713 - self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
714 -
715 -class arch_mips32el(generic_mipsel):
716 - "Builder class for MIPS 32 [Little-endian]"
717 - def __init__(self,myspec):
718 - generic_mipsel.__init__(self,myspec)
719 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
720 -
721 -class arch_mips32el_softfloat(generic_mipsel):
722 - "Builder class for MIPS 32 [Little-endian softfloat]"
723 - def __init__(self,myspec):
724 - generic_mipsel.__init__(self,myspec)
725 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
726 - self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
727 -
728 -class arch_mips32r2el(generic_mipsel):
729 - "Builder class for MIPS 32r2 [Little-endian]"
730 - def __init__(self,myspec):
731 - generic_mipsel.__init__(self,myspec)
732 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
733 -
734 -class arch_mips32r2el_softfloat(generic_mipsel):
735 - "Builder class for MIPS 32r2 [Little-endian softfloat]"
736 - def __init__(self,myspec):
737 - generic_mipsel.__init__(self,myspec)
738 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
739 - self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
740 -
741 -class arch_mipsel3(generic_mipsel):
742 - "Builder class for MIPS III [Little-endian]"
743 - def __init__(self,myspec):
744 - generic_mipsel.__init__(self,myspec)
745 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
746 -
747 -class arch_mipsel3_n32(generic_mips64el):
748 - "Builder class for MIPS III [Little-endian N32]"
749 - def __init__(self,myspec):
750 - generic_mips64el.__init__(self,myspec)
751 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
752 -
753 -class arch_mipsel3_n64(generic_mips64el):
754 - "Builder class for MIPS III [Little-endian N64]"
755 - def __init__(self,myspec):
756 - generic_mips64el.__init__(self,myspec)
757 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
758 -
759 -class arch_mipsel3_multilib(generic_mips64el):
760 - "Builder class for MIPS III [Little-endian multilib]"
761 - def __init__(self,myspec):
762 - generic_mips64el.__init__(self,myspec)
763 - self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -Wa,-mfix-loongson2f-nop -pipe"
764 -
765 -class arch_loongson2e(generic_mipsel):
766 - "Builder class for Loongson 2E [Little-endian]"
767 - def __init__(self,myspec):
768 - generic_mipsel.__init__(self,myspec)
769 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=32 -mplt -pipe"
770 -
771 -class arch_loongson2e_n32(generic_mips64el):
772 - "Builder class for Loongson 2E [Little-endian N32]"
773 - def __init__(self,myspec):
774 - generic_mips64el.__init__(self,myspec)
775 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=n32 -mplt -pipe"
776 -
777 -class arch_loongson2e_n64(generic_mips64el):
778 - "Builder class for Loongson 2E [Little-endian N64]"
779 - def __init__(self,myspec):
780 - generic_mips64el.__init__(self,myspec)
781 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=64 -pipe"
782 -
783 -class arch_loongson2e_multilib(generic_mips64el):
784 - "Builder class for Loongson 2E [Little-endian multilib]"
785 - def __init__(self,myspec):
786 - generic_mips64el.__init__(self,myspec)
787 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mplt -pipe"
788 -
789 -class arch_loongson2f(generic_mipsel):
790 - "Builder class for Loongson 2F [Little-endian]"
791 - def __init__(self,myspec):
792 - generic_mipsel.__init__(self,myspec)
793 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
794 -
795 -class arch_loongson2f_n32(generic_mips64el):
796 - "Builder class for Loongson 2F [Little-endian N32]"
797 - def __init__(self,myspec):
798 - generic_mips64el.__init__(self,myspec)
799 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
800 -
801 -class arch_loongson2f_n64(generic_mips64el):
802 - "Builder class for Loongson 2F [Little-endian N64]"
803 - def __init__(self,myspec):
804 - generic_mips64el.__init__(self,myspec)
805 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
806 -
807 -class arch_loongson2f_multilib(generic_mips64el):
808 - "Builder class for Loongson 2F [Little-endian multilib]"
809 - def __init__(self,myspec):
810 - generic_mips64el.__init__(self,myspec)
811 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mplt -Wa,-mfix-loongson2f-nop -pipe"
812 -
813 -class arch_mipsel4(generic_mipsel):
814 - "Builder class for MIPS IV [Little-endian]"
815 - def __init__(self,myspec):
816 - generic_mipsel.__init__(self,myspec)
817 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
818 -
819 -class arch_mipsel4_n32(generic_mips64el):
820 - "Builder class for MIPS IV [Little-endian N32]"
821 - def __init__(self,myspec):
822 - generic_mips64el.__init__(self,myspec)
823 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
824 -
825 -class arch_mipsel4_n64(generic_mips64el):
826 - "Builder class for MIPS IV [Little-endian N64]"
827 - def __init__(self,myspec):
828 - generic_mips64el.__init__(self,myspec)
829 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
830 -
831 -class arch_mipsel4_multilib(generic_mips64el):
832 - "Builder class for MIPS IV [Little-endian multilib]"
833 - def __init__(self,myspec):
834 - generic_mips64el.__init__(self,myspec)
835 - self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
836 -
837 -class arch_mips64el(generic_mipsel):
838 - "Builder class for MIPS 64 [Little-endian]"
839 - def __init__(self,myspec):
840 - generic_mipsel.__init__(self,myspec)
841 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
842 -
843 -class arch_mips64el_n32(generic_mips64el):
844 - "Builder class for MIPS 64 [Little-endian N32]"
845 - def __init__(self,myspec):
846 - generic_mips64el.__init__(self,myspec)
847 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
848 -
849 -class arch_mips64el_n64(generic_mips64el):
850 - "Builder class for MIPS 64 [Little-endian N64]"
851 - def __init__(self,myspec):
852 - generic_mips64el.__init__(self,myspec)
853 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
854 -
855 -class arch_mips64el_multilib(generic_mips64el):
856 - "Builder class for MIPS 64 [Little-endian multilib]"
857 - def __init__(self,myspec):
858 - generic_mips64el.__init__(self,myspec)
859 - self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
860 -
861 -class arch_mips64r2el(generic_mipsel):
862 - "Builder class for MIPS 64r2 [Little-endian]"
863 - def __init__(self,myspec):
864 - generic_mipsel.__init__(self,myspec)
865 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
866 -
867 -class arch_mips64r2el_n32(generic_mips64el):
868 - "Builder class for MIPS 64r2 [Little-endian N32]"
869 - def __init__(self,myspec):
870 - generic_mips64el.__init__(self,myspec)
871 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
872 -
873 -class arch_mips64r2el_n64(generic_mips64el):
874 - "Builder class for MIPS 64r2 [Little-endian N64]"
875 - def __init__(self,myspec):
876 - generic_mips64el.__init__(self,myspec)
877 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
878 -
879 -class arch_mips64r2el_multilib(generic_mips64el):
880 - "Builder class for MIPS 64r2 [Little-endian multilib]"
881 - def __init__(self,myspec):
882 - generic_mips64el.__init__(self,myspec)
883 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
884 -
885 -class arch_loongson3a(generic_mipsel):
886 - "Builder class for Loongson 3A [Little-endian]"
887 - def __init__(self,myspec):
888 - generic_mipsel.__init__(self,myspec)
889 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=32 -mplt -pipe"
890 -
891 -class arch_loongson3a_n32(generic_mips64el):
892 - "Builder class for Loongson 3A [Little-endian N32]"
893 - def __init__(self,myspec):
894 - generic_mips64el.__init__(self,myspec)
895 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=n32 -mplt -pipe"
896 -
897 -class arch_loongson3a_n64(generic_mips64el):
898 - "Builder class for Loongson 3A [Little-endian N64]"
899 - def __init__(self,myspec):
900 - generic_mips64el.__init__(self,myspec)
901 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=64 -pipe"
902 -
903 -class arch_loongson3a_multilib(generic_mips64el):
904 - "Builder class for Loongson 3A [Little-endian multilib]"
905 - def __init__(self,myspec):
906 - generic_mips64el.__init__(self,myspec)
907 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mplt -pipe"
908 -
909 -class arch_cobalt(generic_mipsel):
910 - "Builder class for cobalt [Little-endian]"
911 - def __init__(self,myspec):
912 - generic_mipsel.__init__(self,myspec)
913 - self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=32 -mplt -pipe"
914 - self.settings["HOSTUSE"]=["cobalt"]
915 -
916 -class arch_cobalt_n32(generic_mips64el):
917 - "Builder class for cobalt [Little-endian N32]"
918 - def __init__(self,myspec):
919 - generic_mips64el.__init__(self,myspec)
920 - self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=n32 -mplt -pipe"
921 - self.settings["HOSTUSE"]=["cobalt"]
922 -
923 -def register():
924 - "Inform main catalyst program of the contents of this plugin."
925 - return ({
926 - "cobalt" : arch_cobalt,
927 - "cobalt_n32" : arch_cobalt_n32,
928 - "mips" : arch_mips1,
929 - "mips1" : arch_mips1,
930 - "mips32" : arch_mips32,
931 - "mips32_softfloat" : arch_mips32_softfloat,
932 - "mips32r2" : arch_mips32r2,
933 - "mips32r2_softfloat" : arch_mips32r2_softfloat,
934 - "mips3" : arch_mips3,
935 - "mips3_n32" : arch_mips3_n32,
936 - "mips3_n64" : arch_mips3_n64,
937 - "mips3_multilib" : arch_mips3_multilib,
938 - "mips4" : arch_mips4,
939 - "mips4_n32" : arch_mips4_n32,
940 - "mips4_n64" : arch_mips4_n64,
941 - "mips4_multilib" : arch_mips4_multilib,
942 - "mips4_r10k" : arch_mips4_r10k,
943 - "mips4_r10k_n32" : arch_mips4_r10k_n32,
944 - "mips4_r10k_n64" : arch_mips4_r10k_n64,
945 - "mips4_r10k_multilib" : arch_mips4_r10k_multilib,
946 - "mips64" : arch_mips64,
947 - "mips64_n32" : arch_mips64_n32,
948 - "mips64_n64" : arch_mips64_n64,
949 - "mips64_multilib" : arch_mips64_multilib,
950 - "mips64r2" : arch_mips64r2,
951 - "mips64r2_n32" : arch_mips64r2_n32,
952 - "mips64r2_n64" : arch_mips64r2_n64,
953 - "mips64r2_multilib" : arch_mips64r2_multilib,
954 - "mipsel" : arch_mipsel1,
955 - "mipsel1" : arch_mipsel1,
956 - "mips32el" : arch_mips32el,
957 - "mips32el_softfloat" : arch_mips32el_softfloat,
958 - "mips32r2el" : arch_mips32r2el,
959 - "mips32r2el_softfloat" : arch_mips32r2el_softfloat,
960 - "mipsel3" : arch_mipsel3,
961 - "mipsel3_n32" : arch_mipsel3_n32,
962 - "mipsel3_n64" : arch_mipsel3_n64,
963 - "mipsel3_multilib" : arch_mipsel3_multilib,
964 - "mipsel4" : arch_mipsel4,
965 - "mipsel4_n32" : arch_mipsel4_n32,
966 - "mipsel4_n64" : arch_mipsel4_n64,
967 - "mipsel4_multilib" : arch_mipsel4_multilib,
968 - "mips64el" : arch_mips64el,
969 - "mips64el_n32" : arch_mips64el_n32,
970 - "mips64el_n64" : arch_mips64el_n64,
971 - "mips64el_multilib" : arch_mips64el_multilib,
972 - "mips64r2el" : arch_mips64r2el,
973 - "mips64r2el_n32" : arch_mips64r2el_n32,
974 - "mips64r2el_n64" : arch_mips64r2el_n64,
975 - "mips64r2el_multilib" : arch_mips64r2el_multilib,
976 - "loongson2e" : arch_loongson2e,
977 - "loongson2e_n32" : arch_loongson2e_n32,
978 - "loongson2e_n64" : arch_loongson2e_n64,
979 - "loongson2e_multilib" : arch_loongson2e_multilib,
980 - "loongson2f" : arch_loongson2f,
981 - "loongson2f_n32" : arch_loongson2f_n32,
982 - "loongson2f_n64" : arch_loongson2f_n64,
983 - "loongson2f_multilib" : arch_loongson2f_multilib,
984 - "loongson3a" : arch_loongson3a,
985 - "loongson3a_n32" : arch_loongson3a_n32,
986 - "loongson3a_n64" : arch_loongson3a_n64,
987 - "loongson3a_multilib" : arch_loongson3a_multilib,
988 - }, ("mips","mips64"))
989 diff --git a/arch/powerpc.py b/arch/powerpc.py
990 deleted file mode 100644
991 index e9f611b..0000000
992 --- a/arch/powerpc.py
993 +++ /dev/null
994 @@ -1,124 +0,0 @@
995 -
996 -import os,builder
997 -from catalyst_support import *
998 -
999 -class generic_ppc(builder.generic):
1000 - "abstract base class for all 32-bit powerpc builders"
1001 - def __init__(self,myspec):
1002 - builder.generic.__init__(self,myspec)
1003 - self.settings["CHOST"]="powerpc-unknown-linux-gnu"
1004 - if self.settings["buildarch"]=="ppc64":
1005 - if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
1006 - raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
1007 - self.settings["CHROOT"]="linux32 chroot"
1008 - self.settings["crosscompile"] = False;
1009 - else:
1010 - self.settings["CHROOT"]="chroot"
1011 -
1012 -class generic_ppc64(builder.generic):
1013 - "abstract base class for all 64-bit powerpc builders"
1014 - def __init__(self,myspec):
1015 - builder.generic.__init__(self,myspec)
1016 - self.settings["CHROOT"]="chroot"
1017 -
1018 -class arch_ppc(generic_ppc):
1019 - "builder class for generic powerpc"
1020 - def __init__(self,myspec):
1021 - generic_ppc.__init__(self,myspec)
1022 - self.settings["CFLAGS"]="-O2 -mcpu=powerpc -mtune=powerpc -pipe"
1023 -
1024 -class arch_ppc64(generic_ppc64):
1025 - "builder class for generic ppc64"
1026 - def __init__(self,myspec):
1027 - generic_ppc64.__init__(self,myspec)
1028 - self.settings["CFLAGS"]="-O2 -pipe"
1029 - self.settings["CHOST"]="powerpc64-unknown-linux-gnu"
1030 -
1031 -class arch_970(arch_ppc64):
1032 - "builder class for 970 aka G5 under ppc64"
1033 - def __init__(self,myspec):
1034 - arch_ppc64.__init__(self,myspec)
1035 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=970 -mtune=970"
1036 - self.settings["HOSTUSE"]=["altivec"]
1037 -
1038 -class arch_cell(arch_ppc64):
1039 - "builder class for cell under ppc64"
1040 - def __init__(self,myspec):
1041 - arch_ppc64.__init__(self,myspec)
1042 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=cell -mtune=cell"
1043 - self.settings["HOSTUSE"]=["altivec","ibm"]
1044 -
1045 -class arch_g3(generic_ppc):
1046 - def __init__(self,myspec):
1047 - generic_ppc.__init__(self,myspec)
1048 - self.settings["CFLAGS"]="-O2 -mcpu=G3 -mtune=G3 -pipe"
1049 -
1050 -class arch_g4(generic_ppc):
1051 - def __init__(self,myspec):
1052 - generic_ppc.__init__(self,myspec)
1053 - self.settings["CFLAGS"]="-O2 -mcpu=G4 -mtune=G4 -maltivec -mabi=altivec -pipe"
1054 - self.settings["HOSTUSE"]=["altivec"]
1055 -
1056 -class arch_g5(generic_ppc):
1057 - def __init__(self,myspec):
1058 - generic_ppc.__init__(self,myspec)
1059 - self.settings["CFLAGS"]="-O2 -mcpu=G5 -mtune=G5 -maltivec -mabi=altivec -pipe"
1060 - self.settings["HOSTUSE"]=["altivec"]
1061 -
1062 -class arch_power(generic_ppc):
1063 - "builder class for generic power"
1064 - def __init__(self,myspec):
1065 - generic_ppc.__init__(self,myspec)
1066 - self.settings["CFLAGS"]="-O2 -mcpu=power -mtune=power -pipe"
1067 -
1068 -class arch_power_ppc(generic_ppc):
1069 - "builder class for generic powerpc/power"
1070 - def __init__(self,myspec):
1071 - generic_ppc.__init__(self,myspec)
1072 - self.settings["CFLAGS"]="-O2 -mcpu=common -mtune=common -pipe"
1073 -
1074 -class arch_power3(arch_ppc64):
1075 - "builder class for power3 under ppc64"
1076 - def __init__(self,myspec):
1077 - arch_ppc64.__init__(self,myspec)
1078 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power3 -mtune=power3"
1079 - self.settings["HOSTUSE"]=["ibm"]
1080 -
1081 -class arch_power4(arch_ppc64):
1082 - "builder class for power4 under ppc64"
1083 - def __init__(self,myspec):
1084 - arch_ppc64.__init__(self,myspec)
1085 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power4 -mtune=power4"
1086 - self.settings["HOSTUSE"]=["ibm"]
1087 -
1088 -class arch_power5(arch_ppc64):
1089 - "builder class for power5 under ppc64"
1090 - def __init__(self,myspec):
1091 - arch_ppc64.__init__(self,myspec)
1092 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power5 -mtune=power5"
1093 - self.settings["HOSTUSE"]=["ibm"]
1094 -
1095 -class arch_power6(arch_ppc64):
1096 - "builder class for power6 under ppc64"
1097 - def __init__(self,myspec):
1098 - arch_ppc64.__init__(self,myspec)
1099 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power6 -mtune=power6"
1100 - self.settings["HOSTUSE"]=["altivec","ibm"]
1101 -
1102 -def register():
1103 - "Inform main catalyst program of the contents of this plugin."
1104 - return ({
1105 - "970" : arch_970,
1106 - "cell" : arch_cell,
1107 - "g3" : arch_g3,
1108 - "g4" : arch_g4,
1109 - "g5" : arch_g5,
1110 - "power" : arch_power,
1111 - "power-ppc" : arch_power_ppc,
1112 - "power3" : arch_power3,
1113 - "power4" : arch_power4,
1114 - "power5" : arch_power5,
1115 - "power6" : arch_power6,
1116 - "ppc" : arch_ppc,
1117 - "ppc64" : arch_ppc64
1118 - }, ("ppc","ppc64","powerpc","powerpc64"))
1119 diff --git a/arch/s390.py b/arch/s390.py
1120 deleted file mode 100644
1121 index bf22f66..0000000
1122 --- a/arch/s390.py
1123 +++ /dev/null
1124 @@ -1,33 +0,0 @@
1125 -
1126 -import builder,os
1127 -from catalyst_support import *
1128 -
1129 -class generic_s390(builder.generic):
1130 - "abstract base class for all s390 builders"
1131 - def __init__(self,myspec):
1132 - builder.generic.__init__(self,myspec)
1133 - self.settings["CHROOT"]="chroot"
1134 -
1135 -class generic_s390x(builder.generic):
1136 - "abstract base class for all s390x builders"
1137 - def __init__(self,myspec):
1138 - builder.generic.__init__(self,myspec)
1139 - self.settings["CHROOT"]="chroot"
1140 -
1141 -class arch_s390(generic_s390):
1142 - "builder class for generic s390"
1143 - def __init__(self,myspec):
1144 - generic_s390.__init__(self,myspec)
1145 - self.settings["CFLAGS"]="-O2 -pipe"
1146 - self.settings["CHOST"]="s390-ibm-linux-gnu"
1147 -
1148 -class arch_s390x(generic_s390x):
1149 - "builder class for generic s390x"
1150 - def __init__(self,myspec):
1151 - generic_s390x.__init__(self,myspec)
1152 - self.settings["CFLAGS"]="-O2 -pipe"
1153 - self.settings["CHOST"]="s390x-ibm-linux-gnu"
1154 -
1155 -def register():
1156 - "Inform main catalyst program of the contents of this plugin."
1157 - return ({"s390":arch_s390,"s390x":arch_s390x}, ("s390", "s390x"))
1158 diff --git a/arch/sh.py b/arch/sh.py
1159 deleted file mode 100644
1160 index 2fc9531..0000000
1161 --- a/arch/sh.py
1162 +++ /dev/null
1163 @@ -1,116 +0,0 @@
1164 -
1165 -import builder,os
1166 -from catalyst_support import *
1167 -
1168 -class generic_sh(builder.generic):
1169 - "Abstract base class for all sh builders [Little-endian]"
1170 - def __init__(self,myspec):
1171 - builder.generic.__init__(self,myspec)
1172 - self.settings["CHROOT"]="chroot"
1173 -
1174 -class generic_sheb(builder.generic):
1175 - "Abstract base class for all sheb builders [Big-endian]"
1176 - def __init__(self,myspec):
1177 - builder.generic.__init__(self,myspec)
1178 - self.settings["CHROOT"]="chroot"
1179 -
1180 -class arch_sh(generic_sh):
1181 - "Builder class for SH [Little-endian]"
1182 - def __init__(self,myspec):
1183 - generic_sh.__init__(self,myspec)
1184 - self.settings["CFLAGS"]="-O2 -pipe"
1185 - self.settings["CHOST"]="sh-unknown-linux-gnu"
1186 -
1187 -class arch_sh2(generic_sh):
1188 - "Builder class for SH-2 [Little-endian]"
1189 - def __init__(self,myspec):
1190 - generic_sh.__init__(self,myspec)
1191 - self.settings["CFLAGS"]="-O2 -m2 -pipe"
1192 - self.settings["CHOST"]="sh2-unknown-linux-gnu"
1193 -
1194 -class arch_sh2a(generic_sh):
1195 - "Builder class for SH-2A [Little-endian]"
1196 - def __init__(self,myspec):
1197 - generic_sh.__init__(self,myspec)
1198 - self.settings["CFLAGS"]="-O2 -m2a -pipe"
1199 - self.settings["CHOST"]="sh2a-unknown-linux-gnu"
1200 -
1201 -class arch_sh3(generic_sh):
1202 - "Builder class for SH-3 [Little-endian]"
1203 - def __init__(self,myspec):
1204 - generic_sh.__init__(self,myspec)
1205 - self.settings["CFLAGS"]="-O2 -m3 -pipe"
1206 - self.settings["CHOST"]="sh3-unknown-linux-gnu"
1207 -
1208 -class arch_sh4(generic_sh):
1209 - "Builder class for SH-4 [Little-endian]"
1210 - def __init__(self,myspec):
1211 - generic_sh.__init__(self,myspec)
1212 - self.settings["CFLAGS"]="-O2 -m4 -pipe"
1213 - self.settings["CHOST"]="sh4-unknown-linux-gnu"
1214 -
1215 -class arch_sh4a(generic_sh):
1216 - "Builder class for SH-4A [Little-endian]"
1217 - def __init__(self,myspec):
1218 - generic_sh.__init__(self,myspec)
1219 - self.settings["CFLAGS"]="-O2 -m4a -pipe"
1220 - self.settings["CHOST"]="sh4a-unknown-linux-gnu"
1221 -
1222 -class arch_sheb(generic_sheb):
1223 - "Builder class for SH [Big-endian]"
1224 - def __init__(self,myspec):
1225 - generic_sheb.__init__(self,myspec)
1226 - self.settings["CFLAGS"]="-O2 -pipe"
1227 - self.settings["CHOST"]="sheb-unknown-linux-gnu"
1228 -
1229 -class arch_sh2eb(generic_sheb):
1230 - "Builder class for SH-2 [Big-endian]"
1231 - def __init__(self,myspec):
1232 - generic_sheb.__init__(self,myspec)
1233 - self.settings["CFLAGS"]="-O2 -m2 -pipe"
1234 - self.settings["CHOST"]="sh2eb-unknown-linux-gnu"
1235 -
1236 -class arch_sh2aeb(generic_sheb):
1237 - "Builder class for SH-2A [Big-endian]"
1238 - def __init__(self,myspec):
1239 - generic_sheb.__init__(self,myspec)
1240 - self.settings["CFLAGS"]="-O2 -m2a -pipe"
1241 - self.settings["CHOST"]="sh2aeb-unknown-linux-gnu"
1242 -
1243 -class arch_sh3eb(generic_sheb):
1244 - "Builder class for SH-3 [Big-endian]"
1245 - def __init__(self,myspec):
1246 - generic_sheb.__init__(self,myspec)
1247 - self.settings["CFLAGS"]="-O2 -m3 -pipe"
1248 - self.settings["CHOST"]="sh3eb-unknown-linux-gnu"
1249 -
1250 -class arch_sh4eb(generic_sheb):
1251 - "Builder class for SH-4 [Big-endian]"
1252 - def __init__(self,myspec):
1253 - generic_sheb.__init__(self,myspec)
1254 - self.settings["CFLAGS"]="-O2 -m4 -pipe"
1255 - self.settings["CHOST"]="sh4eb-unknown-linux-gnu"
1256 -
1257 -class arch_sh4aeb(generic_sheb):
1258 - "Builder class for SH-4A [Big-endian]"
1259 - def __init__(self,myspec):
1260 - generic_sheb.__init__(self,myspec)
1261 - self.settings["CFLAGS"]="-O2 -m4a -pipe"
1262 - self.settings["CHOST"]="sh4aeb-unknown-linux-gnu"
1263 -
1264 -def register():
1265 - "Inform main catalyst program of the contents of this plugin."
1266 - return ({
1267 - "sh" :arch_sh,
1268 - "sh2" :arch_sh2,
1269 - "sh2a" :arch_sh2a,
1270 - "sh3" :arch_sh3,
1271 - "sh4" :arch_sh4,
1272 - "sh4a" :arch_sh4a,
1273 - "sheb" :arch_sheb,
1274 - "sh2eb" :arch_sh2eb,
1275 - "sh2aeb" :arch_sh2aeb,
1276 - "sh3eb" :arch_sh3eb,
1277 - "sh4eb" :arch_sh4eb,
1278 - "sh4aeb" :arch_sh4aeb
1279 - }, ("sh2","sh2a","sh3","sh4","sh4a","sh2eb","sh2aeb","sh3eb","sh4eb","sh4aeb"))
1280 diff --git a/arch/sparc.py b/arch/sparc.py
1281 deleted file mode 100644
1282 index 5eb5344..0000000
1283 --- a/arch/sparc.py
1284 +++ /dev/null
1285 @@ -1,42 +0,0 @@
1286 -
1287 -import builder,os
1288 -from catalyst_support import *
1289 -
1290 -class generic_sparc(builder.generic):
1291 - "abstract base class for all sparc builders"
1292 - def __init__(self,myspec):
1293 - builder.generic.__init__(self,myspec)
1294 - if self.settings["buildarch"]=="sparc64":
1295 - if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
1296 - raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
1297 - self.settings["CHROOT"]="linux32 chroot"
1298 - self.settings["crosscompile"] = False;
1299 - else:
1300 - self.settings["CHROOT"]="chroot"
1301 -
1302 -class generic_sparc64(builder.generic):
1303 - "abstract base class for all sparc64 builders"
1304 - def __init__(self,myspec):
1305 - builder.generic.__init__(self,myspec)
1306 - self.settings["CHROOT"]="chroot"
1307 -
1308 -class arch_sparc(generic_sparc):
1309 - "builder class for generic sparc (sun4cdm)"
1310 - def __init__(self,myspec):
1311 - generic_sparc.__init__(self,myspec)
1312 - self.settings["CFLAGS"]="-O2 -pipe"
1313 - self.settings["CHOST"]="sparc-unknown-linux-gnu"
1314 -
1315 -class arch_sparc64(generic_sparc64):
1316 - "builder class for generic sparc64 (sun4u)"
1317 - def __init__(self,myspec):
1318 - generic_sparc64.__init__(self,myspec)
1319 - self.settings["CFLAGS"]="-O2 -mcpu=ultrasparc -pipe"
1320 - self.settings["CHOST"]="sparc-unknown-linux-gnu"
1321 -
1322 -def register():
1323 - "Inform main catalyst program of the contents of this plugin."
1324 - return ({
1325 - "sparc" : arch_sparc,
1326 - "sparc64" : arch_sparc64
1327 - }, ("sparc","sparc64", ))
1328 diff --git a/arch/x86.py b/arch/x86.py
1329 deleted file mode 100644
1330 index 0391b79..0000000
1331 --- a/arch/x86.py
1332 +++ /dev/null
1333 @@ -1,153 +0,0 @@
1334 -
1335 -import builder,os
1336 -from catalyst_support import *
1337 -
1338 -class generic_x86(builder.generic):
1339 - "abstract base class for all x86 builders"
1340 - def __init__(self,myspec):
1341 - builder.generic.__init__(self,myspec)
1342 - if self.settings["buildarch"]=="amd64":
1343 - if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
1344 - raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
1345 - self.settings["CHROOT"]="linux32 chroot"
1346 - self.settings["crosscompile"] = False;
1347 - else:
1348 - self.settings["CHROOT"]="chroot"
1349 -
1350 -class arch_x86(generic_x86):
1351 - "builder class for generic x86 (386+)"
1352 - def __init__(self,myspec):
1353 - generic_x86.__init__(self,myspec)
1354 - self.settings["CFLAGS"]="-O2 -mtune=i686 -pipe"
1355 - self.settings["CHOST"]="i386-pc-linux-gnu"
1356 -
1357 -class arch_i386(generic_x86):
1358 - "Intel i386 CPU"
1359 - def __init__(self,myspec):
1360 - generic_x86.__init__(self,myspec)
1361 - self.settings["CFLAGS"]="-O2 -march=i386 -pipe"
1362 - self.settings["CHOST"]="i386-pc-linux-gnu"
1363 -
1364 -class arch_i486(generic_x86):
1365 - "Intel i486 CPU"
1366 - def __init__(self,myspec):
1367 - generic_x86.__init__(self,myspec)
1368 - self.settings["CFLAGS"]="-O2 -march=i486 -pipe"
1369 - self.settings["CHOST"]="i486-pc-linux-gnu"
1370 -
1371 -class arch_i586(generic_x86):
1372 - "Intel Pentium CPU"
1373 - def __init__(self,myspec):
1374 - generic_x86.__init__(self,myspec)
1375 - self.settings["CFLAGS"]="-O2 -march=i586 -pipe"
1376 - self.settings["CHOST"]="i586-pc-linux-gnu"
1377 -
1378 -class arch_i686(generic_x86):
1379 - "Intel Pentium Pro CPU"
1380 - def __init__(self,myspec):
1381 - generic_x86.__init__(self,myspec)
1382 - self.settings["CFLAGS"]="-O2 -march=i686 -pipe"
1383 - self.settings["CHOST"]="i686-pc-linux-gnu"
1384 -
1385 -class arch_pentium_mmx(generic_x86):
1386 - "Intel Pentium MMX CPU with MMX support"
1387 - def __init__(self,myspec):
1388 - generic_x86.__init__(self,myspec)
1389 - self.settings["CFLAGS"]="-O2 -march=pentium-mmx -pipe"
1390 - self.settings["HOSTUSE"]=["mmx"]
1391 -
1392 -class arch_pentium2(generic_x86):
1393 - "Intel Pentium 2 CPU with MMX support"
1394 - def __init__(self,myspec):
1395 - generic_x86.__init__(self,myspec)
1396 - self.settings["CFLAGS"]="-O2 -march=pentium2 -pipe"
1397 - self.settings["HOSTUSE"]=["mmx"]
1398 -
1399 -class arch_pentium3(generic_x86):
1400 - "Intel Pentium 3 CPU with MMX and SSE support"
1401 - def __init__(self,myspec):
1402 - generic_x86.__init__(self,myspec)
1403 - self.settings["CFLAGS"]="-O2 -march=pentium3 -pipe"
1404 - self.settings["HOSTUSE"]=["mmx","sse"]
1405 -
1406 -class arch_pentium4(generic_x86):
1407 - "Intel Pentium 4 CPU with MMX, SSE and SSE2 support"
1408 - def __init__(self,myspec):
1409 - generic_x86.__init__(self,myspec)
1410 - self.settings["CFLAGS"]="-O2 -march=pentium4 -pipe"
1411 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
1412 -
1413 -class arch_pentium_m(generic_x86):
1414 - "Intel Pentium M CPU with MMX, SSE and SSE2 support"
1415 - def __init__(self,myspec):
1416 - generic_x86.__init__(self,myspec)
1417 - self.settings["CFLAGS"]="-O2 -march=pentium-m -pipe"
1418 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
1419 -
1420 -class arch_prescott(generic_x86):
1421 - "improved version of Intel Pentium 4 CPU with MMX, SSE, SSE2 and SSE3 support"
1422 - def __init__(self,myspec):
1423 - generic_x86.__init__(self,myspec)
1424 - self.settings["CFLAGS"]="-O2 -march=prescott -pipe"
1425 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
1426 - self.settings["CHOST"]="i686-pc-linux-gnu"
1427 -
1428 -class arch_k6(generic_x86):
1429 - "AMD K6 CPU with MMX support"
1430 - def __init__(self,myspec):
1431 - generic_x86.__init__(self,myspec)
1432 - self.settings["CFLAGS"]="-O2 -march=k6 -pipe"
1433 - self.settings["CHOST"]="i686-pc-linux-gnu"
1434 - self.settings["HOSTUSE"]=["mmx"]
1435 -
1436 -class arch_k6_2(generic_x86):
1437 - "AMD K6-2 CPU with MMX and 3dNOW! support"
1438 - def __init__(self,myspec):
1439 - generic_x86.__init__(self,myspec)
1440 - self.settings["CFLAGS"]="-O2 -march=k6-2 -pipe"
1441 - self.settings["CHOST"]="i686-pc-linux-gnu"
1442 - self.settings["HOSTUSE"]=["mmx","3dnow"]
1443 -
1444 -class arch_athlon(generic_x86):
1445 - "AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and SSE prefetch support"
1446 - def __init__(self,myspec):
1447 - generic_x86.__init__(self,myspec)
1448 - self.settings["CFLAGS"]="-O2 -march=athlon -pipe"
1449 - self.settings["CHOST"]="i686-pc-linux-gnu"
1450 - self.settings["HOSTUSE"]=["mmx","3dnow"]
1451 -
1452 -class arch_athlon_xp(generic_x86):
1453 - "improved AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and full SSE support"
1454 - def __init__(self,myspec):
1455 - generic_x86.__init__(self,myspec)
1456 - self.settings["CFLAGS"]="-O2 -march=athlon-xp -pipe"
1457 - self.settings["CHOST"]="i686-pc-linux-gnu"
1458 - self.settings["HOSTUSE"]=["mmx","3dnow","sse"]
1459 -
1460 -def register():
1461 - "Inform main catalyst program of the contents of this plugin."
1462 - return ({
1463 - "x86" : arch_x86,
1464 - "i386" : arch_i386,
1465 - "i486" : arch_i486,
1466 - "i586" : arch_i586,
1467 - "i686" : arch_i686,
1468 - "pentium" : arch_i586,
1469 - "pentium2" : arch_pentium2,
1470 - "pentium3" : arch_pentium3,
1471 - "pentium3m" : arch_pentium3,
1472 - "pentium-m" : arch_pentium_m,
1473 - "pentium4" : arch_pentium4,
1474 - "pentium4m" : arch_pentium4,
1475 - "pentiumpro" : arch_i686,
1476 - "pentium-mmx" : arch_pentium_mmx,
1477 - "prescott" : arch_prescott,
1478 - "k6" : arch_k6,
1479 - "k6-2" : arch_k6_2,
1480 - "k6-3" : arch_k6_2,
1481 - "athlon" : arch_athlon,
1482 - "athlon-tbird" : arch_athlon,
1483 - "athlon-4" : arch_athlon_xp,
1484 - "athlon-xp" : arch_athlon_xp,
1485 - "athlon-mp" : arch_athlon_xp
1486 - }, ('i386', 'i486', 'i586', 'i686'))
1487 diff --git a/bin/catalyst b/bin/catalyst
1488 new file mode 100755
1489 index 0000000..a4e55db
1490 --- /dev/null
1491 +++ b/bin/catalyst
1492 @@ -0,0 +1,46 @@
1493 +#!/usr/bin/python -OO
1494 +
1495 +# Maintained in full by:
1496 +# Catalyst Team <catalyst@g.o>
1497 +# Release Engineering Team <releng@g.o>
1498 +# Andrew Gaffney <agaffney@g.o>
1499 +# Chris Gianelloni <wolf31o2@××××××××.org>
1500 +# $Id$
1501 +
1502 +
1503 +from __future__ import print_function
1504 +
1505 +import sys
1506 +
1507 +__maintainer__="Catalyst <catalyst@g.o>"
1508 +__version__="2.0.15"
1509 +
1510 +
1511 +# This block ensures that ^C interrupts are handled quietly.
1512 +try:
1513 + import signal
1514 +
1515 + def exithandler(signum,frame):
1516 + signal.signal(signal.SIGINT, signal.SIG_IGN)
1517 + signal.signal(signal.SIGTERM, signal.SIG_IGN)
1518 + print()
1519 + sys.exit(1)
1520 +
1521 + signal.signal(signal.SIGINT, exithandler)
1522 + signal.signal(signal.SIGTERM, exithandler)
1523 + signal.signal(signal.SIGPIPE, signal.SIG_DFL)
1524 +
1525 +except KeyboardInterrupt:
1526 + print()
1527 + sys.exit(1)
1528 +
1529 +
1530 +from catalyst.main import main
1531 +
1532 +try:
1533 + main()
1534 +except KeyboardInterrupt:
1535 + print("Aborted.")
1536 + sys.exit(130)
1537 +sys.exit(0)
1538 +
1539 diff --git a/catalyst b/catalyst
1540 deleted file mode 100755
1541 index a68a2ba..0000000
1542 --- a/catalyst
1543 +++ /dev/null
1544 @@ -1,428 +0,0 @@
1545 -#!/usr/bin/python2 -OO
1546 -
1547 -# Maintained in full by:
1548 -# Catalyst Team <catalyst@g.o>
1549 -# Release Engineering Team <releng@g.o>
1550 -# Andrew Gaffney <agaffney@g.o>
1551 -# Chris Gianelloni <wolf31o2@××××××××.org>
1552 -# $Id$
1553 -
1554 -import os
1555 -import sys
1556 -import imp
1557 -import string
1558 -import getopt
1559 -import pdb
1560 -import os.path
1561 -
1562 -import modules.catalyst.config
1563 -import modules.catalyst.util
1564 -
1565 -__maintainer__="Catalyst <catalyst@g.o>"
1566 -__version__="2.0.15"
1567 -
1568 -conf_values={}
1569 -
1570 -def usage():
1571 - print """Usage catalyst [options] [-C variable=value...] [ -s identifier]
1572 - -a --clear-autoresume clear autoresume flags
1573 - -c --config use specified configuration file
1574 - -C --cli catalyst commandline (MUST BE LAST OPTION)
1575 - -d --debug enable debugging
1576 - -f --file read specfile
1577 - -F --fetchonly fetch files only
1578 - -h --help print this help message
1579 - -p --purge clear tmp dirs,package cache, autoresume flags
1580 - -P --purgeonly clear tmp dirs,package cache, autoresume flags and exit
1581 - -T --purgetmponly clear tmp dirs and autoresume flags and exit
1582 - -s --snapshot generate a release snapshot
1583 - -V --version display version information
1584 - -v --verbose verbose output
1585 -
1586 -Usage examples:
1587 -
1588 -Using the commandline option (-C, --cli) to build a Portage snapshot:
1589 -catalyst -C target=snapshot version_stamp=my_date
1590 -
1591 -Using the snapshot option (-s, --snapshot) to build a release snapshot:
1592 -catalyst -s 20071121"
1593 -
1594 -Using the specfile option (-f, --file) to build a stage target:
1595 -catalyst -f stage1-specfile.spec
1596 -"""
1597 -
1598 -
1599 -def version():
1600 - print "Catalyst, version "+__version__
1601 - print "Copyright 2003-2008 Gentoo Foundation"
1602 - print "Copyright 2008-2012 various authors"
1603 - print "Distributed under the GNU General Public License version 2.1\n"
1604 -
1605 -def parse_config(myconfig):
1606 - # search a couple of different areas for the main config file
1607 - myconf={}
1608 - config_file=""
1609 -
1610 - confdefaults={
1611 - "distdir": "/usr/portage/distfiles",
1612 - "hash_function": "crc32",
1613 - "packagedir": "/usr/portage/packages",
1614 - "portdir": "/usr/portage",
1615 - "port_tmpdir": "/var/tmp/portage",
1616 - "repo_name": "portage",
1617 - "sharedir": "/usr/lib/catalyst",
1618 - "snapshot_name": "portage-",
1619 - "snapshot_cache": "/var/tmp/catalyst/snapshot_cache",
1620 - "storedir": "/var/tmp/catalyst",
1621 - }
1622 -
1623 - # first, try the one passed (presumably from the cmdline)
1624 - if myconfig:
1625 - if os.path.exists(myconfig):
1626 - print "Using command line specified Catalyst configuration file, "+\
1627 - myconfig
1628 - config_file=myconfig
1629 -
1630 - else:
1631 - print "!!! catalyst: Could not use specified configuration file "+\
1632 - myconfig
1633 - sys.exit(1)
1634 -
1635 - # next, try the default location
1636 - elif os.path.exists("/etc/catalyst/catalyst.conf"):
1637 - print "Using default Catalyst configuration file," + \
1638 - " /etc/catalyst/catalyst.conf"
1639 - config_file="/etc/catalyst/catalyst.conf"
1640 -
1641 - # can't find a config file (we are screwed), so bail out
1642 - else:
1643 - print "!!! catalyst: Could not find a suitable configuration file"
1644 - sys.exit(1)
1645 -
1646 - # now, try and parse the config file "config_file"
1647 - try:
1648 -# execfile(config_file, myconf, myconf)
1649 - myconfig = modules.catalyst.config.ConfigParser(config_file)
1650 - myconf.update(myconfig.get_values())
1651 -
1652 - except:
1653 - print "!!! catalyst: Unable to parse configuration file, "+myconfig
1654 - sys.exit(1)
1655 -
1656 - # now, load up the values into conf_values so that we can use them
1657 - for x in confdefaults.keys():
1658 - if x in myconf:
1659 - print "Setting",x,"to config file value \""+myconf[x]+"\""
1660 - conf_values[x]=myconf[x]
1661 - else:
1662 - print "Setting",x,"to default value \""+confdefaults[x]+"\""
1663 - conf_values[x]=confdefaults[x]
1664 -
1665 - # parse out the rest of the options from the config file
1666 - if "autoresume" in string.split(conf_values["options"]):
1667 - print "Autoresuming support enabled."
1668 - conf_values["AUTORESUME"]="1"
1669 -
1670 - if "bindist" in string.split(conf_values["options"]):
1671 - print "Binary redistribution enabled"
1672 - conf_values["BINDIST"]="1"
1673 - else:
1674 - print "Bindist is not enabled in catalyst.conf"
1675 - print "Binary redistribution of generated stages/isos may be prohibited by law."
1676 - print "Please see the use description for bindist on any package you are including."
1677 -
1678 - if "ccache" in string.split(conf_values["options"]):
1679 - print "Compiler cache support enabled."
1680 - conf_values["CCACHE"]="1"
1681 -
1682 - if "clear-autoresume" in string.split(conf_values["options"]):
1683 - print "Cleaning autoresume flags support enabled."
1684 - conf_values["CLEAR_AUTORESUME"]="1"
1685 -
1686 - if "distcc" in string.split(conf_values["options"]):
1687 - print "Distcc support enabled."
1688 - conf_values["DISTCC"]="1"
1689 -
1690 - if "icecream" in string.split(conf_values["options"]):
1691 - print "Icecream compiler cluster support enabled."
1692 - conf_values["ICECREAM"]="1"
1693 -
1694 - if "kerncache" in string.split(conf_values["options"]):
1695 - print "Kernel cache support enabled."
1696 - conf_values["KERNCACHE"]="1"
1697 -
1698 - if "pkgcache" in string.split(conf_values["options"]):
1699 - print "Package cache support enabled."
1700 - conf_values["PKGCACHE"]="1"
1701 -
1702 - if "preserve_libs" in string.split(conf_values["options"]):
1703 - print "Preserving libs during unmerge."
1704 - conf_values["PRESERVE_LIBS"]="1"
1705 -
1706 - if "purge" in string.split(conf_values["options"]):
1707 - print "Purge support enabled."
1708 - conf_values["PURGE"]="1"
1709 -
1710 - if "seedcache" in string.split(conf_values["options"]):
1711 - print "Seed cache support enabled."
1712 - conf_values["SEEDCACHE"]="1"
1713 -
1714 - if "snapcache" in string.split(conf_values["options"]):
1715 - print "Snapshot cache support enabled."
1716 - conf_values["SNAPCACHE"]="1"
1717 -
1718 - if "digests" in myconf:
1719 - conf_values["digests"]=myconf["digests"]
1720 - if "contents" in myconf:
1721 - conf_values["contents"]=myconf["contents"]
1722 -
1723 - if "envscript" in myconf:
1724 - print "Envscript support enabled."
1725 - conf_values["ENVSCRIPT"]=myconf["envscript"]
1726 -
1727 - if "var_tmpfs_portage" in myconf:
1728 - conf_values["var_tmpfs_portage"]=myconf["var_tmpfs_portage"];
1729 -
1730 - if "port_logdir" in myconf:
1731 - conf_values["port_logdir"]=myconf["port_logdir"];
1732 -
1733 -def import_modules():
1734 - # import catalyst's own modules
1735 - # (i.e. catalyst_support and the arch modules)
1736 - targetmap={}
1737 -
1738 - try:
1739 - for x in required_build_targets:
1740 - try:
1741 - fh=open(conf_values["sharedir"]+"/modules/"+x+".py")
1742 - module=imp.load_module(x,fh,"modules/"+x+".py",
1743 - (".py","r",imp.PY_SOURCE))
1744 - fh.close()
1745 -
1746 - except IOError:
1747 - raise CatalystError,"Can't find "+x+".py plugin in "+\
1748 - conf_values["sharedir"]+"/modules/"
1749 -
1750 - for x in valid_build_targets:
1751 - try:
1752 - fh=open(conf_values["sharedir"]+"/modules/"+x+".py")
1753 - module=imp.load_module(x,fh,"modules/"+x+".py",
1754 - (".py","r",imp.PY_SOURCE))
1755 - module.register(targetmap)
1756 - fh.close()
1757 -
1758 - except IOError:
1759 - raise CatalystError,"Can't find "+x+".py plugin in "+\
1760 - conf_values["sharedir"]+"/modules/"
1761 -
1762 - except ImportError:
1763 - print "!!! catalyst: Python modules not found in "+\
1764 - conf_values["sharedir"]+"/modules; exiting."
1765 - sys.exit(1)
1766 -
1767 - return targetmap
1768 -
1769 -def build_target(addlargs, targetmap):
1770 - try:
1771 - if addlargs["target"] not in targetmap:
1772 - raise CatalystError, \
1773 - "Target \"%s\" not available." % addlargs["target"]
1774 -
1775 - mytarget=targetmap[addlargs["target"]](conf_values, addlargs)
1776 -
1777 - mytarget.run()
1778 -
1779 - except:
1780 - catalyst.util.print_traceback()
1781 - print "!!! catalyst: Error encountered during run of target " + \
1782 - addlargs["target"]
1783 - sys.exit(1)
1784 -
1785 -if __name__ == "__main__":
1786 - targetmap={}
1787 -
1788 - version()
1789 - if os.getuid() != 0:
1790 - # catalyst cannot be run as a normal user due to chroots, mounts, etc
1791 - print "!!! catalyst: This script requires root privileges to operate"
1792 - sys.exit(2)
1793 -
1794 - # we need some options in order to work correctly
1795 - if len(sys.argv) < 2:
1796 - usage()
1797 - sys.exit(2)
1798 -
1799 - # parse out the command line arguments
1800 - try:
1801 - opts,args = getopt.getopt(sys.argv[1:], "apPThvdc:C:f:FVs:",
1802 - ["purge", "purgeonly", "purgetmponly", "help", "version", "debug",
1803 - "clear-autoresume", "config=", "cli=", "file=", "fetch",
1804 - "verbose","snapshot="
1805 - ]
1806 - )
1807 -
1808 - except getopt.GetoptError:
1809 - usage()
1810 - sys.exit(2)
1811 -
1812 - # defaults for commandline opts
1813 - debug=False
1814 - verbose=False
1815 - fetch=False
1816 - myconfig=""
1817 - myspecfile=""
1818 - mycmdline=[]
1819 - myopts=[]
1820 -
1821 - # check preconditions
1822 - if len(opts) == 0:
1823 - print "!!! catalyst: please specify one of either -f or -C\n"
1824 - usage()
1825 - sys.exit(2)
1826 -
1827 - run = False
1828 - for o, a in opts:
1829 - if o in ("-h", "--help"):
1830 - usage()
1831 - sys.exit(1)
1832 -
1833 - if o in ("-V", "--version"):
1834 - print "Catalyst version "+__version__
1835 - sys.exit(1)
1836 -
1837 - if o in ("-d", "--debug"):
1838 - conf_values["DEBUG"]="1"
1839 - conf_values["VERBOSE"]="1"
1840 -
1841 - if o in ("-c", "--config"):
1842 - myconfig=a
1843 -
1844 - if o in ("-C", "--cli"):
1845 - run = True
1846 - x=sys.argv.index(o)+1
1847 - while x < len(sys.argv):
1848 - mycmdline.append(sys.argv[x])
1849 - x=x+1
1850 -
1851 - if o in ("-f", "--file"):
1852 - run = True
1853 - myspecfile=a
1854 -
1855 - if o in ("-F", "--fetchonly"):
1856 - conf_values["FETCH"]="1"
1857 -
1858 - if o in ("-v", "--verbose"):
1859 - conf_values["VERBOSE"]="1"
1860 -
1861 - if o in ("-s", "--snapshot"):
1862 - if len(sys.argv) < 3:
1863 - print "!!! catalyst: missing snapshot identifier\n"
1864 - usage()
1865 - sys.exit(2)
1866 - else:
1867 - run = True
1868 - mycmdline.append("target=snapshot")
1869 - mycmdline.append("version_stamp="+a)
1870 -
1871 - if o in ("-p", "--purge"):
1872 - conf_values["PURGE"] = "1"
1873 -
1874 - if o in ("-P", "--purgeonly"):
1875 - conf_values["PURGEONLY"] = "1"
1876 -
1877 - if o in ("-T", "--purgetmponly"):
1878 - conf_values["PURGETMPONLY"] = "1"
1879 -
1880 - if o in ("-a", "--clear-autoresume"):
1881 - conf_values["CLEAR_AUTORESUME"] = "1"
1882 -
1883 - if not run:
1884 - print "!!! catalyst: please specify one of either -f or -C\n"
1885 - usage()
1886 - sys.exit(2)
1887 -
1888 - # import configuration file and import our main module using those settings
1889 - parse_config(myconfig)
1890 - sys.path.append(conf_values["sharedir"]+"/modules")
1891 - from catalyst_support import *
1892 -
1893 - # Start checking that digests are valid now that the hash_map was imported
1894 - # from catalyst_support
1895 - if "digests" in conf_values:
1896 - for i in conf_values["digests"].split():
1897 - if i not in hash_map:
1898 - print
1899 - print i+" is not a valid digest entry"
1900 - print "Valid digest entries:"
1901 - print hash_map.keys()
1902 - print
1903 - print "Catalyst aborting...."
1904 - sys.exit(2)
1905 - if find_binary(hash_map[i][1]) == None:
1906 - print
1907 - print "digest="+i
1908 - print "\tThe "+hash_map[i][1]+\
1909 - " binary was not found. It needs to be in your system path"
1910 - print
1911 - print "Catalyst aborting...."
1912 - sys.exit(2)
1913 - if "hash_function" in conf_values:
1914 - if conf_values["hash_function"] not in hash_map:
1915 - print
1916 - print conf_values["hash_function"]+\
1917 - " is not a valid hash_function entry"
1918 - print "Valid hash_function entries:"
1919 - print hash_map.keys()
1920 - print
1921 - print "Catalyst aborting...."
1922 - sys.exit(2)
1923 - if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
1924 - print
1925 - print "hash_function="+conf_values["hash_function"]
1926 - print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
1927 - " binary was not found. It needs to be in your system path"
1928 - print
1929 - print "Catalyst aborting...."
1930 - sys.exit(2)
1931 -
1932 - # import the rest of the catalyst modules
1933 - targetmap=import_modules()
1934 -
1935 - addlargs={}
1936 -
1937 - if myspecfile:
1938 - spec = modules.catalyst.config.SpecParser(myspecfile)
1939 - addlargs.update(spec.get_values())
1940 -
1941 - if mycmdline:
1942 - try:
1943 - cmdline = modules.catalyst.config.ConfigParser()
1944 - cmdline.parse_lines(mycmdline)
1945 - addlargs.update(cmdline.get_values())
1946 - except CatalystError:
1947 - print "!!! catalyst: Could not parse commandline, exiting."
1948 - sys.exit(1)
1949 -
1950 - if "target" not in addlargs:
1951 - raise CatalystError, "Required value \"target\" not specified."
1952 -
1953 - # everything is setup, so the build is a go
1954 - try:
1955 - build_target(addlargs, targetmap)
1956 -
1957 - except CatalystError:
1958 - print
1959 - print "Catalyst aborting...."
1960 - sys.exit(2)
1961 - except KeyboardInterrupt:
1962 - print "\nCatalyst build aborted due to user interrupt ( Ctrl-C )"
1963 - print
1964 - print "Catalyst aborting...."
1965 - sys.exit(2)
1966 - except LockInUse:
1967 - print "Catalyst aborting...."
1968 - sys.exit(2)
1969 - except:
1970 - print "Catalyst aborting...."
1971 - raise
1972 - sys.exit(2)
1973 diff --git a/catalyst/__init__.py b/catalyst/__init__.py
1974 new file mode 100644
1975 index 0000000..e69de29
1976 diff --git a/catalyst/arch/__init__.py b/catalyst/arch/__init__.py
1977 new file mode 100644
1978 index 0000000..8b13789
1979 --- /dev/null
1980 +++ b/catalyst/arch/__init__.py
1981 @@ -0,0 +1 @@
1982 +
1983 diff --git a/catalyst/arch/alpha.py b/catalyst/arch/alpha.py
1984 new file mode 100644
1985 index 0000000..f0fc95a
1986 --- /dev/null
1987 +++ b/catalyst/arch/alpha.py
1988 @@ -0,0 +1,75 @@
1989 +
1990 +import builder,os
1991 +from catalyst_support import *
1992 +
1993 +class generic_alpha(builder.generic):
1994 + "abstract base class for all alpha builders"
1995 + def __init__(self,myspec):
1996 + builder.generic.__init__(self,myspec)
1997 + self.settings["CHROOT"]="chroot"
1998 + self.settings["CFLAGS"]="-mieee -pipe"
1999 +
2000 +class arch_alpha(generic_alpha):
2001 + "builder class for generic alpha (ev4+)"
2002 + def __init__(self,myspec):
2003 + generic_alpha.__init__(self,myspec)
2004 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
2005 + self.settings["CHOST"]="alpha-unknown-linux-gnu"
2006 +
2007 +class arch_ev4(generic_alpha):
2008 + "builder class for alpha ev4"
2009 + def __init__(self,myspec):
2010 + generic_alpha.__init__(self,myspec)
2011 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
2012 + self.settings["CHOST"]="alphaev4-unknown-linux-gnu"
2013 +
2014 +class arch_ev45(generic_alpha):
2015 + "builder class for alpha ev45"
2016 + def __init__(self,myspec):
2017 + generic_alpha.__init__(self,myspec)
2018 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev45"
2019 + self.settings["CHOST"]="alphaev45-unknown-linux-gnu"
2020 +
2021 +class arch_ev5(generic_alpha):
2022 + "builder class for alpha ev5"
2023 + def __init__(self,myspec):
2024 + generic_alpha.__init__(self,myspec)
2025 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev5"
2026 + self.settings["CHOST"]="alphaev5-unknown-linux-gnu"
2027 +
2028 +class arch_ev56(generic_alpha):
2029 + "builder class for alpha ev56 (ev5 plus BWX)"
2030 + def __init__(self,myspec):
2031 + generic_alpha.__init__(self,myspec)
2032 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev56"
2033 + self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
2034 +
2035 +class arch_pca56(generic_alpha):
2036 + "builder class for alpha pca56 (ev5 plus BWX & MAX)"
2037 + def __init__(self,myspec):
2038 + generic_alpha.__init__(self,myspec)
2039 + self.settings["CFLAGS"]+=" -O2 -mcpu=pca56"
2040 + self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
2041 +
2042 +class arch_ev6(generic_alpha):
2043 + "builder class for alpha ev6"
2044 + def __init__(self,myspec):
2045 + generic_alpha.__init__(self,myspec)
2046 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev6"
2047 + self.settings["CHOST"]="alphaev6-unknown-linux-gnu"
2048 + self.settings["HOSTUSE"]=["ev6"]
2049 +
2050 +class arch_ev67(generic_alpha):
2051 + "builder class for alpha ev67 (ev6 plus CIX)"
2052 + def __init__(self,myspec):
2053 + generic_alpha.__init__(self,myspec)
2054 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev67"
2055 + self.settings["CHOST"]="alphaev67-unknown-linux-gnu"
2056 + self.settings["HOSTUSE"]=["ev6"]
2057 +
2058 +def register():
2059 + "Inform main catalyst program of the contents of this plugin."
2060 + return ({ "alpha":arch_alpha, "ev4":arch_ev4, "ev45":arch_ev45,
2061 + "ev5":arch_ev5, "ev56":arch_ev56, "pca56":arch_pca56,
2062 + "ev6":arch_ev6, "ev67":arch_ev67 },
2063 + ("alpha", ))
2064 diff --git a/catalyst/arch/amd64.py b/catalyst/arch/amd64.py
2065 new file mode 100644
2066 index 0000000..262b55a
2067 --- /dev/null
2068 +++ b/catalyst/arch/amd64.py
2069 @@ -0,0 +1,83 @@
2070 +
2071 +import builder
2072 +
2073 +class generic_amd64(builder.generic):
2074 + "abstract base class for all amd64 builders"
2075 + def __init__(self,myspec):
2076 + builder.generic.__init__(self,myspec)
2077 + self.settings["CHROOT"]="chroot"
2078 +
2079 +class arch_amd64(generic_amd64):
2080 + "builder class for generic amd64 (Intel and AMD)"
2081 + def __init__(self,myspec):
2082 + generic_amd64.__init__(self,myspec)
2083 + self.settings["CFLAGS"]="-O2 -pipe"
2084 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2085 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
2086 +
2087 +class arch_nocona(generic_amd64):
2088 + "improved version of Intel Pentium 4 CPU with 64-bit extensions, MMX, SSE, SSE2 and SSE3 support"
2089 + def __init__(self,myspec):
2090 + generic_amd64.__init__(self,myspec)
2091 + self.settings["CFLAGS"]="-O2 -march=nocona -pipe"
2092 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2093 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
2094 +
2095 +# Requires gcc 4.3 to use this class
2096 +class arch_core2(generic_amd64):
2097 + "Intel Core 2 CPU with 64-bit extensions, MMX, SSE, SSE2, SSE3 and SSSE3 support"
2098 + def __init__(self,myspec):
2099 + generic_amd64.__init__(self,myspec)
2100 + self.settings["CFLAGS"]="-O2 -march=core2 -pipe"
2101 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2102 + self.settings["HOSTUSE"]=["mmx","sse","sse2","ssse3"]
2103 +
2104 +class arch_k8(generic_amd64):
2105 + "generic k8, opteron and athlon64 support"
2106 + def __init__(self,myspec):
2107 + generic_amd64.__init__(self,myspec)
2108 + self.settings["CFLAGS"]="-O2 -march=k8 -pipe"
2109 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2110 + self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
2111 +
2112 +class arch_k8_sse3(generic_amd64):
2113 + "improved versions of k8, opteron and athlon64 with SSE3 support"
2114 + def __init__(self,myspec):
2115 + generic_amd64.__init__(self,myspec)
2116 + self.settings["CFLAGS"]="-O2 -march=k8-sse3 -pipe"
2117 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2118 + self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
2119 +
2120 +class arch_amdfam10(generic_amd64):
2121 + "AMD Family 10h core based CPUs with x86-64 instruction set support"
2122 + def __init__(self,myspec):
2123 + generic_amd64.__init__(self,myspec)
2124 + self.settings["CFLAGS"]="-O2 -march=amdfam10 -pipe"
2125 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2126 + self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
2127 +
2128 +class arch_x32(generic_amd64):
2129 + "builder class for generic x32 (Intel and AMD)"
2130 + def __init__(self,myspec):
2131 + generic_amd64.__init__(self,myspec)
2132 + self.settings["CFLAGS"]="-O2 -pipe"
2133 + self.settings["CHOST"]="x86_64-pc-linux-gnux32"
2134 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
2135 +
2136 +def register():
2137 + "inform main catalyst program of the contents of this plugin"
2138 + return ({
2139 + "amd64" : arch_amd64,
2140 + "k8" : arch_k8,
2141 + "opteron" : arch_k8,
2142 + "athlon64" : arch_k8,
2143 + "athlonfx" : arch_k8,
2144 + "nocona" : arch_nocona,
2145 + "core2" : arch_core2,
2146 + "k8-sse3" : arch_k8_sse3,
2147 + "opteron-sse3" : arch_k8_sse3,
2148 + "athlon64-sse3" : arch_k8_sse3,
2149 + "amdfam10" : arch_amdfam10,
2150 + "barcelona" : arch_amdfam10,
2151 + "x32" : arch_x32,
2152 + }, ("x86_64","amd64","nocona"))
2153 diff --git a/catalyst/arch/arm.py b/catalyst/arch/arm.py
2154 new file mode 100644
2155 index 0000000..2de3942
2156 --- /dev/null
2157 +++ b/catalyst/arch/arm.py
2158 @@ -0,0 +1,133 @@
2159 +
2160 +import builder,os
2161 +from catalyst_support import *
2162 +
2163 +class generic_arm(builder.generic):
2164 + "Abstract base class for all arm (little endian) builders"
2165 + def __init__(self,myspec):
2166 + builder.generic.__init__(self,myspec)
2167 + self.settings["CHROOT"]="chroot"
2168 + self.settings["CFLAGS"]="-O2 -pipe"
2169 +
2170 +class generic_armeb(builder.generic):
2171 + "Abstract base class for all arm (big endian) builders"
2172 + def __init__(self,myspec):
2173 + builder.generic.__init__(self,myspec)
2174 + self.settings["CHROOT"]="chroot"
2175 + self.settings["CFLAGS"]="-O2 -pipe"
2176 +
2177 +class arch_arm(generic_arm):
2178 + "Builder class for arm (little endian) target"
2179 + def __init__(self,myspec):
2180 + generic_arm.__init__(self,myspec)
2181 + self.settings["CHOST"]="arm-unknown-linux-gnu"
2182 +
2183 +class arch_armeb(generic_armeb):
2184 + "Builder class for arm (big endian) target"
2185 + def __init__(self,myspec):
2186 + generic_armeb.__init__(self,myspec)
2187 + self.settings["CHOST"]="armeb-unknown-linux-gnu"
2188 +
2189 +class arch_armv4l(generic_arm):
2190 + "Builder class for armv4l target"
2191 + def __init__(self,myspec):
2192 + generic_arm.__init__(self,myspec)
2193 + self.settings["CHOST"]="armv4l-unknown-linux-gnu"
2194 + self.settings["CFLAGS"]+=" -march=armv4"
2195 +
2196 +class arch_armv4tl(generic_arm):
2197 + "Builder class for armv4tl target"
2198 + def __init__(self,myspec):
2199 + generic_arm.__init__(self,myspec)
2200 + self.settings["CHOST"]="armv4tl-softfloat-linux-gnueabi"
2201 + self.settings["CFLAGS"]+=" -march=armv4t"
2202 +
2203 +class arch_armv5tl(generic_arm):
2204 + "Builder class for armv5tl target"
2205 + def __init__(self,myspec):
2206 + generic_arm.__init__(self,myspec)
2207 + self.settings["CHOST"]="armv5tl-softfloat-linux-gnueabi"
2208 + self.settings["CFLAGS"]+=" -march=armv5t"
2209 +
2210 +class arch_armv5tel(generic_arm):
2211 + "Builder class for armv5tel target"
2212 + def __init__(self,myspec):
2213 + generic_arm.__init__(self,myspec)
2214 + self.settings["CHOST"]="armv5tel-softfloat-linux-gnueabi"
2215 + self.settings["CFLAGS"]+=" -march=armv5te"
2216 +
2217 +class arch_armv5tejl(generic_arm):
2218 + "Builder class for armv5tejl target"
2219 + def __init__(self,myspec):
2220 + generic_arm.__init__(self,myspec)
2221 + self.settings["CHOST"]="armv5tejl-softfloat-linux-gnueabi"
2222 + self.settings["CFLAGS"]+=" -march=armv5te"
2223 +
2224 +class arch_armv6j(generic_arm):
2225 + "Builder class for armv6j target"
2226 + def __init__(self,myspec):
2227 + generic_arm.__init__(self,myspec)
2228 + self.settings["CHOST"]="armv6j-softfp-linux-gnueabi"
2229 + self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=softfp"
2230 +
2231 +class arch_armv6z(generic_arm):
2232 + "Builder class for armv6z target"
2233 + def __init__(self,myspec):
2234 + generic_arm.__init__(self,myspec)
2235 + self.settings["CHOST"]="armv6z-softfp-linux-gnueabi"
2236 + self.settings["CFLAGS"]+=" -march=armv6z -mfpu=vfp -mfloat-abi=softfp"
2237 +
2238 +class arch_armv6zk(generic_arm):
2239 + "Builder class for armv6zk target"
2240 + def __init__(self,myspec):
2241 + generic_arm.__init__(self,myspec)
2242 + self.settings["CHOST"]="armv6zk-softfp-linux-gnueabi"
2243 + self.settings["CFLAGS"]+=" -march=armv6zk -mfpu=vfp -mfloat-abi=softfp"
2244 +
2245 +class arch_armv7a(generic_arm):
2246 + "Builder class for armv7a target"
2247 + def __init__(self,myspec):
2248 + generic_arm.__init__(self,myspec)
2249 + self.settings["CHOST"]="armv7a-softfp-linux-gnueabi"
2250 + self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp"
2251 +
2252 +class arch_armv6j_hardfp(generic_arm):
2253 + "Builder class for armv6j hardfloat target, needs >=gcc-4.5"
2254 + def __init__(self,myspec):
2255 + generic_arm.__init__(self,myspec)
2256 + self.settings["CHOST"]="armv6j-hardfloat-linux-gnueabi"
2257 + self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=hard"
2258 +
2259 +class arch_armv7a_hardfp(generic_arm):
2260 + "Builder class for armv7a hardfloat target, needs >=gcc-4.5"
2261 + def __init__(self,myspec):
2262 + generic_arm.__init__(self,myspec)
2263 + self.settings["CHOST"]="armv7a-hardfloat-linux-gnueabi"
2264 + self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=hard"
2265 +
2266 +class arch_armv5teb(generic_armeb):
2267 + "Builder class for armv5teb (XScale) target"
2268 + def __init__(self,myspec):
2269 + generic_armeb.__init__(self,myspec)
2270 + self.settings["CFLAGS"]+=" -mcpu=xscale"
2271 + self.settings["CHOST"]="armv5teb-softfloat-linux-gnueabi"
2272 +
2273 +def register():
2274 + "Inform main catalyst program of the contents of this plugin."
2275 + return ({
2276 + "arm" : arch_arm,
2277 + "armv4l" : arch_armv4l,
2278 + "armv4tl": arch_armv4tl,
2279 + "armv5tl": arch_armv5tl,
2280 + "armv5tel": arch_armv5tel,
2281 + "armv5tejl": arch_armv5tejl,
2282 + "armv6j" : arch_armv6j,
2283 + "armv6z" : arch_armv6z,
2284 + "armv6zk" : arch_armv6zk,
2285 + "armv7a" : arch_armv7a,
2286 + "armv6j_hardfp" : arch_armv6j_hardfp,
2287 + "armv7a_hardfp" : arch_armv7a_hardfp,
2288 + "armeb" : arch_armeb,
2289 + "armv5teb" : arch_armv5teb
2290 + }, ("arm", "armv4l", "armv4tl", "armv5tl", "armv5tel", "armv5tejl", "armv6l",
2291 +"armv7l", "armeb", "armv5teb") )
2292 diff --git a/catalyst/arch/hppa.py b/catalyst/arch/hppa.py
2293 new file mode 100644
2294 index 0000000..f804398
2295 --- /dev/null
2296 +++ b/catalyst/arch/hppa.py
2297 @@ -0,0 +1,40 @@
2298 +
2299 +import builder,os
2300 +from catalyst_support import *
2301 +
2302 +class generic_hppa(builder.generic):
2303 + "Abstract base class for all hppa builders"
2304 + def __init__(self,myspec):
2305 + builder.generic.__init__(self,myspec)
2306 + self.settings["CHROOT"]="chroot"
2307 + self.settings["CFLAGS"]="-O2 -pipe"
2308 + self.settings["CXXFLAGS"]="-O2 -pipe"
2309 +
2310 +class arch_hppa(generic_hppa):
2311 + "Builder class for hppa systems"
2312 + def __init__(self,myspec):
2313 + generic_hppa.__init__(self,myspec)
2314 + self.settings["CFLAGS"]+=" -march=1.0"
2315 + self.settings["CHOST"]="hppa-unknown-linux-gnu"
2316 +
2317 +class arch_hppa1_1(generic_hppa):
2318 + "Builder class for hppa 1.1 systems"
2319 + def __init__(self,myspec):
2320 + generic_hppa.__init__(self,myspec)
2321 + self.settings["CFLAGS"]+=" -march=1.1"
2322 + self.settings["CHOST"]="hppa1.1-unknown-linux-gnu"
2323 +
2324 +class arch_hppa2_0(generic_hppa):
2325 + "Builder class for hppa 2.0 systems"
2326 + def __init__(self,myspec):
2327 + generic_hppa.__init__(self,myspec)
2328 + self.settings["CFLAGS"]+=" -march=2.0"
2329 + self.settings["CHOST"]="hppa2.0-unknown-linux-gnu"
2330 +
2331 +def register():
2332 + "Inform main catalyst program of the contents of this plugin."
2333 + return ({
2334 + "hppa": arch_hppa,
2335 + "hppa1.1": arch_hppa1_1,
2336 + "hppa2.0": arch_hppa2_0
2337 + }, ("parisc","parisc64","hppa","hppa64") )
2338 diff --git a/catalyst/arch/ia64.py b/catalyst/arch/ia64.py
2339 new file mode 100644
2340 index 0000000..825af70
2341 --- /dev/null
2342 +++ b/catalyst/arch/ia64.py
2343 @@ -0,0 +1,16 @@
2344 +
2345 +import builder,os
2346 +from catalyst_support import *
2347 +
2348 +class arch_ia64(builder.generic):
2349 + "builder class for ia64"
2350 + def __init__(self,myspec):
2351 + builder.generic.__init__(self,myspec)
2352 + self.settings["CHROOT"]="chroot"
2353 + self.settings["CFLAGS"]="-O2 -pipe"
2354 + self.settings["CFLAGS"]="-O2 -pipe"
2355 + self.settings["CHOST"]="ia64-unknown-linux-gnu"
2356 +
2357 +def register():
2358 + "Inform main catalyst program of the contents of this plugin."
2359 + return ({ "ia64":arch_ia64 }, ("ia64", ))
2360 diff --git a/catalyst/arch/mips.py b/catalyst/arch/mips.py
2361 new file mode 100644
2362 index 0000000..b3730fa
2363 --- /dev/null
2364 +++ b/catalyst/arch/mips.py
2365 @@ -0,0 +1,464 @@
2366 +
2367 +import builder,os
2368 +from catalyst_support import *
2369 +
2370 +class generic_mips(builder.generic):
2371 + "Abstract base class for all mips builders [Big-endian]"
2372 + def __init__(self,myspec):
2373 + builder.generic.__init__(self,myspec)
2374 + self.settings["CHROOT"]="chroot"
2375 + self.settings["CHOST"]="mips-unknown-linux-gnu"
2376 +
2377 +class generic_mipsel(builder.generic):
2378 + "Abstract base class for all mipsel builders [Little-endian]"
2379 + def __init__(self,myspec):
2380 + builder.generic.__init__(self,myspec)
2381 + self.settings["CHROOT"]="chroot"
2382 + self.settings["CHOST"]="mipsel-unknown-linux-gnu"
2383 +
2384 +class generic_mips64(builder.generic):
2385 + "Abstract base class for all mips64 builders [Big-endian]"
2386 + def __init__(self,myspec):
2387 + builder.generic.__init__(self,myspec)
2388 + self.settings["CHROOT"]="chroot"
2389 + self.settings["CHOST"]="mips64-unknown-linux-gnu"
2390 +
2391 +class generic_mips64el(builder.generic):
2392 + "Abstract base class for all mips64el builders [Little-endian]"
2393 + def __init__(self,myspec):
2394 + builder.generic.__init__(self,myspec)
2395 + self.settings["CHROOT"]="chroot"
2396 + self.settings["CHOST"]="mips64el-unknown-linux-gnu"
2397 +
2398 +class arch_mips1(generic_mips):
2399 + "Builder class for MIPS I [Big-endian]"
2400 + def __init__(self,myspec):
2401 + generic_mips.__init__(self,myspec)
2402 + self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
2403 +
2404 +class arch_mips32(generic_mips):
2405 + "Builder class for MIPS 32 [Big-endian]"
2406 + def __init__(self,myspec):
2407 + generic_mips.__init__(self,myspec)
2408 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2409 +
2410 +class arch_mips32_softfloat(generic_mips):
2411 + "Builder class for MIPS 32 [Big-endian softfloat]"
2412 + def __init__(self,myspec):
2413 + generic_mips.__init__(self,myspec)
2414 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2415 + self.settings["CHOST"]="mips-softfloat-linux-gnu"
2416 +
2417 +class arch_mips32r2(generic_mips):
2418 + "Builder class for MIPS 32r2 [Big-endian]"
2419 + def __init__(self,myspec):
2420 + generic_mips.__init__(self,myspec)
2421 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2422 +
2423 +class arch_mips32r2_softfloat(generic_mips):
2424 + "Builder class for MIPS 32r2 [Big-endian softfloat]"
2425 + def __init__(self,myspec):
2426 + generic_mips.__init__(self,myspec)
2427 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2428 + self.settings["CHOST"]="mips-softfloat-linux-gnu"
2429 +
2430 +class arch_mips3(generic_mips):
2431 + "Builder class for MIPS III [Big-endian]"
2432 + def __init__(self,myspec):
2433 + generic_mips.__init__(self,myspec)
2434 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
2435 +
2436 +class arch_mips3_n32(generic_mips64):
2437 + "Builder class for MIPS III [Big-endian N32]"
2438 + def __init__(self,myspec):
2439 + generic_mips64.__init__(self,myspec)
2440 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
2441 +
2442 +class arch_mips3_n64(generic_mips64):
2443 + "Builder class for MIPS III [Big-endian N64]"
2444 + def __init__(self,myspec):
2445 + generic_mips64.__init__(self,myspec)
2446 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -mfix-r4000 -mfix-r4400 -pipe"
2447 +
2448 +class arch_mips3_multilib(generic_mips64):
2449 + "Builder class for MIPS III [Big-endian multilib]"
2450 + def __init__(self,myspec):
2451 + generic_mips64.__init__(self,myspec)
2452 + self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -mfix-r4000 -mfix-r4400 -pipe"
2453 +
2454 +class arch_mips4(generic_mips):
2455 + "Builder class for MIPS IV [Big-endian]"
2456 + def __init__(self,myspec):
2457 + generic_mips.__init__(self,myspec)
2458 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
2459 +
2460 +class arch_mips4_n32(generic_mips64):
2461 + "Builder class for MIPS IV [Big-endian N32]"
2462 + def __init__(self,myspec):
2463 + generic_mips64.__init__(self,myspec)
2464 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
2465 +
2466 +class arch_mips4_n64(generic_mips64):
2467 + "Builder class for MIPS IV [Big-endian N64]"
2468 + def __init__(self,myspec):
2469 + generic_mips64.__init__(self,myspec)
2470 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
2471 +
2472 +class arch_mips4_multilib(generic_mips64):
2473 + "Builder class for MIPS IV [Big-endian multilib]"
2474 + def __init__(self,myspec):
2475 + generic_mips64.__init__(self,myspec)
2476 + self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
2477 +
2478 +class arch_mips4_r10k(generic_mips):
2479 + "Builder class for MIPS IV R10k [Big-endian]"
2480 + def __init__(self,myspec):
2481 + generic_mips.__init__(self,myspec)
2482 + self.settings["CFLAGS"]="-O2 -march=r10k -mabi=32 -mplt -pipe"
2483 +
2484 +class arch_mips4_r10k_n32(generic_mips64):
2485 + "Builder class for MIPS IV R10k [Big-endian N32]"
2486 + def __init__(self,myspec):
2487 + generic_mips64.__init__(self,myspec)
2488 + self.settings["CFLAGS"]="-O2 -march=r10k -mabi=n32 -mplt -pipe"
2489 +
2490 +class arch_mips4_r10k_n64(generic_mips64):
2491 + "Builder class for MIPS IV R10k [Big-endian N64]"
2492 + def __init__(self,myspec):
2493 + generic_mips64.__init__(self,myspec)
2494 + self.settings["CFLAGS"]="-O2 -march=r10k -mabi=64 -pipe"
2495 +
2496 +class arch_mips4_r10k_multilib(generic_mips64):
2497 + "Builder class for MIPS IV R10k [Big-endian multilib]"
2498 + def __init__(self,myspec):
2499 + generic_mips64.__init__(self,myspec)
2500 + self.settings["CFLAGS"]="-O2 -march=r10k -mplt -pipe"
2501 +
2502 +class arch_mips64(generic_mips):
2503 + "Builder class for MIPS 64 [Big-endian]"
2504 + def __init__(self,myspec):
2505 + generic_mips.__init__(self,myspec)
2506 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
2507 +
2508 +class arch_mips64_n32(generic_mips64):
2509 + "Builder class for MIPS 64 [Big-endian N32]"
2510 + def __init__(self,myspec):
2511 + generic_mips64.__init__(self,myspec)
2512 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
2513 +
2514 +class arch_mips64_n64(generic_mips64):
2515 + "Builder class for MIPS 64 [Big-endian N64]"
2516 + def __init__(self,myspec):
2517 + generic_mips64.__init__(self,myspec)
2518 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
2519 +
2520 +class arch_mips64_multilib(generic_mips64):
2521 + "Builder class for MIPS 64 [Big-endian multilib]"
2522 + def __init__(self,myspec):
2523 + generic_mips64.__init__(self,myspec)
2524 + self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
2525 +
2526 +class arch_mips64r2(generic_mips):
2527 + "Builder class for MIPS 64r2 [Big-endian]"
2528 + def __init__(self,myspec):
2529 + generic_mips.__init__(self,myspec)
2530 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
2531 +
2532 +class arch_mips64r2_n32(generic_mips64):
2533 + "Builder class for MIPS 64r2 [Big-endian N32]"
2534 + def __init__(self,myspec):
2535 + generic_mips64.__init__(self,myspec)
2536 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
2537 +
2538 +class arch_mips64r2_n64(generic_mips64):
2539 + "Builder class for MIPS 64r2 [Big-endian N64]"
2540 + def __init__(self,myspec):
2541 + generic_mips64.__init__(self,myspec)
2542 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
2543 +
2544 +class arch_mips64r2_multilib(generic_mips64):
2545 + "Builder class for MIPS 64r2 [Big-endian multilib]"
2546 + def __init__(self,myspec):
2547 + generic_mips64.__init__(self,myspec)
2548 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
2549 +
2550 +class arch_mipsel1(generic_mipsel):
2551 + "Builder class for MIPS I [Little-endian]"
2552 + def __init__(self,myspec):
2553 + generic_mipsel.__init__(self,myspec)
2554 + self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
2555 +
2556 +class arch_mips32el(generic_mipsel):
2557 + "Builder class for MIPS 32 [Little-endian]"
2558 + def __init__(self,myspec):
2559 + generic_mipsel.__init__(self,myspec)
2560 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2561 +
2562 +class arch_mips32el_softfloat(generic_mipsel):
2563 + "Builder class for MIPS 32 [Little-endian softfloat]"
2564 + def __init__(self,myspec):
2565 + generic_mipsel.__init__(self,myspec)
2566 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2567 + self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
2568 +
2569 +class arch_mips32r2el(generic_mipsel):
2570 + "Builder class for MIPS 32r2 [Little-endian]"
2571 + def __init__(self,myspec):
2572 + generic_mipsel.__init__(self,myspec)
2573 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2574 +
2575 +class arch_mips32r2el_softfloat(generic_mipsel):
2576 + "Builder class for MIPS 32r2 [Little-endian softfloat]"
2577 + def __init__(self,myspec):
2578 + generic_mipsel.__init__(self,myspec)
2579 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2580 + self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
2581 +
2582 +class arch_mipsel3(generic_mipsel):
2583 + "Builder class for MIPS III [Little-endian]"
2584 + def __init__(self,myspec):
2585 + generic_mipsel.__init__(self,myspec)
2586 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2587 +
2588 +class arch_mipsel3_n32(generic_mips64el):
2589 + "Builder class for MIPS III [Little-endian N32]"
2590 + def __init__(self,myspec):
2591 + generic_mips64el.__init__(self,myspec)
2592 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2593 +
2594 +class arch_mipsel3_n64(generic_mips64el):
2595 + "Builder class for MIPS III [Little-endian N64]"
2596 + def __init__(self,myspec):
2597 + generic_mips64el.__init__(self,myspec)
2598 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
2599 +
2600 +class arch_mipsel3_multilib(generic_mips64el):
2601 + "Builder class for MIPS III [Little-endian multilib]"
2602 + def __init__(self,myspec):
2603 + generic_mips64el.__init__(self,myspec)
2604 + self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2605 +
2606 +class arch_loongson2e(generic_mipsel):
2607 + "Builder class for Loongson 2E [Little-endian]"
2608 + def __init__(self,myspec):
2609 + generic_mipsel.__init__(self,myspec)
2610 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=32 -mplt -pipe"
2611 +
2612 +class arch_loongson2e_n32(generic_mips64el):
2613 + "Builder class for Loongson 2E [Little-endian N32]"
2614 + def __init__(self,myspec):
2615 + generic_mips64el.__init__(self,myspec)
2616 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=n32 -mplt -pipe"
2617 +
2618 +class arch_loongson2e_n64(generic_mips64el):
2619 + "Builder class for Loongson 2E [Little-endian N64]"
2620 + def __init__(self,myspec):
2621 + generic_mips64el.__init__(self,myspec)
2622 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=64 -pipe"
2623 +
2624 +class arch_loongson2e_multilib(generic_mips64el):
2625 + "Builder class for Loongson 2E [Little-endian multilib]"
2626 + def __init__(self,myspec):
2627 + generic_mips64el.__init__(self,myspec)
2628 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mplt -pipe"
2629 +
2630 +class arch_loongson2f(generic_mipsel):
2631 + "Builder class for Loongson 2F [Little-endian]"
2632 + def __init__(self,myspec):
2633 + generic_mipsel.__init__(self,myspec)
2634 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2635 +
2636 +class arch_loongson2f_n32(generic_mips64el):
2637 + "Builder class for Loongson 2F [Little-endian N32]"
2638 + def __init__(self,myspec):
2639 + generic_mips64el.__init__(self,myspec)
2640 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2641 +
2642 +class arch_loongson2f_n64(generic_mips64el):
2643 + "Builder class for Loongson 2F [Little-endian N64]"
2644 + def __init__(self,myspec):
2645 + generic_mips64el.__init__(self,myspec)
2646 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
2647 +
2648 +class arch_loongson2f_multilib(generic_mips64el):
2649 + "Builder class for Loongson 2F [Little-endian multilib]"
2650 + def __init__(self,myspec):
2651 + generic_mips64el.__init__(self,myspec)
2652 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mplt -Wa,-mfix-loongson2f-nop -pipe"
2653 +
2654 +class arch_mipsel4(generic_mipsel):
2655 + "Builder class for MIPS IV [Little-endian]"
2656 + def __init__(self,myspec):
2657 + generic_mipsel.__init__(self,myspec)
2658 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
2659 +
2660 +class arch_mipsel4_n32(generic_mips64el):
2661 + "Builder class for MIPS IV [Little-endian N32]"
2662 + def __init__(self,myspec):
2663 + generic_mips64el.__init__(self,myspec)
2664 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
2665 +
2666 +class arch_mipsel4_n64(generic_mips64el):
2667 + "Builder class for MIPS IV [Little-endian N64]"
2668 + def __init__(self,myspec):
2669 + generic_mips64el.__init__(self,myspec)
2670 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
2671 +
2672 +class arch_mipsel4_multilib(generic_mips64el):
2673 + "Builder class for MIPS IV [Little-endian multilib]"
2674 + def __init__(self,myspec):
2675 + generic_mips64el.__init__(self,myspec)
2676 + self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
2677 +
2678 +class arch_mips64el(generic_mipsel):
2679 + "Builder class for MIPS 64 [Little-endian]"
2680 + def __init__(self,myspec):
2681 + generic_mipsel.__init__(self,myspec)
2682 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
2683 +
2684 +class arch_mips64el_n32(generic_mips64el):
2685 + "Builder class for MIPS 64 [Little-endian N32]"
2686 + def __init__(self,myspec):
2687 + generic_mips64el.__init__(self,myspec)
2688 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
2689 +
2690 +class arch_mips64el_n64(generic_mips64el):
2691 + "Builder class for MIPS 64 [Little-endian N64]"
2692 + def __init__(self,myspec):
2693 + generic_mips64el.__init__(self,myspec)
2694 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
2695 +
2696 +class arch_mips64el_multilib(generic_mips64el):
2697 + "Builder class for MIPS 64 [Little-endian multilib]"
2698 + def __init__(self,myspec):
2699 + generic_mips64el.__init__(self,myspec)
2700 + self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
2701 +
2702 +class arch_mips64r2el(generic_mipsel):
2703 + "Builder class for MIPS 64r2 [Little-endian]"
2704 + def __init__(self,myspec):
2705 + generic_mipsel.__init__(self,myspec)
2706 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
2707 +
2708 +class arch_mips64r2el_n32(generic_mips64el):
2709 + "Builder class for MIPS 64r2 [Little-endian N32]"
2710 + def __init__(self,myspec):
2711 + generic_mips64el.__init__(self,myspec)
2712 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
2713 +
2714 +class arch_mips64r2el_n64(generic_mips64el):
2715 + "Builder class for MIPS 64r2 [Little-endian N64]"
2716 + def __init__(self,myspec):
2717 + generic_mips64el.__init__(self,myspec)
2718 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
2719 +
2720 +class arch_mips64r2el_multilib(generic_mips64el):
2721 + "Builder class for MIPS 64r2 [Little-endian multilib]"
2722 + def __init__(self,myspec):
2723 + generic_mips64el.__init__(self,myspec)
2724 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
2725 +
2726 +class arch_loongson3a(generic_mipsel):
2727 + "Builder class for Loongson 3A [Little-endian]"
2728 + def __init__(self,myspec):
2729 + generic_mipsel.__init__(self,myspec)
2730 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=32 -mplt -pipe"
2731 +
2732 +class arch_loongson3a_n32(generic_mips64el):
2733 + "Builder class for Loongson 3A [Little-endian N32]"
2734 + def __init__(self,myspec):
2735 + generic_mips64el.__init__(self,myspec)
2736 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=n32 -mplt -pipe"
2737 +
2738 +class arch_loongson3a_n64(generic_mips64el):
2739 + "Builder class for Loongson 3A [Little-endian N64]"
2740 + def __init__(self,myspec):
2741 + generic_mips64el.__init__(self,myspec)
2742 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=64 -pipe"
2743 +
2744 +class arch_loongson3a_multilib(generic_mips64el):
2745 + "Builder class for Loongson 3A [Little-endian multilib]"
2746 + def __init__(self,myspec):
2747 + generic_mips64el.__init__(self,myspec)
2748 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mplt -pipe"
2749 +
2750 +class arch_cobalt(generic_mipsel):
2751 + "Builder class for cobalt [Little-endian]"
2752 + def __init__(self,myspec):
2753 + generic_mipsel.__init__(self,myspec)
2754 + self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=32 -mplt -pipe"
2755 + self.settings["HOSTUSE"]=["cobalt"]
2756 +
2757 +class arch_cobalt_n32(generic_mips64el):
2758 + "Builder class for cobalt [Little-endian N32]"
2759 + def __init__(self,myspec):
2760 + generic_mips64el.__init__(self,myspec)
2761 + self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=n32 -mplt -pipe"
2762 + self.settings["HOSTUSE"]=["cobalt"]
2763 +
2764 +def register():
2765 + "Inform main catalyst program of the contents of this plugin."
2766 + return ({
2767 + "cobalt" : arch_cobalt,
2768 + "cobalt_n32" : arch_cobalt_n32,
2769 + "mips" : arch_mips1,
2770 + "mips1" : arch_mips1,
2771 + "mips32" : arch_mips32,
2772 + "mips32_softfloat" : arch_mips32_softfloat,
2773 + "mips32r2" : arch_mips32r2,
2774 + "mips32r2_softfloat" : arch_mips32r2_softfloat,
2775 + "mips3" : arch_mips3,
2776 + "mips3_n32" : arch_mips3_n32,
2777 + "mips3_n64" : arch_mips3_n64,
2778 + "mips3_multilib" : arch_mips3_multilib,
2779 + "mips4" : arch_mips4,
2780 + "mips4_n32" : arch_mips4_n32,
2781 + "mips4_n64" : arch_mips4_n64,
2782 + "mips4_multilib" : arch_mips4_multilib,
2783 + "mips4_r10k" : arch_mips4_r10k,
2784 + "mips4_r10k_n32" : arch_mips4_r10k_n32,
2785 + "mips4_r10k_n64" : arch_mips4_r10k_n64,
2786 + "mips4_r10k_multilib" : arch_mips4_r10k_multilib,
2787 + "mips64" : arch_mips64,
2788 + "mips64_n32" : arch_mips64_n32,
2789 + "mips64_n64" : arch_mips64_n64,
2790 + "mips64_multilib" : arch_mips64_multilib,
2791 + "mips64r2" : arch_mips64r2,
2792 + "mips64r2_n32" : arch_mips64r2_n32,
2793 + "mips64r2_n64" : arch_mips64r2_n64,
2794 + "mips64r2_multilib" : arch_mips64r2_multilib,
2795 + "mipsel" : arch_mipsel1,
2796 + "mipsel1" : arch_mipsel1,
2797 + "mips32el" : arch_mips32el,
2798 + "mips32el_softfloat" : arch_mips32el_softfloat,
2799 + "mips32r2el" : arch_mips32r2el,
2800 + "mips32r2el_softfloat" : arch_mips32r2el_softfloat,
2801 + "mipsel3" : arch_mipsel3,
2802 + "mipsel3_n32" : arch_mipsel3_n32,
2803 + "mipsel3_n64" : arch_mipsel3_n64,
2804 + "mipsel3_multilib" : arch_mipsel3_multilib,
2805 + "mipsel4" : arch_mipsel4,
2806 + "mipsel4_n32" : arch_mipsel4_n32,
2807 + "mipsel4_n64" : arch_mipsel4_n64,
2808 + "mipsel4_multilib" : arch_mipsel4_multilib,
2809 + "mips64el" : arch_mips64el,
2810 + "mips64el_n32" : arch_mips64el_n32,
2811 + "mips64el_n64" : arch_mips64el_n64,
2812 + "mips64el_multilib" : arch_mips64el_multilib,
2813 + "mips64r2el" : arch_mips64r2el,
2814 + "mips64r2el_n32" : arch_mips64r2el_n32,
2815 + "mips64r2el_n64" : arch_mips64r2el_n64,
2816 + "mips64r2el_multilib" : arch_mips64r2el_multilib,
2817 + "loongson2e" : arch_loongson2e,
2818 + "loongson2e_n32" : arch_loongson2e_n32,
2819 + "loongson2e_n64" : arch_loongson2e_n64,
2820 + "loongson2e_multilib" : arch_loongson2e_multilib,
2821 + "loongson2f" : arch_loongson2f,
2822 + "loongson2f_n32" : arch_loongson2f_n32,
2823 + "loongson2f_n64" : arch_loongson2f_n64,
2824 + "loongson2f_multilib" : arch_loongson2f_multilib,
2825 + "loongson3a" : arch_loongson3a,
2826 + "loongson3a_n32" : arch_loongson3a_n32,
2827 + "loongson3a_n64" : arch_loongson3a_n64,
2828 + "loongson3a_multilib" : arch_loongson3a_multilib,
2829 + }, ("mips","mips64"))
2830 diff --git a/catalyst/arch/powerpc.py b/catalyst/arch/powerpc.py
2831 new file mode 100644
2832 index 0000000..e9f611b
2833 --- /dev/null
2834 +++ b/catalyst/arch/powerpc.py
2835 @@ -0,0 +1,124 @@
2836 +
2837 +import os,builder
2838 +from catalyst_support import *
2839 +
2840 +class generic_ppc(builder.generic):
2841 + "abstract base class for all 32-bit powerpc builders"
2842 + def __init__(self,myspec):
2843 + builder.generic.__init__(self,myspec)
2844 + self.settings["CHOST"]="powerpc-unknown-linux-gnu"
2845 + if self.settings["buildarch"]=="ppc64":
2846 + if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
2847 + raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
2848 + self.settings["CHROOT"]="linux32 chroot"
2849 + self.settings["crosscompile"] = False;
2850 + else:
2851 + self.settings["CHROOT"]="chroot"
2852 +
2853 +class generic_ppc64(builder.generic):
2854 + "abstract base class for all 64-bit powerpc builders"
2855 + def __init__(self,myspec):
2856 + builder.generic.__init__(self,myspec)
2857 + self.settings["CHROOT"]="chroot"
2858 +
2859 +class arch_ppc(generic_ppc):
2860 + "builder class for generic powerpc"
2861 + def __init__(self,myspec):
2862 + generic_ppc.__init__(self,myspec)
2863 + self.settings["CFLAGS"]="-O2 -mcpu=powerpc -mtune=powerpc -pipe"
2864 +
2865 +class arch_ppc64(generic_ppc64):
2866 + "builder class for generic ppc64"
2867 + def __init__(self,myspec):
2868 + generic_ppc64.__init__(self,myspec)
2869 + self.settings["CFLAGS"]="-O2 -pipe"
2870 + self.settings["CHOST"]="powerpc64-unknown-linux-gnu"
2871 +
2872 +class arch_970(arch_ppc64):
2873 + "builder class for 970 aka G5 under ppc64"
2874 + def __init__(self,myspec):
2875 + arch_ppc64.__init__(self,myspec)
2876 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=970 -mtune=970"
2877 + self.settings["HOSTUSE"]=["altivec"]
2878 +
2879 +class arch_cell(arch_ppc64):
2880 + "builder class for cell under ppc64"
2881 + def __init__(self,myspec):
2882 + arch_ppc64.__init__(self,myspec)
2883 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=cell -mtune=cell"
2884 + self.settings["HOSTUSE"]=["altivec","ibm"]
2885 +
2886 +class arch_g3(generic_ppc):
2887 + def __init__(self,myspec):
2888 + generic_ppc.__init__(self,myspec)
2889 + self.settings["CFLAGS"]="-O2 -mcpu=G3 -mtune=G3 -pipe"
2890 +
2891 +class arch_g4(generic_ppc):
2892 + def __init__(self,myspec):
2893 + generic_ppc.__init__(self,myspec)
2894 + self.settings["CFLAGS"]="-O2 -mcpu=G4 -mtune=G4 -maltivec -mabi=altivec -pipe"
2895 + self.settings["HOSTUSE"]=["altivec"]
2896 +
2897 +class arch_g5(generic_ppc):
2898 + def __init__(self,myspec):
2899 + generic_ppc.__init__(self,myspec)
2900 + self.settings["CFLAGS"]="-O2 -mcpu=G5 -mtune=G5 -maltivec -mabi=altivec -pipe"
2901 + self.settings["HOSTUSE"]=["altivec"]
2902 +
2903 +class arch_power(generic_ppc):
2904 + "builder class for generic power"
2905 + def __init__(self,myspec):
2906 + generic_ppc.__init__(self,myspec)
2907 + self.settings["CFLAGS"]="-O2 -mcpu=power -mtune=power -pipe"
2908 +
2909 +class arch_power_ppc(generic_ppc):
2910 + "builder class for generic powerpc/power"
2911 + def __init__(self,myspec):
2912 + generic_ppc.__init__(self,myspec)
2913 + self.settings["CFLAGS"]="-O2 -mcpu=common -mtune=common -pipe"
2914 +
2915 +class arch_power3(arch_ppc64):
2916 + "builder class for power3 under ppc64"
2917 + def __init__(self,myspec):
2918 + arch_ppc64.__init__(self,myspec)
2919 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power3 -mtune=power3"
2920 + self.settings["HOSTUSE"]=["ibm"]
2921 +
2922 +class arch_power4(arch_ppc64):
2923 + "builder class for power4 under ppc64"
2924 + def __init__(self,myspec):
2925 + arch_ppc64.__init__(self,myspec)
2926 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power4 -mtune=power4"
2927 + self.settings["HOSTUSE"]=["ibm"]
2928 +
2929 +class arch_power5(arch_ppc64):
2930 + "builder class for power5 under ppc64"
2931 + def __init__(self,myspec):
2932 + arch_ppc64.__init__(self,myspec)
2933 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power5 -mtune=power5"
2934 + self.settings["HOSTUSE"]=["ibm"]
2935 +
2936 +class arch_power6(arch_ppc64):
2937 + "builder class for power6 under ppc64"
2938 + def __init__(self,myspec):
2939 + arch_ppc64.__init__(self,myspec)
2940 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power6 -mtune=power6"
2941 + self.settings["HOSTUSE"]=["altivec","ibm"]
2942 +
2943 +def register():
2944 + "Inform main catalyst program of the contents of this plugin."
2945 + return ({
2946 + "970" : arch_970,
2947 + "cell" : arch_cell,
2948 + "g3" : arch_g3,
2949 + "g4" : arch_g4,
2950 + "g5" : arch_g5,
2951 + "power" : arch_power,
2952 + "power-ppc" : arch_power_ppc,
2953 + "power3" : arch_power3,
2954 + "power4" : arch_power4,
2955 + "power5" : arch_power5,
2956 + "power6" : arch_power6,
2957 + "ppc" : arch_ppc,
2958 + "ppc64" : arch_ppc64
2959 + }, ("ppc","ppc64","powerpc","powerpc64"))
2960 diff --git a/catalyst/arch/s390.py b/catalyst/arch/s390.py
2961 new file mode 100644
2962 index 0000000..bf22f66
2963 --- /dev/null
2964 +++ b/catalyst/arch/s390.py
2965 @@ -0,0 +1,33 @@
2966 +
2967 +import builder,os
2968 +from catalyst_support import *
2969 +
2970 +class generic_s390(builder.generic):
2971 + "abstract base class for all s390 builders"
2972 + def __init__(self,myspec):
2973 + builder.generic.__init__(self,myspec)
2974 + self.settings["CHROOT"]="chroot"
2975 +
2976 +class generic_s390x(builder.generic):
2977 + "abstract base class for all s390x builders"
2978 + def __init__(self,myspec):
2979 + builder.generic.__init__(self,myspec)
2980 + self.settings["CHROOT"]="chroot"
2981 +
2982 +class arch_s390(generic_s390):
2983 + "builder class for generic s390"
2984 + def __init__(self,myspec):
2985 + generic_s390.__init__(self,myspec)
2986 + self.settings["CFLAGS"]="-O2 -pipe"
2987 + self.settings["CHOST"]="s390-ibm-linux-gnu"
2988 +
2989 +class arch_s390x(generic_s390x):
2990 + "builder class for generic s390x"
2991 + def __init__(self,myspec):
2992 + generic_s390x.__init__(self,myspec)
2993 + self.settings["CFLAGS"]="-O2 -pipe"
2994 + self.settings["CHOST"]="s390x-ibm-linux-gnu"
2995 +
2996 +def register():
2997 + "Inform main catalyst program of the contents of this plugin."
2998 + return ({"s390":arch_s390,"s390x":arch_s390x}, ("s390", "s390x"))
2999 diff --git a/catalyst/arch/sh.py b/catalyst/arch/sh.py
3000 new file mode 100644
3001 index 0000000..2fc9531
3002 --- /dev/null
3003 +++ b/catalyst/arch/sh.py
3004 @@ -0,0 +1,116 @@
3005 +
3006 +import builder,os
3007 +from catalyst_support import *
3008 +
3009 +class generic_sh(builder.generic):
3010 + "Abstract base class for all sh builders [Little-endian]"
3011 + def __init__(self,myspec):
3012 + builder.generic.__init__(self,myspec)
3013 + self.settings["CHROOT"]="chroot"
3014 +
3015 +class generic_sheb(builder.generic):
3016 + "Abstract base class for all sheb builders [Big-endian]"
3017 + def __init__(self,myspec):
3018 + builder.generic.__init__(self,myspec)
3019 + self.settings["CHROOT"]="chroot"
3020 +
3021 +class arch_sh(generic_sh):
3022 + "Builder class for SH [Little-endian]"
3023 + def __init__(self,myspec):
3024 + generic_sh.__init__(self,myspec)
3025 + self.settings["CFLAGS"]="-O2 -pipe"
3026 + self.settings["CHOST"]="sh-unknown-linux-gnu"
3027 +
3028 +class arch_sh2(generic_sh):
3029 + "Builder class for SH-2 [Little-endian]"
3030 + def __init__(self,myspec):
3031 + generic_sh.__init__(self,myspec)
3032 + self.settings["CFLAGS"]="-O2 -m2 -pipe"
3033 + self.settings["CHOST"]="sh2-unknown-linux-gnu"
3034 +
3035 +class arch_sh2a(generic_sh):
3036 + "Builder class for SH-2A [Little-endian]"
3037 + def __init__(self,myspec):
3038 + generic_sh.__init__(self,myspec)
3039 + self.settings["CFLAGS"]="-O2 -m2a -pipe"
3040 + self.settings["CHOST"]="sh2a-unknown-linux-gnu"
3041 +
3042 +class arch_sh3(generic_sh):
3043 + "Builder class for SH-3 [Little-endian]"
3044 + def __init__(self,myspec):
3045 + generic_sh.__init__(self,myspec)
3046 + self.settings["CFLAGS"]="-O2 -m3 -pipe"
3047 + self.settings["CHOST"]="sh3-unknown-linux-gnu"
3048 +
3049 +class arch_sh4(generic_sh):
3050 + "Builder class for SH-4 [Little-endian]"
3051 + def __init__(self,myspec):
3052 + generic_sh.__init__(self,myspec)
3053 + self.settings["CFLAGS"]="-O2 -m4 -pipe"
3054 + self.settings["CHOST"]="sh4-unknown-linux-gnu"
3055 +
3056 +class arch_sh4a(generic_sh):
3057 + "Builder class for SH-4A [Little-endian]"
3058 + def __init__(self,myspec):
3059 + generic_sh.__init__(self,myspec)
3060 + self.settings["CFLAGS"]="-O2 -m4a -pipe"
3061 + self.settings["CHOST"]="sh4a-unknown-linux-gnu"
3062 +
3063 +class arch_sheb(generic_sheb):
3064 + "Builder class for SH [Big-endian]"
3065 + def __init__(self,myspec):
3066 + generic_sheb.__init__(self,myspec)
3067 + self.settings["CFLAGS"]="-O2 -pipe"
3068 + self.settings["CHOST"]="sheb-unknown-linux-gnu"
3069 +
3070 +class arch_sh2eb(generic_sheb):
3071 + "Builder class for SH-2 [Big-endian]"
3072 + def __init__(self,myspec):
3073 + generic_sheb.__init__(self,myspec)
3074 + self.settings["CFLAGS"]="-O2 -m2 -pipe"
3075 + self.settings["CHOST"]="sh2eb-unknown-linux-gnu"
3076 +
3077 +class arch_sh2aeb(generic_sheb):
3078 + "Builder class for SH-2A [Big-endian]"
3079 + def __init__(self,myspec):
3080 + generic_sheb.__init__(self,myspec)
3081 + self.settings["CFLAGS"]="-O2 -m2a -pipe"
3082 + self.settings["CHOST"]="sh2aeb-unknown-linux-gnu"
3083 +
3084 +class arch_sh3eb(generic_sheb):
3085 + "Builder class for SH-3 [Big-endian]"
3086 + def __init__(self,myspec):
3087 + generic_sheb.__init__(self,myspec)
3088 + self.settings["CFLAGS"]="-O2 -m3 -pipe"
3089 + self.settings["CHOST"]="sh3eb-unknown-linux-gnu"
3090 +
3091 +class arch_sh4eb(generic_sheb):
3092 + "Builder class for SH-4 [Big-endian]"
3093 + def __init__(self,myspec):
3094 + generic_sheb.__init__(self,myspec)
3095 + self.settings["CFLAGS"]="-O2 -m4 -pipe"
3096 + self.settings["CHOST"]="sh4eb-unknown-linux-gnu"
3097 +
3098 +class arch_sh4aeb(generic_sheb):
3099 + "Builder class for SH-4A [Big-endian]"
3100 + def __init__(self,myspec):
3101 + generic_sheb.__init__(self,myspec)
3102 + self.settings["CFLAGS"]="-O2 -m4a -pipe"
3103 + self.settings["CHOST"]="sh4aeb-unknown-linux-gnu"
3104 +
3105 +def register():
3106 + "Inform main catalyst program of the contents of this plugin."
3107 + return ({
3108 + "sh" :arch_sh,
3109 + "sh2" :arch_sh2,
3110 + "sh2a" :arch_sh2a,
3111 + "sh3" :arch_sh3,
3112 + "sh4" :arch_sh4,
3113 + "sh4a" :arch_sh4a,
3114 + "sheb" :arch_sheb,
3115 + "sh2eb" :arch_sh2eb,
3116 + "sh2aeb" :arch_sh2aeb,
3117 + "sh3eb" :arch_sh3eb,
3118 + "sh4eb" :arch_sh4eb,
3119 + "sh4aeb" :arch_sh4aeb
3120 + }, ("sh2","sh2a","sh3","sh4","sh4a","sh2eb","sh2aeb","sh3eb","sh4eb","sh4aeb"))
3121 diff --git a/catalyst/arch/sparc.py b/catalyst/arch/sparc.py
3122 new file mode 100644
3123 index 0000000..5eb5344
3124 --- /dev/null
3125 +++ b/catalyst/arch/sparc.py
3126 @@ -0,0 +1,42 @@
3127 +
3128 +import builder,os
3129 +from catalyst_support import *
3130 +
3131 +class generic_sparc(builder.generic):
3132 + "abstract base class for all sparc builders"
3133 + def __init__(self,myspec):
3134 + builder.generic.__init__(self,myspec)
3135 + if self.settings["buildarch"]=="sparc64":
3136 + if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
3137 + raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
3138 + self.settings["CHROOT"]="linux32 chroot"
3139 + self.settings["crosscompile"] = False;
3140 + else:
3141 + self.settings["CHROOT"]="chroot"
3142 +
3143 +class generic_sparc64(builder.generic):
3144 + "abstract base class for all sparc64 builders"
3145 + def __init__(self,myspec):
3146 + builder.generic.__init__(self,myspec)
3147 + self.settings["CHROOT"]="chroot"
3148 +
3149 +class arch_sparc(generic_sparc):
3150 + "builder class for generic sparc (sun4cdm)"
3151 + def __init__(self,myspec):
3152 + generic_sparc.__init__(self,myspec)
3153 + self.settings["CFLAGS"]="-O2 -pipe"
3154 + self.settings["CHOST"]="sparc-unknown-linux-gnu"
3155 +
3156 +class arch_sparc64(generic_sparc64):
3157 + "builder class for generic sparc64 (sun4u)"
3158 + def __init__(self,myspec):
3159 + generic_sparc64.__init__(self,myspec)
3160 + self.settings["CFLAGS"]="-O2 -mcpu=ultrasparc -pipe"
3161 + self.settings["CHOST"]="sparc-unknown-linux-gnu"
3162 +
3163 +def register():
3164 + "Inform main catalyst program of the contents of this plugin."
3165 + return ({
3166 + "sparc" : arch_sparc,
3167 + "sparc64" : arch_sparc64
3168 + }, ("sparc","sparc64", ))
3169 diff --git a/catalyst/arch/x86.py b/catalyst/arch/x86.py
3170 new file mode 100644
3171 index 0000000..0391b79
3172 --- /dev/null
3173 +++ b/catalyst/arch/x86.py
3174 @@ -0,0 +1,153 @@
3175 +
3176 +import builder,os
3177 +from catalyst_support import *
3178 +
3179 +class generic_x86(builder.generic):
3180 + "abstract base class for all x86 builders"
3181 + def __init__(self,myspec):
3182 + builder.generic.__init__(self,myspec)
3183 + if self.settings["buildarch"]=="amd64":
3184 + if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
3185 + raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
3186 + self.settings["CHROOT"]="linux32 chroot"
3187 + self.settings["crosscompile"] = False;
3188 + else:
3189 + self.settings["CHROOT"]="chroot"
3190 +
3191 +class arch_x86(generic_x86):
3192 + "builder class for generic x86 (386+)"
3193 + def __init__(self,myspec):
3194 + generic_x86.__init__(self,myspec)
3195 + self.settings["CFLAGS"]="-O2 -mtune=i686 -pipe"
3196 + self.settings["CHOST"]="i386-pc-linux-gnu"
3197 +
3198 +class arch_i386(generic_x86):
3199 + "Intel i386 CPU"
3200 + def __init__(self,myspec):
3201 + generic_x86.__init__(self,myspec)
3202 + self.settings["CFLAGS"]="-O2 -march=i386 -pipe"
3203 + self.settings["CHOST"]="i386-pc-linux-gnu"
3204 +
3205 +class arch_i486(generic_x86):
3206 + "Intel i486 CPU"
3207 + def __init__(self,myspec):
3208 + generic_x86.__init__(self,myspec)
3209 + self.settings["CFLAGS"]="-O2 -march=i486 -pipe"
3210 + self.settings["CHOST"]="i486-pc-linux-gnu"
3211 +
3212 +class arch_i586(generic_x86):
3213 + "Intel Pentium CPU"
3214 + def __init__(self,myspec):
3215 + generic_x86.__init__(self,myspec)
3216 + self.settings["CFLAGS"]="-O2 -march=i586 -pipe"
3217 + self.settings["CHOST"]="i586-pc-linux-gnu"
3218 +
3219 +class arch_i686(generic_x86):
3220 + "Intel Pentium Pro CPU"
3221 + def __init__(self,myspec):
3222 + generic_x86.__init__(self,myspec)
3223 + self.settings["CFLAGS"]="-O2 -march=i686 -pipe"
3224 + self.settings["CHOST"]="i686-pc-linux-gnu"
3225 +
3226 +class arch_pentium_mmx(generic_x86):
3227 + "Intel Pentium MMX CPU with MMX support"
3228 + def __init__(self,myspec):
3229 + generic_x86.__init__(self,myspec)
3230 + self.settings["CFLAGS"]="-O2 -march=pentium-mmx -pipe"
3231 + self.settings["HOSTUSE"]=["mmx"]
3232 +
3233 +class arch_pentium2(generic_x86):
3234 + "Intel Pentium 2 CPU with MMX support"
3235 + def __init__(self,myspec):
3236 + generic_x86.__init__(self,myspec)
3237 + self.settings["CFLAGS"]="-O2 -march=pentium2 -pipe"
3238 + self.settings["HOSTUSE"]=["mmx"]
3239 +
3240 +class arch_pentium3(generic_x86):
3241 + "Intel Pentium 3 CPU with MMX and SSE support"
3242 + def __init__(self,myspec):
3243 + generic_x86.__init__(self,myspec)
3244 + self.settings["CFLAGS"]="-O2 -march=pentium3 -pipe"
3245 + self.settings["HOSTUSE"]=["mmx","sse"]
3246 +
3247 +class arch_pentium4(generic_x86):
3248 + "Intel Pentium 4 CPU with MMX, SSE and SSE2 support"
3249 + def __init__(self,myspec):
3250 + generic_x86.__init__(self,myspec)
3251 + self.settings["CFLAGS"]="-O2 -march=pentium4 -pipe"
3252 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
3253 +
3254 +class arch_pentium_m(generic_x86):
3255 + "Intel Pentium M CPU with MMX, SSE and SSE2 support"
3256 + def __init__(self,myspec):
3257 + generic_x86.__init__(self,myspec)
3258 + self.settings["CFLAGS"]="-O2 -march=pentium-m -pipe"
3259 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
3260 +
3261 +class arch_prescott(generic_x86):
3262 + "improved version of Intel Pentium 4 CPU with MMX, SSE, SSE2 and SSE3 support"
3263 + def __init__(self,myspec):
3264 + generic_x86.__init__(self,myspec)
3265 + self.settings["CFLAGS"]="-O2 -march=prescott -pipe"
3266 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
3267 + self.settings["CHOST"]="i686-pc-linux-gnu"
3268 +
3269 +class arch_k6(generic_x86):
3270 + "AMD K6 CPU with MMX support"
3271 + def __init__(self,myspec):
3272 + generic_x86.__init__(self,myspec)
3273 + self.settings["CFLAGS"]="-O2 -march=k6 -pipe"
3274 + self.settings["CHOST"]="i686-pc-linux-gnu"
3275 + self.settings["HOSTUSE"]=["mmx"]
3276 +
3277 +class arch_k6_2(generic_x86):
3278 + "AMD K6-2 CPU with MMX and 3dNOW! support"
3279 + def __init__(self,myspec):
3280 + generic_x86.__init__(self,myspec)
3281 + self.settings["CFLAGS"]="-O2 -march=k6-2 -pipe"
3282 + self.settings["CHOST"]="i686-pc-linux-gnu"
3283 + self.settings["HOSTUSE"]=["mmx","3dnow"]
3284 +
3285 +class arch_athlon(generic_x86):
3286 + "AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and SSE prefetch support"
3287 + def __init__(self,myspec):
3288 + generic_x86.__init__(self,myspec)
3289 + self.settings["CFLAGS"]="-O2 -march=athlon -pipe"
3290 + self.settings["CHOST"]="i686-pc-linux-gnu"
3291 + self.settings["HOSTUSE"]=["mmx","3dnow"]
3292 +
3293 +class arch_athlon_xp(generic_x86):
3294 + "improved AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and full SSE support"
3295 + def __init__(self,myspec):
3296 + generic_x86.__init__(self,myspec)
3297 + self.settings["CFLAGS"]="-O2 -march=athlon-xp -pipe"
3298 + self.settings["CHOST"]="i686-pc-linux-gnu"
3299 + self.settings["HOSTUSE"]=["mmx","3dnow","sse"]
3300 +
def register():
	"Inform main catalyst program of the contents of this plugin."
	# Map every accepted subarch keyword onto its builder class; several
	# keywords deliberately alias one class (e.g. pentium -> i586).
	subarch_map = {
		"x86"          : arch_x86,
		"i386"         : arch_i386,
		"i486"         : arch_i486,
		"i586"         : arch_i586,
		"i686"         : arch_i686,
		"pentium"      : arch_i586,
		"pentium2"     : arch_pentium2,
		"pentium3"     : arch_pentium3,
		"pentium3m"    : arch_pentium3,
		"pentium-m"    : arch_pentium_m,
		"pentium4"     : arch_pentium4,
		"pentium4m"    : arch_pentium4,
		"pentiumpro"   : arch_i686,
		"pentium-mmx"  : arch_pentium_mmx,
		"prescott"     : arch_prescott,
		"k6"           : arch_k6,
		"k6-2"         : arch_k6_2,
		"k6-3"         : arch_k6_2,
		"athlon"       : arch_athlon,
		"athlon-tbird" : arch_athlon,
		"athlon-4"     : arch_athlon_xp,
		"athlon-xp"    : arch_athlon_xp,
		"athlon-mp"    : arch_athlon_xp,
	}
	machine_names = ('i386', 'i486', 'i586', 'i686')
	return (subarch_map, machine_names)
3328 diff --git a/catalyst/config.py b/catalyst/config.py
3329 new file mode 100644
3330 index 0000000..726bf74
3331 --- /dev/null
3332 +++ b/catalyst/config.py
3333 @@ -0,0 +1,122 @@
3334 +import re
3335 +from modules.catalyst_support import *
3336 +
3337 +class ParserBase:
3338 +
3339 + filename = ""
3340 + lines = None
3341 + values = None
3342 + key_value_separator = "="
3343 + multiple_values = False
3344 + empty_values = True
3345 +
3346 + def __getitem__(self, key):
3347 + return self.values[key]
3348 +
3349 + def get_values(self):
3350 + return self.values
3351 +
3352 + def dump(self):
3353 + dump = ""
3354 + for x in self.values.keys():
3355 + dump += x + " = " + repr(self.values[x]) + "\n"
3356 + return dump
3357 +
3358 + def parse_file(self, filename):
3359 + try:
3360 + myf = open(filename, "r")
3361 + except:
3362 + raise CatalystError, "Could not open file " + filename
3363 + self.lines = myf.readlines()
3364 + myf.close()
3365 + self.filename = filename
3366 + self.parse()
3367 +
3368 + def parse_lines(self, lines):
3369 + self.lines = lines
3370 + self.parse()
3371 +
3372 + def parse(self):
3373 + values = {}
3374 + cur_array = []
3375 +
3376 + trailing_comment=re.compile('\s*#.*$')
3377 + white_space=re.compile('\s+')
3378 +
3379 + for x, myline in enumerate(self.lines):
3380 + myline = myline.strip()
3381 +
3382 + # Force the line to be clean
3383 + # Remove Comments ( anything following # )
3384 + myline = trailing_comment.sub("", myline)
3385 +
3386 + # Skip any blank lines
3387 + if not myline: continue
3388 +
3389 + # Look for separator
3390 + msearch = myline.find(self.key_value_separator)
3391 +
3392 + # If separator found assume its a new key
3393 + if msearch != -1:
3394 + # Split on the first occurence of the separator creating two strings in the array mobjs
3395 + mobjs = myline.split(self.key_value_separator, 1)
3396 + mobjs[1] = mobjs[1].strip().strip('"')
3397 +
3398 +# # Check that this key doesn't exist already in the spec
3399 +# if mobjs[0] in values:
3400 +# raise Exception("You have a duplicate key (" + mobjs[0] + ") in your spec. Please fix it")
3401 +
3402 + # Start a new array using the first element of mobjs
3403 + cur_array = [mobjs[0]]
3404 + if mobjs[1]:
3405 + if self.multiple_values:
3406 + # split on white space creating additional array elements
3407 +# subarray = white_space.split(mobjs[1])
3408 + subarray = mobjs[1].split()
3409 + cur_array += subarray
3410 + else:
3411 + cur_array += [mobjs[1]]
3412 +
3413 + # Else add on to the last key we were working on
3414 + else:
3415 + if self.multiple_values:
3416 +# mobjs = white_space.split(myline)
3417 +# cur_array += mobjs
3418 + cur_array += myline.split()
3419 + else:
3420 + raise CatalystError, "Syntax error: " + x
3421 +
3422 + # XXX: Do we really still need this "single value is a string" behavior?
3423 + if len(cur_array) == 2:
3424 + values[cur_array[0]] = cur_array[1]
3425 + else:
3426 + values[cur_array[0]] = cur_array[1:]
3427 +
3428 + if not self.empty_values:
3429 + for x in values.keys():
3430 + # Delete empty key pairs
3431 + if not values[x]:
3432 + print "\n\tWARNING: No value set for key " + x + "...deleting"
3433 + del values[x]
3434 +
3435 + self.values = values
3436 +
class SpecParser(ParserBase):
	"""Parser for catalyst .spec files: "key: value ..." lines, where a
	value may be a whitespace-separated list and valueless keys are
	discarded."""

	key_value_separator = ':'
	multiple_values = True
	empty_values = False

	def __init__(self, filename=""):
		# Parsing is deferred unless a filename is supplied up front.
		if not filename:
			return
		self.parse_file(filename)
3446 +
class ConfigParser(ParserBase):
	"""Parser for catalyst.conf-style files: one "key=value" per line,
	single string values, empty values retained."""

	key_value_separator = '='
	multiple_values = False
	empty_values = True

	def __init__(self, filename=""):
		# Parsing is deferred unless a filename is supplied up front.
		if not filename:
			return
		self.parse_file(filename)
3456 diff --git a/catalyst/main.py b/catalyst/main.py
3457 new file mode 100644
3458 index 0000000..d972b97
3459 --- /dev/null
3460 +++ b/catalyst/main.py
3461 @@ -0,0 +1,435 @@
3462 +#!/usr/bin/python2 -OO
3463 +
3464 +# Maintained in full by:
3465 +# Catalyst Team <catalyst@g.o>
3466 +# Release Engineering Team <releng@g.o>
3467 +# Andrew Gaffney <agaffney@g.o>
3468 +# Chris Gianelloni <wolf31o2@××××××××.org>
3469 +# $Id$
3470 +
3471 +import os
3472 +import sys
3473 +import imp
3474 +import string
3475 +import getopt
3476 +import pdb
3477 +import os.path
3478 +
3479 +__selfpath__ = os.path.abspath(os.path.dirname(__file__))
3480 +
3481 +sys.path.append(__selfpath__ + "/modules")
3482 +
3483 +import catalyst.config
3484 +import catalyst.util
3485 +from catalyst.modules.catalyst_support import (required_build_targets,
3486 + valid_build_targets, CatalystError, hash_map, find_binary, LockInUse)
3487 +
3488 +__maintainer__="Catalyst <catalyst@g.o>"
3489 +__version__="2.0.15"
3490 +
3491 +conf_values={}
3492 +
3493 +def usage():
3494 + print """Usage catalyst [options] [-C variable=value...] [ -s identifier]
3495 + -a --clear-autoresume clear autoresume flags
3496 + -c --config use specified configuration file
3497 + -C --cli catalyst commandline (MUST BE LAST OPTION)
3498 + -d --debug enable debugging
3499 + -f --file read specfile
3500 + -F --fetchonly fetch files only
3501 + -h --help print this help message
3502 + -p --purge clear tmp dirs,package cache, autoresume flags
3503 + -P --purgeonly clear tmp dirs,package cache, autoresume flags and exit
3504 + -T --purgetmponly clear tmp dirs and autoresume flags and exit
3505 + -s --snapshot generate a release snapshot
3506 + -V --version display version information
3507 + -v --verbose verbose output
3508 +
3509 +Usage examples:
3510 +
3511 +Using the commandline option (-C, --cli) to build a Portage snapshot:
3512 +catalyst -C target=snapshot version_stamp=my_date
3513 +
3514 +Using the snapshot option (-s, --snapshot) to build a release snapshot:
3515 +catalyst -s 20071121"
3516 +
3517 +Using the specfile option (-f, --file) to build a stage target:
3518 +catalyst -f stage1-specfile.spec
3519 +"""
3520 +
3521 +
3522 +def version():
3523 + print "Catalyst, version "+__version__
3524 + print "Copyright 2003-2008 Gentoo Foundation"
3525 + print "Copyright 2008-2012 various authors"
3526 + print "Distributed under the GNU General Public License version 2.1\n"
3527 +
3528 +def parse_config(myconfig):
3529 + # search a couple of different areas for the main config file
3530 + myconf={}
3531 + config_file=""
3532 +
3533 + confdefaults={
3534 + "distdir": "/usr/portage/distfiles",
3535 + "hash_function": "crc32",
3536 + "packagedir": "/usr/portage/packages",
3537 + "portdir": "/usr/portage",
3538 + "port_tmpdir": "/var/tmp/portage",
3539 + "repo_name": "portage",
3540 + "sharedir": "/usr/lib/catalyst",
3541 + "snapshot_name": "portage-",
3542 + "snapshot_cache": "/var/tmp/catalyst/snapshot_cache",
3543 + "storedir": "/var/tmp/catalyst",
3544 + }
3545 +
3546 + # first, try the one passed (presumably from the cmdline)
3547 + if myconfig:
3548 + if os.path.exists(myconfig):
3549 + print "Using command line specified Catalyst configuration file, "+\
3550 + myconfig
3551 + config_file=myconfig
3552 +
3553 + else:
3554 + print "!!! catalyst: Could not use specified configuration file "+\
3555 + myconfig
3556 + sys.exit(1)
3557 +
3558 + # next, try the default location
3559 + elif os.path.exists("/etc/catalyst/catalyst.conf"):
3560 + print "Using default Catalyst configuration file," + \
3561 + " /etc/catalyst/catalyst.conf"
3562 + config_file="/etc/catalyst/catalyst.conf"
3563 +
3564 + # can't find a config file (we are screwed), so bail out
3565 + else:
3566 + print "!!! catalyst: Could not find a suitable configuration file"
3567 + sys.exit(1)
3568 +
3569 + # now, try and parse the config file "config_file"
3570 + try:
3571 +# execfile(config_file, myconf, myconf)
3572 + myconfig = catalyst.config.ConfigParser(config_file)
3573 + myconf.update(myconfig.get_values())
3574 +
3575 + except:
3576 + print "!!! catalyst: Unable to parse configuration file, "+myconfig
3577 + sys.exit(1)
3578 +
3579 + # now, load up the values into conf_values so that we can use them
3580 + for x in confdefaults.keys():
3581 + if x in myconf:
3582 + print "Setting",x,"to config file value \""+myconf[x]+"\""
3583 + conf_values[x]=myconf[x]
3584 + else:
3585 + print "Setting",x,"to default value \""+confdefaults[x]+"\""
3586 + conf_values[x]=confdefaults[x]
3587 +
3588 + # add our python base directory to use for loading target arch's
3589 + conf_values["PythonDir"] = __selfpath__
3590 +
3591 + # parse out the rest of the options from the config file
3592 + if "autoresume" in string.split(conf_values["options"]):
3593 + print "Autoresuming support enabled."
3594 + conf_values["AUTORESUME"]="1"
3595 +
3596 + if "bindist" in string.split(conf_values["options"]):
3597 + print "Binary redistribution enabled"
3598 + conf_values["BINDIST"]="1"
3599 + else:
3600 + print "Bindist is not enabled in catalyst.conf"
3601 + print "Binary redistribution of generated stages/isos may be prohibited by law."
3602 + print "Please see the use description for bindist on any package you are including."
3603 +
3604 + if "ccache" in string.split(conf_values["options"]):
3605 + print "Compiler cache support enabled."
3606 + conf_values["CCACHE"]="1"
3607 +
3608 + if "clear-autoresume" in string.split(conf_values["options"]):
3609 + print "Cleaning autoresume flags support enabled."
3610 + conf_values["CLEAR_AUTORESUME"]="1"
3611 +
3612 + if "distcc" in string.split(conf_values["options"]):
3613 + print "Distcc support enabled."
3614 + conf_values["DISTCC"]="1"
3615 +
3616 + if "icecream" in string.split(conf_values["options"]):
3617 + print "Icecream compiler cluster support enabled."
3618 + conf_values["ICECREAM"]="1"
3619 +
3620 + if "kerncache" in string.split(conf_values["options"]):
3621 + print "Kernel cache support enabled."
3622 + conf_values["KERNCACHE"]="1"
3623 +
3624 + if "pkgcache" in string.split(conf_values["options"]):
3625 + print "Package cache support enabled."
3626 + conf_values["PKGCACHE"]="1"
3627 +
3628 + if "preserve_libs" in string.split(conf_values["options"]):
3629 + print "Preserving libs during unmerge."
3630 + conf_values["PRESERVE_LIBS"]="1"
3631 +
3632 + if "purge" in string.split(conf_values["options"]):
3633 + print "Purge support enabled."
3634 + conf_values["PURGE"]="1"
3635 +
3636 + if "seedcache" in string.split(conf_values["options"]):
3637 + print "Seed cache support enabled."
3638 + conf_values["SEEDCACHE"]="1"
3639 +
3640 + if "snapcache" in string.split(conf_values["options"]):
3641 + print "Snapshot cache support enabled."
3642 + conf_values["SNAPCACHE"]="1"
3643 +
3644 + if "digests" in myconf:
3645 + conf_values["digests"]=myconf["digests"]
3646 + if "contents" in myconf:
3647 + conf_values["contents"]=myconf["contents"]
3648 +
3649 + if "envscript" in myconf:
3650 + print "Envscript support enabled."
3651 + conf_values["ENVSCRIPT"]=myconf["envscript"]
3652 +
3653 + if "var_tmpfs_portage" in myconf:
3654 + conf_values["var_tmpfs_portage"]=myconf["var_tmpfs_portage"];
3655 +
3656 + if "port_logdir" in myconf:
3657 + conf_values["port_logdir"]=myconf["port_logdir"];
3658 +
3659 +def import_modules():
3660 + # import catalyst's own modules
3661 + # (i.e. catalyst_support and the arch modules)
3662 + targetmap={}
3663 +
3664 + try:
3665 + module_dir = __selfpath__ + "/modules/"
3666 + for x in required_build_targets:
3667 + try:
3668 + fh=open(module_dir + x + ".py")
3669 + module=imp.load_module(x, fh,"modules/" + x + ".py",
3670 + (".py", "r", imp.PY_SOURCE))
3671 + fh.close()
3672 +
3673 + except IOError:
3674 + raise CatalystError, "Can't find " + x + ".py plugin in " + \
3675 + module_dir
3676 + for x in valid_build_targets:
3677 + try:
3678 + fh=open(module_dir + x + ".py")
3679 + module=imp.load_module(x, fh, "modules/" + x + ".py",
3680 + (".py", "r", imp.PY_SOURCE))
3681 + module.register(targetmap)
3682 + fh.close()
3683 +
3684 + except IOError:
3685 + raise CatalystError,"Can't find " + x + ".py plugin in " + \
3686 + module_dir
3687 +
3688 + except ImportError:
3689 + print "!!! catalyst: Python modules not found in "+\
3690 + module_dir + "; exiting."
3691 + sys.exit(1)
3692 +
3693 + return targetmap
3694 +
3695 +def build_target(addlargs, targetmap):
3696 + try:
3697 + if addlargs["target"] not in targetmap:
3698 + raise CatalystError, \
3699 + "Target \"%s\" not available." % addlargs["target"]
3700 +
3701 + mytarget=targetmap[addlargs["target"]](conf_values, addlargs)
3702 +
3703 + mytarget.run()
3704 +
3705 + except:
3706 + catalyst.util.print_traceback()
3707 + print "!!! catalyst: Error encountered during run of target " + \
3708 + addlargs["target"]
3709 + sys.exit(1)
3710 +
def main():
	"""Command-line entry point: verify root, parse options, load the
	configuration and plugin modules, then dispatch the build target."""
	targetmap={}

	version()
	if os.getuid() != 0:
		# catalyst cannot be run as a normal user due to chroots, mounts, etc
		print "!!! catalyst: This script requires root privileges to operate"
		sys.exit(2)

	# we need some options in order to work correctly
	if len(sys.argv) < 2:
		usage()
		sys.exit(2)

	# parse out the command line arguments
	try:
		opts,args = getopt.getopt(sys.argv[1:], "apPThvdc:C:f:FVs:",
			["purge", "purgeonly", "purgetmponly", "help", "version", "debug",
			"clear-autoresume", "config=", "cli=", "file=", "fetch",
			"verbose","snapshot="
			]
		)

	except getopt.GetoptError:
		usage()
		sys.exit(2)

	# defaults for commandline opts
	# NOTE(review): debug/verbose/fetch/myopts are assigned but never
	# read below; candidates for removal.
	debug=False
	verbose=False
	fetch=False
	myconfig=""
	myspecfile=""
	mycmdline=[]
	myopts=[]

	# check preconditions
	if len(opts) == 0:
		print "!!! catalyst: please specify one of either -f or -C\n"
		usage()
		sys.exit(2)

	# "run" flips to True only for options that actually start a build
	# (-C, -f, -s); informational options exit early.
	run = False
	for o, a in opts:
		if o in ("-h", "--help"):
			usage()
			sys.exit(1)

		if o in ("-V", "--version"):
			print "Catalyst version "+__version__
			sys.exit(1)

		if o in ("-d", "--debug"):
			conf_values["DEBUG"]="1"
			conf_values["VERBOSE"]="1"

		if o in ("-c", "--config"):
			myconfig=a

		if o in ("-C", "--cli"):
			# Everything after -C on the raw argv is treated as
			# key=value pairs for the target (MUST BE LAST OPTION).
			run = True
			x=sys.argv.index(o)+1
			while x < len(sys.argv):
				mycmdline.append(sys.argv[x])
				x=x+1

		if o in ("-f", "--file"):
			run = True
			myspecfile=a

		if o in ("-F", "--fetchonly"):
			conf_values["FETCH"]="1"

		if o in ("-v", "--verbose"):
			conf_values["VERBOSE"]="1"

		if o in ("-s", "--snapshot"):
			if len(sys.argv) < 3:
				print "!!! catalyst: missing snapshot identifier\n"
				usage()
				sys.exit(2)
			else:
				# Synthesize the equivalent -C arguments for a snapshot.
				run = True
				mycmdline.append("target=snapshot")
				mycmdline.append("version_stamp="+a)

		if o in ("-p", "--purge"):
			conf_values["PURGE"] = "1"

		if o in ("-P", "--purgeonly"):
			conf_values["PURGEONLY"] = "1"

		if o in ("-T", "--purgetmponly"):
			conf_values["PURGETMPONLY"] = "1"

		if o in ("-a", "--clear-autoresume"):
			conf_values["CLEAR_AUTORESUME"] = "1"

	if not run:
		print "!!! catalyst: please specify one of either -f or -C\n"
		usage()
		sys.exit(2)

	# import configuration file and import our main module using those settings
	parse_config(myconfig)

	# Start checking that digests are valid now that the hash_map was imported
	# from catalyst_support
	if "digests" in conf_values:
		for i in conf_values["digests"].split():
			if i not in hash_map:
				print
				print i+" is not a valid digest entry"
				print "Valid digest entries:"
				print hash_map.keys()
				print
				print "Catalyst aborting...."
				sys.exit(2)
			# hash_map[i][1] is the external binary implementing digest i
			if find_binary(hash_map[i][1]) == None:
				print
				print "digest="+i
				print "\tThe "+hash_map[i][1]+\
					" binary was not found. It needs to be in your system path"
				print
				print "Catalyst aborting...."
				sys.exit(2)
	if "hash_function" in conf_values:
		if conf_values["hash_function"] not in hash_map:
			print
			print conf_values["hash_function"]+\
				" is not a valid hash_function entry"
			print "Valid hash_function entries:"
			print hash_map.keys()
			print
			print "Catalyst aborting...."
			sys.exit(2)
		if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
			print
			print "hash_function="+conf_values["hash_function"]
			print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
				" binary was not found. It needs to be in your system path"
			print
			print "Catalyst aborting...."
			sys.exit(2)

	# import the rest of the catalyst modules
	targetmap=import_modules()

	# Merge target arguments: specfile first, then commandline overrides.
	addlargs={}

	if myspecfile:
		spec = catalyst.config.SpecParser(myspecfile)
		addlargs.update(spec.get_values())

	if mycmdline:
		try:
			cmdline = catalyst.config.ConfigParser()
			cmdline.parse_lines(mycmdline)
			addlargs.update(cmdline.get_values())
		except CatalystError:
			print "!!! catalyst: Could not parse commandline, exiting."
			sys.exit(1)

	if "target" not in addlargs:
		raise CatalystError, "Required value \"target\" not specified."

	# everything is setup, so the build is a go
	try:
		build_target(addlargs, targetmap)

	except CatalystError:
		print
		print "Catalyst aborting...."
		sys.exit(2)
	except KeyboardInterrupt:
		print "\nCatalyst build aborted due to user interrupt ( Ctrl-C )"
		print
		print "Catalyst aborting...."
		sys.exit(2)
	except LockInUse:
		print "Catalyst aborting...."
		sys.exit(2)
	except:
		print "Catalyst aborting...."
		raise
		# NOTE(review): unreachable -- the raise above re-throws first.
		sys.exit(2)
3897 diff --git a/catalyst/modules/__init__.py b/catalyst/modules/__init__.py
3898 new file mode 100644
3899 index 0000000..8b13789
3900 --- /dev/null
3901 +++ b/catalyst/modules/__init__.py
3902 @@ -0,0 +1 @@
3903 +
3904 diff --git a/catalyst/modules/builder.py b/catalyst/modules/builder.py
3905 new file mode 100644
3906 index 0000000..ad27d78
3907 --- /dev/null
3908 +++ b/catalyst/modules/builder.py
3909 @@ -0,0 +1,20 @@
3910 +
3911 +class generic:
3912 + def __init__(self,myspec):
3913 + self.settings=myspec
3914 +
3915 + def mount_safety_check(self):
3916 + """
3917 + Make sure that no bind mounts exist in chrootdir (to use before
3918 + cleaning the directory, to make sure we don't wipe the contents of
3919 + a bind mount
3920 + """
3921 + pass
3922 +
3923 + def mount_all(self):
3924 + """do all bind mounts"""
3925 + pass
3926 +
3927 + def umount_all(self):
3928 + """unmount all bind mounts"""
3929 + pass
3930 diff --git a/catalyst/modules/catalyst_lock.py b/catalyst/modules/catalyst_lock.py
3931 new file mode 100644
3932 index 0000000..5311cf8
3933 --- /dev/null
3934 +++ b/catalyst/modules/catalyst_lock.py
3935 @@ -0,0 +1,468 @@
3936 +#!/usr/bin/python
3937 +import os
3938 +import fcntl
3939 +import errno
3940 +import sys
3941 +import string
3942 +import time
3943 +from catalyst_support import *
3944 +
def writemsg(mystr):
	"""Write mystr to stderr and flush it out immediately."""
	stream = sys.stderr
	stream.write(mystr)
	stream.flush()
3948 +
3949 +class LockDir:
3950 + locking_method=fcntl.flock
3951 + lock_dirs_in_use=[]
3952 + die_on_failed_lock=True
3953 + def __del__(self):
3954 + self.clean_my_hardlocks()
3955 + self.delete_lock_from_path_list()
3956 + if self.islocked():
3957 + self.fcntl_unlock()
3958 +
3959 + def __init__(self,lockdir):
3960 + self.locked=False
3961 + self.myfd=None
3962 + self.set_gid(250)
3963 + self.locking_method=LockDir.locking_method
3964 + self.set_lockdir(lockdir)
3965 + self.set_lockfilename(".catalyst_lock")
3966 + self.set_lockfile()
3967 +
3968 + if LockDir.lock_dirs_in_use.count(lockdir)>0:
3969 + raise "This directory already associated with a lock object"
3970 + else:
3971 + LockDir.lock_dirs_in_use.append(lockdir)
3972 +
3973 + self.hardlock_paths={}
3974 +
3975 + def delete_lock_from_path_list(self):
3976 + i=0
3977 + try:
3978 + if LockDir.lock_dirs_in_use:
3979 + for x in LockDir.lock_dirs_in_use:
3980 + if LockDir.lock_dirs_in_use[i] == self.lockdir:
3981 + del LockDir.lock_dirs_in_use[i]
3982 + break
3983 + i=i+1
3984 + except AttributeError:
3985 + pass
3986 +
3987 + def islocked(self):
3988 + if self.locked:
3989 + return True
3990 + else:
3991 + return False
3992 +
3993 + def set_gid(self,gid):
3994 + if not self.islocked():
3995 +# if "DEBUG" in self.settings:
3996 +# print "setting gid to", gid
3997 + self.gid=gid
3998 +
3999 + def set_lockdir(self,lockdir):
4000 + if not os.path.exists(lockdir):
4001 + os.makedirs(lockdir)
4002 + if os.path.isdir(lockdir):
4003 + if not self.islocked():
4004 + if lockdir[-1] == "/":
4005 + lockdir=lockdir[:-1]
4006 + self.lockdir=normpath(lockdir)
4007 +# if "DEBUG" in self.settings:
4008 +# print "setting lockdir to", self.lockdir
4009 + else:
4010 + raise "the lock object needs a path to a dir"
4011 +
4012 + def set_lockfilename(self,lockfilename):
4013 + if not self.islocked():
4014 + self.lockfilename=lockfilename
4015 +# if "DEBUG" in self.settings:
4016 +# print "setting lockfilename to", self.lockfilename
4017 +
4018 + def set_lockfile(self):
4019 + if not self.islocked():
4020 + self.lockfile=normpath(self.lockdir+'/'+self.lockfilename)
4021 +# if "DEBUG" in self.settings:
4022 +# print "setting lockfile to", self.lockfile
4023 +
4024 + def read_lock(self):
4025 + if not self.locking_method == "HARDLOCK":
4026 + self.fcntl_lock("read")
4027 + else:
4028 + print "HARDLOCKING doesnt support shared-read locks"
4029 + print "using exclusive write locks"
4030 + self.hard_lock()
4031 +
4032 + def write_lock(self):
4033 + if not self.locking_method == "HARDLOCK":
4034 + self.fcntl_lock("write")
4035 + else:
4036 + self.hard_lock()
4037 +
4038 + def unlock(self):
4039 + if not self.locking_method == "HARDLOCK":
4040 + self.fcntl_unlock()
4041 + else:
4042 + self.hard_unlock()
4043 +
4044 + def fcntl_lock(self,locktype):
4045 + if self.myfd==None:
4046 + if not os.path.exists(os.path.dirname(self.lockdir)):
4047 + raise DirectoryNotFound, os.path.dirname(self.lockdir)
4048 + if not os.path.exists(self.lockfile):
4049 + old_mask=os.umask(000)
4050 + self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
4051 + try:
4052 + if os.stat(self.lockfile).st_gid != self.gid:
4053 + os.chown(self.lockfile,os.getuid(),self.gid)
4054 + except SystemExit, e:
4055 + raise
4056 + except OSError, e:
4057 + if e[0] == 2: #XXX: No such file or directory
4058 + return self.fcntl_locking(locktype)
4059 + else:
4060 + writemsg("Cannot chown a lockfile. This could cause inconvenience later.\n")
4061 +
4062 + os.umask(old_mask)
4063 + else:
4064 + self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
4065 +
4066 + try:
4067 + if locktype == "read":
4068 + self.locking_method(self.myfd,fcntl.LOCK_SH|fcntl.LOCK_NB)
4069 + else:
4070 + self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
4071 + except IOError, e:
4072 + if "errno" not in dir(e):
4073 + raise
4074 + if e.errno == errno.EAGAIN:
4075 + if not LockDir.die_on_failed_lock:
4076 + # Resource temp unavailable; eg, someone beat us to the lock.
4077 + writemsg("waiting for lock on %s\n" % self.lockfile)
4078 +
4079 + # Try for the exclusive or shared lock again.
4080 + if locktype == "read":
4081 + self.locking_method(self.myfd,fcntl.LOCK_SH)
4082 + else:
4083 + self.locking_method(self.myfd,fcntl.LOCK_EX)
4084 + else:
4085 + raise LockInUse,self.lockfile
4086 + elif e.errno == errno.ENOLCK:
4087 + pass
4088 + else:
4089 + raise
4090 + if not os.path.exists(self.lockfile):
4091 + os.close(self.myfd)
4092 + self.myfd=None
4093 + #writemsg("lockfile recurse\n")
4094 + self.fcntl_lock(locktype)
4095 + else:
4096 + self.locked=True
4097 + #writemsg("Lockfile obtained\n")
4098 +
4099 + def fcntl_unlock(self):
4100 + import fcntl
4101 + unlinkfile = 1
4102 + if not os.path.exists(self.lockfile):
4103 + print "lockfile does not exist '%s'" % self.lockfile
4104 + if (self.myfd != None):
4105 + try:
4106 + os.close(myfd)
4107 + self.myfd=None
4108 + except:
4109 + pass
4110 + return False
4111 +
4112 + try:
4113 + if self.myfd == None:
4114 + self.myfd = os.open(self.lockfile, os.O_WRONLY,0660)
4115 + unlinkfile = 1
4116 + self.locking_method(self.myfd,fcntl.LOCK_UN)
4117 + except SystemExit, e:
4118 + raise
4119 + except Exception, e:
4120 + os.close(self.myfd)
4121 + self.myfd=None
4122 + raise IOError, "Failed to unlock file '%s'\n" % self.lockfile
4123 + try:
4124 + # This sleep call was added to allow other processes that are
4125 + # waiting for a lock to be able to grab it before it is deleted.
4126 + # lockfile() already accounts for this situation, however, and
4127 + # the sleep here adds more time than is saved overall, so am
4128 + # commenting until it is proved necessary.
4129 + #time.sleep(0.0001)
4130 + if unlinkfile:
4131 + InUse=False
4132 + try:
4133 + self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
4134 + except:
4135 + print "Read lock may be in effect. skipping lockfile delete..."
4136 + InUse=True
4137 + # We won the lock, so there isn't competition for it.
4138 + # We can safely delete the file.
4139 + #writemsg("Got the lockfile...\n")
4140 + #writemsg("Unlinking...\n")
4141 + self.locking_method(self.myfd,fcntl.LOCK_UN)
4142 + if not InUse:
4143 + os.unlink(self.lockfile)
4144 + os.close(self.myfd)
4145 + self.myfd=None
4146 +# if "DEBUG" in self.settings:
4147 +# print "Unlinked lockfile..."
4148 + except SystemExit, e:
4149 + raise
4150 + except Exception, e:
4151 + # We really don't care... Someone else has the lock.
4152 + # So it is their problem now.
4153 + print "Failed to get lock... someone took it."
4154 + print str(e)
4155 +
4156 + # Why test lockfilename? Because we may have been handed an
4157 + # fd originally, and the caller might not like having their
4158 + # open fd closed automatically on them.
4159 + #if type(lockfilename) == types.StringType:
4160 + # os.close(myfd)
4161 +
4162 + if (self.myfd != None):
4163 + os.close(self.myfd)
4164 + self.myfd=None
4165 + self.locked=False
4166 + time.sleep(.0001)
4167 +
4168 + def hard_lock(self,max_wait=14400):
4169 + """Does the NFS, hardlink shuffle to ensure locking on the disk.
4170 + We create a PRIVATE lockfile, that is just a placeholder on the disk.
4171 + Then we HARDLINK the real lockfile to that private file.
4172 + If our file can 2 references, then we have the lock. :)
4173 + Otherwise we lather, rise, and repeat.
4174 + We default to a 4 hour timeout.
4175 + """
4176 +
4177 + self.myhardlock = self.hardlock_name(self.lockdir)
4178 +
4179 + start_time = time.time()
4180 + reported_waiting = False
4181 +
4182 + while(time.time() < (start_time + max_wait)):
4183 + # We only need it to exist.
4184 + self.myfd = os.open(self.myhardlock, os.O_CREAT|os.O_RDWR,0660)
4185 + os.close(self.myfd)
4186 +
4187 + self.add_hardlock_file_to_cleanup()
4188 + if not os.path.exists(self.myhardlock):
4189 + raise FileNotFound, "Created lockfile is missing: %(filename)s" % {"filename":self.myhardlock}
4190 + try:
4191 + res = os.link(self.myhardlock, self.lockfile)
4192 + except SystemExit, e:
4193 + raise
4194 + except Exception, e:
4195 +# if "DEBUG" in self.settings:
4196 +# print "lockfile(): Hardlink: Link failed."
4197 +# print "Exception: ",e
4198 + pass
4199 +
4200 + if self.hardlink_is_mine(self.myhardlock, self.lockfile):
4201 + # We have the lock.
4202 + if reported_waiting:
4203 + print
4204 + return True
4205 +
4206 + if reported_waiting:
4207 + writemsg(".")
4208 + else:
4209 + reported_waiting = True
4210 + print
4211 + print "Waiting on (hardlink) lockfile: (one '.' per 3 seconds)"
4212 + print "Lockfile: " + self.lockfile
4213 + time.sleep(3)
4214 +
4215 + os.unlink(self.myhardlock)
4216 + return False
4217 +
4218 + def hard_unlock(self):
4219 + try:
4220 + if os.path.exists(self.myhardlock):
4221 + os.unlink(self.myhardlock)
4222 + if os.path.exists(self.lockfile):
4223 + os.unlink(self.lockfile)
4224 + except SystemExit, e:
4225 + raise
4226 + except:
4227 + writemsg("Something strange happened to our hardlink locks.\n")
4228 +
4229 + def add_hardlock_file_to_cleanup(self):
4230 + #mypath = self.normpath(path)
4231 + if os.path.isdir(self.lockdir) and os.path.isfile(self.myhardlock):
4232 + self.hardlock_paths[self.lockdir]=self.myhardlock
4233 +
4234 + def remove_hardlock_file_from_cleanup(self):
4235 + if self.lockdir in self.hardlock_paths:
4236 + del self.hardlock_paths[self.lockdir]
4237 + print self.hardlock_paths
4238 +
4239 + def hardlock_name(self, path):
4240 + mypath=path+"/.hardlock-"+os.uname()[1]+"-"+str(os.getpid())
4241 + newpath = os.path.normpath(mypath)
4242 + if len(newpath) > 1:
4243 + if newpath[1] == "/":
4244 + newpath = "/"+newpath.lstrip("/")
4245 + return newpath
4246 +
4247 + def hardlink_is_mine(self,link,lock):
4248 + import stat
4249 + try:
4250 + myhls = os.stat(link)
4251 + mylfs = os.stat(lock)
4252 + except SystemExit, e:
4253 + raise
4254 + except:
4255 + myhls = None
4256 + mylfs = None
4257 +
4258 + if myhls:
4259 + if myhls[stat.ST_NLINK] == 2:
4260 + return True
4261 + if mylfs:
4262 + if mylfs[stat.ST_INO] == myhls[stat.ST_INO]:
4263 + return True
4264 + return False
4265 +
4266 + def hardlink_active(lock):
4267 + if not os.path.exists(lock):
4268 + return False
4269 +
4270 + def clean_my_hardlocks(self):
4271 + try:
4272 + for x in self.hardlock_paths.keys():
4273 + self.hardlock_cleanup(x)
4274 + except AttributeError:
4275 + pass
4276 +
4277 + def hardlock_cleanup(self,path):
4278 + mypid = str(os.getpid())
4279 + myhost = os.uname()[1]
4280 + mydl = os.listdir(path)
4281 + results = []
4282 + mycount = 0
4283 +
4284 + mylist = {}
4285 + for x in mydl:
4286 + filepath=path+"/"+x
4287 + if os.path.isfile(filepath):
4288 + parts = filepath.split(".hardlock-")
4289 + if len(parts) == 2:
4290 + filename = parts[0]
4291 + hostpid = parts[1].split("-")
4292 + host = "-".join(hostpid[:-1])
4293 + pid = hostpid[-1]
4294 + if filename not in mylist:
4295 + mylist[filename] = {}
4296 +
4297 + if host not in mylist[filename]:
4298 + mylist[filename][host] = []
4299 + mylist[filename][host].append(pid)
4300 + mycount += 1
4301 + else:
4302 + mylist[filename][host].append(pid)
4303 + mycount += 1
4304 +
4305 +
4306 + results.append("Found %(count)s locks" % {"count":mycount})
4307 + for x in mylist.keys():
4308 + if myhost in mylist[x]:
4309 + mylockname = self.hardlock_name(x)
4310 + if self.hardlink_is_mine(mylockname, self.lockfile) or \
4311 + not os.path.exists(self.lockfile):
4312 + for y in mylist[x].keys():
4313 + for z in mylist[x][y]:
4314 + filename = x+".hardlock-"+y+"-"+z
4315 + if filename == mylockname:
4316 + self.hard_unlock()
4317 + continue
4318 + try:
4319 + # We're sweeping through, unlinking everyone's locks.
4320 + os.unlink(filename)
4321 + results.append("Unlinked: " + filename)
4322 + except SystemExit, e:
4323 + raise
4324 + except Exception,e:
4325 + pass
4326 + try:
4327 + os.unlink(x)
4328 + results.append("Unlinked: " + x)
4329 + os.unlink(mylockname)
4330 + results.append("Unlinked: " + mylockname)
4331 + except SystemExit, e:
4332 + raise
4333 + except Exception,e:
4334 + pass
4335 + else:
4336 + try:
4337 + os.unlink(mylockname)
4338 + results.append("Unlinked: " + mylockname)
4339 + except SystemExit, e:
4340 + raise
4341 + except Exception,e:
4342 + pass
4343 + return results
4344 +
if __name__ == "__main__":
	# Manual self-test: exercise write/read locking on /tmp/lock_path
	# when this module is executed directly.

	def lock_work():
		# Simulate ~5 seconds of work while a lock is held.
		print
		for i in range(1,6):
			print i,time.time()
			time.sleep(1)
		print
	def normpath(mypath):
		# Local copy of normpath: collapse the path, but keep a leading
		# "//" reduced to a single "/".
		newpath = os.path.normpath(mypath)
		if len(newpath) > 1:
			if newpath[1] == "/":
				newpath = "/"+newpath.lstrip("/")
		return newpath

	print "Lock 5 starting"
	import time
	Lock1=LockDir("/tmp/lock_path")
	Lock1.write_lock()
	print "Lock1 write lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

	Lock1.read_lock()
	print "Lock1 read lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

	Lock1.read_lock()
	print "Lock1 read lock"

	Lock1.write_lock()
	print "Lock1 write lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

	Lock1.read_lock()
	print "Lock1 read lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

#Lock1.write_lock()
#time.sleep(2)
#Lock1.unlock()
	##Lock1.write_lock()
	#time.sleep(2)
	#Lock1.unlock()
4404 diff --git a/catalyst/modules/catalyst_support.py b/catalyst/modules/catalyst_support.py
4405 new file mode 100644
4406 index 0000000..316dfa3
4407 --- /dev/null
4408 +++ b/catalyst/modules/catalyst_support.py
4409 @@ -0,0 +1,718 @@
4410 +
4411 +import sys,string,os,types,re,signal,traceback,time
4412 +#import md5,sha
4413 +selinux_capable = False
4414 +#userpriv_capable = (os.getuid() == 0)
4415 +#fakeroot_capable = False
4416 +BASH_BINARY = "/bin/bash"
4417 +
# Determine the per-process file descriptor ceiling; spawn() walks this
# range when closing inherited descriptors in the forked child.
try:
	import resource
	# getrlimit() returns (soft, hard); the soft limit is the one enforced.
	# The original passed a bare RLIMIT_NOFILE - a NameError that the
	# blanket except silently converted into the 256 fallback every time.
	max_fd_limit=resource.getrlimit(resource.RLIMIT_NOFILE)[0]
except SystemExit:
	raise
except:
	# hokay, no resource module.
	max_fd_limit=256
4426 +
# pids this process knows of.
# Children created by spawn() are appended here and reaped by cleanup().
spawned_pids = []

# NOTE(review): wrapping a plain import in try/except SystemExit is a no-op;
# an ImportError here would propagate unchanged anyway.
try:
	import urllib
except SystemExit, e:
	raise
4434 +
4435 +def cleanup(pids,block_exceptions=True):
4436 + """function to go through and reap the list of pids passed to it"""
4437 + global spawned_pids
4438 + if type(pids) == int:
4439 + pids = [pids]
4440 + for x in pids:
4441 + try:
4442 + os.kill(x,signal.SIGTERM)
4443 + if os.waitpid(x,os.WNOHANG)[1] == 0:
4444 + # feisty bugger, still alive.
4445 + os.kill(x,signal.SIGKILL)
4446 + os.waitpid(x,0)
4447 +
4448 + except OSError, oe:
4449 + if block_exceptions:
4450 + pass
4451 + if oe.errno not in (10,3):
4452 + raise oe
4453 + except SystemExit:
4454 + raise
4455 + except Exception:
4456 + if block_exceptions:
4457 + pass
4458 + try: spawned_pids.remove(x)
4459 + except IndexError: pass
4460 +
4461 +
4462 +
# a function to turn a string of non-printable characters into a string of
# hex characters
def hexify(str):
	"""Return two lowercase hex digits for each character of *str*."""
	# masking with 0xFF reproduces the original's nibble lookups, which
	# only ever encoded the low byte of each character
	return "".join(["%02x" % (ord(ch) & 0xFF) for ch in str])
# hexify()
4473 +
def generate_contents(file,contents_function="auto",verbose=False):
	"""Produce a listing of *file*'s contents.

	contents_function selects a key in contents_map; "auto" picks one from
	the file extension (.iso -> isoinfo-l, tarballs -> the matching tar
	variant).  Returns the captured listing, None when the type cannot be
	auto-detected, and raises CatalystError if the helper utility fails.
	"""
	try:
		_ = contents_function
		if _ == 'auto' and file.endswith('.iso'):
			_ = 'isoinfo-l'
		if (_ in ['tar-tv','auto']):
			if file.endswith('.tgz') or file.endswith('.tar.gz'):
				_ = 'tar-tvz'
			elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
				_ = 'tar-tvj'
			elif file.endswith('.tar'):
				_ = 'tar-tv'

		if _ == 'auto':
			warn('File %r has unknown type for automatic detection.' % (file, ))
			return None
		else:
			contents_function = _
			# _[0] is the worker function, _[1] its command template
			_ = contents_map[contents_function]
			return _[0](file,_[1],verbose)
	except:
		raise CatalystError,\
			"Error generating contents, is appropriate utility (%s) installed on your system?" \
			% (contents_function, )
4498 +
def calc_contents(file,cmd,verbose):
	"""Run *cmd* (a %-format template with a 'file' key) and return its stdout."""
	command = cmd % {'file': file}
	pipe = os.popen(command)
	output = "".join(pipe.readlines())
	pipe.close()
	if verbose:
		# same as the old "print result": the captured text plus a newline
		sys.stdout.write(output + "\n")
	return output
4509 +
# This hash map must be defined after the function calc_contents.
# It is possible to call different functions from this but they must be defined
# before contents_map.
# Key,function,cmd
contents_map={
	# 'find' is disabled because it requires the source path, which is not
	# always available
	#"find"		:[calc_contents,"find %(path)s"],
	"tar-tv":[calc_contents,"tar tvf %(file)s"],
	"tar-tvz":[calc_contents,"tar tvzf %(file)s"],
	"tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
	"isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
	# isoinfo-f should be a last resort only
	"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
}
4525 +
4526 +def generate_hash(file,hash_function="crc32",verbose=False):
4527 + try:
4528 + return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
4529 + hash_map[hash_function][3],verbose)
4530 + except:
4531 + raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
4532 +
def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
	"""Run "<cmd> <cmd_args> <file>" and return the first token of its output."""
	pipe = os.popen("%s %s %s" % (cmd, cmd_args, file))
	lines = pipe.readlines()
	pipe.close()
	# first whitespace-separated token of the first output line is the digest
	digest = lines[0].split()[0]
	if verbose:
		sys.stdout.write(id_string+" (%s) = %s" % (file, digest) + "\n")
	return digest
4542 +
def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
	"""Run a shash-style utility and return "<header><hash> <basename>\n".

	Expects the tool to print a header line followed by a "<hash> <path>"
	line; the path is reduced to its basename in the returned string.
	"""
	a=os.popen(cmd+" "+cmd_args+" "+file)
	header=a.readline()
	mylines=a.readline().split()
	hash=mylines[0]
	short_file=os.path.split(mylines[1])[1]
	a.close()
	result=header+hash+" "+short_file+"\n"
	if verbose:
		# NOTE(review): header keeps its trailing newline, so this prints
		# across two lines; left as-is to preserve existing output.
		print header+" (%s) = %s" % (short_file, result)
	return result
4554 +
# This hash map must be defined after the function calc_hash.
# It is possible to call different functions from this but they must be defined
# before hash_map.
# Key,function,cmd,cmd_args,Print string  (all entries shell out to shash)
hash_map={
	 "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
	 "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
	 "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
	 "gost":[calc_hash2,"shash","-a GOST","GOST"],\
	 "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
	 "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
	 "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
	 "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
	 "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
	 "md2":[calc_hash2,"shash","-a MD2","MD2"],\
	 "md4":[calc_hash2,"shash","-a MD4","MD4"],\
	 "md5":[calc_hash2,"shash","-a MD5","MD5"],\
	 "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
	 "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
	 "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
	 "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
	 "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
	 "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
	 "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
	 "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
	 "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
	 "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
	 "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
	 "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
	 "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
	 "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
	 "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
	 }
4588 +
def read_from_clst(file):
	"""Return the full text of *file*, or -1 if it cannot be opened."""
	try:
		myf=open(file,"r")
	except:
		return -1
		#raise CatalystError, "Could not open file "+file
	# reading wholesale is equivalent to the old concatenate-every-line loop
	contents = myf.read()
	myf.close()
	return contents
# read_from_clst
4603 +
# these should never be touched
required_build_targets=["generic_target","generic_stage_target"]

# new build types should be added here
valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
			"livecd_stage1_target","livecd_stage2_target","embedded_target",
			"tinderbox_target","snapshot_target","netboot_target","netboot2_target"]

# config-file keys: the mandatory ones first, then every recognised
# optional toggle, in the same order the old append chain produced
required_config_file_values=["storedir","sharedir","distdir","portdir"]
valid_config_file_values=required_config_file_values[:] + [
	"PKGCACHE","KERNCACHE","CCACHE","DISTCC","ICECREAM","ENVSCRIPT",
	"AUTORESUME","FETCH","CLEAR_AUTORESUME","options","DEBUG","VERBOSE",
	"PURGE","PURGEONLY","SNAPCACHE","snapshot_cache","hash_function",
	"digests","contents","SEEDCACHE",
]

# default console verbosity used by msg()
verbosity=1
4636 +
def list_bashify(mylist):
	"""Wrap each argument in single quotes and join them for handing to bash.

	Accepts either one string or a list of strings; the quoting keeps shell
	metacharacters such as "<" intact.
	"""
	if type(mylist)==types.StringType:
		items=[mylist]
	else:
		items=mylist[:]
	return string.join(["'"+entry+"'" for entry in items])
4648 +
def list_to_string(mylist):
	"""Join a list of strings (or pass through a single string) with spaces.

	The original looped over the list assigning each element to itself -
	dead code copied from list_bashify minus the quoting.  Only the join
	ever mattered; joining a one-element list returns that element.
	"""
	if type(mylist)==types.StringType:
		mypack=[mylist]
	else:
		mypack=mylist[:]
	return string.join(mypack)
4660 +
class CatalystError(Exception):
	"""Generic catalyst failure; diagnostics print at construction time."""
	def __init__(self, message):
		# If raised while another exception is active, dump its traceback
		# first so the root cause stays visible, then the catalyst message.
		if message:
			(type,value)=sys.exc_info()[:2]
			if value!=None:
				print
				print traceback.print_exc(file=sys.stdout)
				print
			print "!!! catalyst: "+message
			print
4671 +
class LockInUse(Exception):
	"""Raised when another catalyst process already holds a lock file."""
	def __init__(self, message):
		# unlike CatalystError, no traceback is printed - the stale/busy
		# lock path in *message* is the whole story
		if message:
			#(type,value)=sys.exc_info()[:2]
			#if value!=None:
			    #print
			    #kprint traceback.print_exc(file=sys.stdout)
			print
			print "!!! catalyst lock file in use: "+message
			print
4682 +
def die(msg=None):
	"""Print *msg* (if given) as a catalyst warning, then exit with status 1.

	The original unconditionally called warn(msg), so the documented
	msg=None default crashed with a TypeError instead of exiting cleanly.
	"""
	if msg is not None:
		warn(msg)
	sys.exit(1)
4686 +
def warn(msg):
	"""Print *msg* to stdout with the standard catalyst warning prefix."""
	print "!!! catalyst: "+msg
4689 +
def find_binary(myc):
	"""look through the environmental path for an executable file named whatever myc is"""
	# this sucks. badly.
	p=os.getenv("PATH")
	if p == None:
		return None
	for x in p.split(":"):
		candidate = "%s/%s" % (x,myc)
		# os.access() asks the kernel about real execute permission instead
		# of the old hand-rolled 0x0248 mode mask (which ignored the
		# world-execute bit), and isfile() stops an executable *directory*
		# from matching.
		if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
			return candidate
	return None
4701 +
def spawn_bash(mycommand,env=None,debug=False,opt_name=None,**keywords):
	"""spawn mycommand as an argument to bash

	A fresh dict is created when no env is supplied: the original's env={}
	mutable default was mutated (BASH_ENV added), leaking state across
	calls.  An explicitly-passed env dict is still updated in place, as
	before.
	"""
	if env is None:
		env = {}
	args=[BASH_BINARY]
	if not opt_name:
		opt_name=mycommand.split()[0]
	if "BASH_ENV" not in env:
		# deliberately bogus path: keeps bash from sourcing a real profile
		env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
	if debug:
		args.append("-x")
	args.append("-c")
	args.append(mycommand)
	return spawn(args,env=env,opt_name=opt_name,**keywords)
4714 +
4715 +#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
4716 +# collect_fds=[1],fd_pipes=None,**keywords):
4717 +
def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
	collect_fds=[1],fd_pipes=None,**keywords):
	"""call spawn, collecting the output to fd's specified in collect_fds list
	emulate_gso is a compatibility hack to emulate commands.getstatusoutput's return, minus the
	requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
	'lets let log only stdin and let stderr slide by'.

	emulate_gso was deprecated from the day it was added, so convert your code over.
	spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
	# NOTE(review): the mutable default collect_fds=[1] is only ever read,
	# never mutated, so it is harmless here.
	global selinux_capable
	pr,pw=os.pipe()

	#if type(spawn_type) not in [types.FunctionType, types.MethodType]:
	#	s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
	#	raise Exception,s

	if fd_pipes==None:
		fd_pipes={}
		fd_pipes[0] = 0

	# route every fd we were asked to collect into the pipe's write end
	for x in collect_fds:
		fd_pipes[x] = pw
	keywords["returnpid"]=True

	mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
	os.close(pw)
	if type(mypid) != types.ListType:
		# spawn failed before forking (e.g. the binary was not found)
		os.close(pr)
		return [mypid, "%s: No such file or directory" % mycommand.split()[0]]

	# drain the read end fully before reaping the child
	fd=os.fdopen(pr,"r")
	mydata=fd.readlines()
	fd.close()
	if emulate_gso:
		# getstatusoutput-style: one string, trailing newline stripped
		mydata=string.join(mydata)
		if len(mydata) and mydata[-1] == "\n":
			mydata=mydata[:-1]
	retval=os.waitpid(mypid[0],0)[1]
	cleanup(mypid)
	if raw_exit_code:
		return [retval,mydata]
	retval=process_exit_code(retval)
	return [retval, mydata]
4761 +
# base spawn function
def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
	 uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
	 selinux_context=None, raise_signals=False, func_call=False):
	"""base fork/execve function.
	mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
	environment, use the appropriate spawn call.  This is a straight fork/exec code path.
	Can either have a tuple, or a string passed in.  If uid/gid/groups/umask specified, it changes
	the forked process to said value.  If path_lookup is on, a non-absolute command will be converted
	to an absolute command, otherwise it returns None.

	selinux_context is the desired context, dependent on selinux being available.
	opt_name controls the name the processor goes by.
	fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
	current fd's raw fd #, desired #.

	func_call is a boolean for specifying to execute a python function- use spawn_func instead.
	raise_signals is questionable.  Basically throw an exception if signal'd.  No exception is thrown
	if raw_input is on.

	logfile overloads the specified fd's to write to a tee process which logs to logfile
	returnpid returns the relevant pids (a list, including the logging process if logfile is on).

	non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
	raw_exit_code controls whether the actual waitpid result is returned, or interpreted."""

	myc=''
	if not func_call:
		# resolve the executable up front so we can fail before forking
		if type(mycommand)==types.StringType:
			mycommand=mycommand.split()
		myc = mycommand[0]
		if not os.access(myc, os.X_OK):
			if not path_lookup:
				return None
			myc = find_binary(myc)
			if myc == None:
				return None
	mypid=[]
	if logfile:
		# start a tee child first; the real child's stdout/stderr will be
		# pointed at the write end of this pipe
		pr,pw=os.pipe()
		mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
		retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
		if retval != 0:
			# he's dead jim.
			if raw_exit_code:
				return retval
			return process_exit_code(retval)

		if fd_pipes == None:
			fd_pipes={}
			fd_pipes[0] = 0
		fd_pipes[1]=pw
		fd_pipes[2]=pw

	if not opt_name:
		opt_name = mycommand[0]
	myargs=[opt_name]
	myargs.extend(mycommand[1:])
	global spawned_pids
	mypid.append(os.fork())
	if mypid[-1] != 0:
		#log the bugger.
		spawned_pids.extend(mypid)

	if mypid[-1] == 0:
		# --- child process from here on ---
		if func_call:
			spawned_pids = []

		# this may look ugly, but basically it moves file descriptors around to ensure no
		# handles that are needed are accidentally closed during the final dup2 calls.
		trg_fd=[]
		if type(fd_pipes)==types.DictType:
			src_fd=[]
			k=fd_pipes.keys()
			k.sort()

			#build list of which fds will be where, and where they are at currently
			for x in k:
				trg_fd.append(x)
				src_fd.append(fd_pipes[x])

			# run through said list dup'ing descriptors so that they won't be waxed
			# by other dup calls.
			for x in range(0,len(trg_fd)):
				if trg_fd[x] == src_fd[x]:
					continue
				if trg_fd[x] in src_fd[x+1:]:
					# NOTE(review): this rescue branch has never worked as
					# written - os.dup2() returns None on python 2, and
					# 's' below is undefined (likely meant src_fd); the
					# resulting NameError is swallowed by the bare except.
					new=os.dup2(trg_fd[x],max(src_fd) + 1)
					os.close(trg_fd[x])
					try:
						while True:
							src_fd[s.index(trg_fd[x])]=new
					except SystemExit, e:
						raise
					except:
						pass

			# transfer the fds to their final pre-exec position.
			for x in range(0,len(trg_fd)):
				if trg_fd[x] != src_fd[x]:
					os.dup2(src_fd[x], trg_fd[x])
		else:
			trg_fd=[0,1,2]

		# wax all open descriptors that weren't requested be left open.
		for x in range(0,max_fd_limit):
			if x not in trg_fd:
				try:
					os.close(x)
				except SystemExit, e:
					raise
				except:
					pass

		# note this order must be preserved- can't change gid/groups if you change uid first.
		if selinux_capable and selinux_context:
			import selinux
			selinux.setexec(selinux_context)
		if gid:
			os.setgid(gid)
		if groups:
			os.setgroups(groups)
		if uid:
			os.setuid(uid)
		if umask:
			os.umask(umask)
		else:
			os.umask(022)

		try:
			#print "execing", myc, myargs
			if func_call:
				# either use a passed in func for interpretting the results, or return if no exception.
				# note the passed in list, and dict are expanded.
				if len(mycommand) == 4:
					os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
				try:
					mycommand[0](*mycommand[1],**mycommand[2])
				except Exception,e:
					print "caught exception",e," in forked func",mycommand[0]
				sys.exit(0)

			#os.execvp(myc,myargs)
			os.execve(myc,myargs,env)
		except SystemExit, e:
			raise
		except Exception, e:
			if not func_call:
				# NOTE(review): raising a plain string is a TypeError on
				# python >= 2.6; this path cannot work as written.
				raise str(e)+":\n   "+myc+" "+string.join(myargs)
			print "func call failed"

		# If the execve fails, we need to report it, and exit
		# *carefully* --- report error here
		os._exit(1)
		sys.exit(1)
		return # should never get reached

	# --- parent process ---
	# if we were logging, kill the pipes.
	if logfile:
		os.close(pr)
		os.close(pw)

	if returnpid:
		return mypid

	# loop through pids (typically one, unless logging), either waiting on their death, or waxing them
	# if the main pid (mycommand) returned badly.
	while len(mypid):
		retval=os.waitpid(mypid[-1],0)[1]
		if retval != 0:
			cleanup(mypid[0:-1],block_exceptions=False)
			# at this point we've killed all other kid pids generated via this call.
			# return now.
			if raw_exit_code:
				return retval
			return process_exit_code(retval,throw_signals=raise_signals)
		else:
			mypid.pop(-1)
	cleanup(mypid)
	return 0
4942 +
4943 +def cmd(mycmd,myexc="",env={}):
4944 + try:
4945 + sys.stdout.flush()
4946 + retval=spawn_bash(mycmd,env)
4947 + if retval != 0:
4948 + raise CatalystError,myexc
4949 + except:
4950 + raise
4951 +
def process_exit_code(retval,throw_signals=False):
	"""Decode a waitpid() status word.

	Returns the exit code for a normal exit, or the signal number shifted
	left 8 bits when the child died from a signal.  With throw_signals on,
	a signalled child raises SystemExit instead - intended for threaded
	use, where only the master thread can receive signals.
	"""
	if (retval & 0xff) != 0:
		# low byte set: the child was interrupted by a signal
		if throw_signals:
			#use systemexit, since portage is stupid about exception catching.
			raise SystemExit()
		return (retval & 0xff) << 8
	# normal exit: the code lives in the high byte
	return retval >> 8
4965 +
def file_locate(settings,filelist,expand=1):
	"""Verify each settings key in *filelist* points at an existing file.

	Absolute paths must exist as-is; with expand=1 a relative path is
	resolved against the current working directory and rewritten into
	settings in place.  Keys missing from settings are skipped (optional).
	Raises CatalystError for empty values or unlocatable paths.
	"""
	#if expand=1, non-absolute paths will be accepted and
	# expanded to os.getcwd()+"/"+localpath if file exists
	for myfile in filelist:
		if myfile not in settings:
			#filenames such as cdtar are optional, so we don't assume the variable is defined.
			pass
		else:
			if len(settings[myfile])==0:
				raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
			if settings[myfile][0]=="/":
				if not os.path.exists(settings[myfile]):
					raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
			elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
				settings[myfile]=os.getcwd()+"/"+settings[myfile]
			else:
				raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
4983 +"""
4984 +Spec file format:
4985 +
4986 +The spec file format is a very simple and easy-to-use format for storing data. Here's an example
4987 +file:
4988 +
4989 +item1: value1
4990 +item2: foo bar oni
4991 +item3:
4992 + meep
4993 + bark
4994 + gleep moop
4995 +
4996 +This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
4997 +the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
4998 +would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
4999 +that the order of multiple-value items is preserved, but the order that the items themselves are
5000 +defined is not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
5001 +"item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
5002 +"""
5003 +
def parse_makeconf(mylines):
	"""Parse VAR=value assignment lines into a dict.

	Blank lines, comments and indented lines are skipped, and double
	quotes are stripped from values.  Lines that are not assignments are
	now ignored; the original called .group() on a failed match and died
	with an AttributeError on any such line.
	"""
	mymakeconf={}
	pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
	for myline in mylines:
		if len(myline)<=1:
			#skip blanks
			continue
		if myline[0] in ["#"," ","\t"]:
			#skip indented lines, comments
			continue
		mobj=pat.match(myline)
		if mobj and mobj.group(2):
			clean_string = re.sub(r"\"",r"",mobj.group(2))
			mymakeconf[mobj.group(1)]=clean_string
	return mymakeconf
5025 +
def read_makeconf(mymakeconffile):
	"""Parse a make.conf file into a dict, preferring real parsers.

	Tries snakeoil, then portage's two historical module names, and finally
	falls back to the naive parse_makeconf().  A missing file yields {}.
	Raises CatalystError if every parser fails on an existing file.
	"""
	if os.path.exists(mymakeconffile):
		try:
			try:
				import snakeoil.fileutils
				return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
			except ImportError:
				try:
					import portage.util
					return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
				except:
					try:
						import portage_util
						return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
					except ImportError:
						# no portage available at all: parse it by hand
						myf=open(mymakeconffile,"r")
						mylines=myf.readlines()
						myf.close()
						return parse_makeconf(mylines)
		except:
			raise CatalystError, "Could not parse make.conf file "+mymakeconffile
	else:
		makeconf={}
		return makeconf
5050 +
def msg(mymsg,verblevel=1):
	"""Print *mymsg* unless the global verbosity is below *verblevel*."""
	if verbosity>=verblevel:
		print mymsg
5054 +
def pathcompare(path1,path2):
	"""Return 1 when the two paths match after light normalisation, else 0.

	Normalisation is a single pass replacing "//" pairs with "/" and
	stripping one trailing slash - deliberately not a full normpath.
	"""
	def _tidy(p):
		p = re.sub(r"//",r"/",p)
		return re.sub("/$","",p)
	if _tidy(path1) == _tidy(path2):
		return 1
	return 0
5066 +
def ismount(path):
	"enhanced to handle bind mounts"
	# fast path: a genuine mount point
	if os.path.ismount(path):
		return 1
	# bind mounts don't show up via os.path.ismount; scan mount(8) output
	a=os.popen("mount")
	mylines=a.readlines()
	a.close()
	for line in mylines:
		mysplit=line.split()
		# field 2 of "dev on /path type ..." is the mount point
		if pathcompare(path,mysplit[2]):
			return 1
	return 0
5079 +
def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
	"helper function to help targets parse additional arguments"
	# Copies recognised keys from addlargs into myspec; every unknown key
	# and every missing required key is collected so a single CatalystError
	# reports all problems at once.
	global valid_config_file_values

	messages = []
	for x in addlargs.keys():
		if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
			messages.append("Argument \""+x+"\" not recognized.")
		else:
			myspec[x]=addlargs[x]

	for x in requiredspec:
		if x not in myspec:
			messages.append("Required argument \""+x+"\" not specified.")

	if messages:
		raise CatalystError, '\n\tAlso: '.join(messages)
5097 +
def touch(myfile):
	"""Create *myfile*; raise CatalystError on I/O failure.

	NOTE(review): opening with "w" truncates an existing file, unlike
	touch(1) which only updates timestamps.
	"""
	try:
		myf=open(myfile,"w")
		myf.close()
	except IOError:
		raise CatalystError, "Could not touch "+myfile+"."
5104 +
def countdown(secs=5, doing="Starting"):
	"""Print an interruptible *secs*-second countdown before an action.

	A secs of 0 prints nothing and returns immediately.
	"""
	if secs:
		print ">>> Waiting",secs,"seconds before starting..."
		print ">>> (Control-C to abort)...\n"+doing+" in: ",
		ticks=range(secs)
		ticks.reverse()
		for sec in ticks:
			sys.stdout.write(str(sec+1)+" ")
			sys.stdout.flush()
			time.sleep(1)
		print
5116 +
def normpath(mypath):
	"""os.path.normpath() variant preserving a trailing slash.

	Also collapses a POSIX double leading slash (which normpath keeps,
	because it can be implementation-defined) down to a single one.  An
	empty string is returned unchanged; the original indexed mypath[-1]
	and raised IndexError on empty input.
	"""
	if not mypath:
		return mypath
	TrailingSlash = mypath[-1] == "/"
	newpath = os.path.normpath(mypath)
	if len(newpath) > 1:
		if newpath[:2] == "//":
			newpath = newpath[1:]
	if TrailingSlash:
		newpath=newpath+'/'
	return newpath
5128 diff --git a/catalyst/modules/embedded_target.py b/catalyst/modules/embedded_target.py
5129 new file mode 100644
5130 index 0000000..f38ea00
5131 --- /dev/null
5132 +++ b/catalyst/modules/embedded_target.py
5133 @@ -0,0 +1,51 @@
5134 +"""
5135 +Embedded target, similar to the stage2 target, builds upon a stage2 tarball.
5136 +
5137 +A stage2 tarball is unpacked, but instead
5138 +of building a stage3, it emerges @system into another directory
5139 +inside the stage2 system. This way, we do not have to emerge GCC/portage
5140 +into the staged system.
5141 +It may sound complicated but basically it runs
5142 +ROOT=/tmp/submerge emerge --something foo bar .
5143 +"""
5144 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
5145 +
5146 +import os,string,imp,types,shutil
5147 +from catalyst_support import *
5148 +from generic_stage_target import *
5149 +from stat import *
5150 +
class embedded_target(generic_stage_target):
	"""
	Builder class for embedded target
	"""
	def __init__(self,spec,addlargs):
		# no extra required keys beyond what generic_stage_target demands
		self.required_values=[]
		self.valid_values=[]
		self.valid_values.extend(["embedded/empty","embedded/rm","embedded/unmerge","embedded/fs-prepare","embedded/fs-finish","embedded/mergeroot","embedded/packages","embedded/fs-type","embedded/runscript","boot/kernel","embedded/linuxrc"])
		self.valid_values.extend(["embedded/use"])
		# fs-ops only makes sense once a filesystem type has been chosen
		if "embedded/fs-type" in addlargs:
			self.valid_values.append("embedded/fs-ops")

		generic_stage_target.__init__(self,spec,addlargs)
		self.set_build_kernel_vars(addlargs)

	def set_action_sequence(self):
		# full build pipeline for the embedded target, in execution order
		self.settings["action_sequence"]=["dir_setup","unpack","unpack_snapshot",\
					"config_profile_link","setup_confdir",\
					"portage_overlay","bind","chroot_setup",\
					"setup_environment","build_kernel","build_packages",\
					"bootloader","root_overlay","fsscript","unmerge",\
					"unbind","remove","empty","clean","capture","clear_autoresume"]

	def set_stage_path(self):
		# everything is merged into /tmp/mergeroot inside the chroot
		self.settings["stage_path"]=normpath(self.settings["chroot_path"]+"/tmp/mergeroot")
		print "embedded stage path is "+self.settings["stage_path"]

	def set_root_path(self):
		# ROOT= path used when emerging packages into the embedded image
		self.settings["root_path"]=normpath("/tmp/mergeroot")
		print "embedded root path is "+self.settings["root_path"]
5181 +
def register(foo):
	"""Add the embedded target to catalyst's target registry and return it."""
	foo["embedded"] = embedded_target
	return foo
5185 diff --git a/catalyst/modules/generic_stage_target.py b/catalyst/modules/generic_stage_target.py
5186 new file mode 100644
5187 index 0000000..e99e652
5188 --- /dev/null
5189 +++ b/catalyst/modules/generic_stage_target.py
5190 @@ -0,0 +1,1691 @@
5191 +import os,string,imp,types,shutil
5192 +from catalyst_support import *
5193 +from generic_target import *
5194 +from stat import *
5195 +import catalyst_lock
5196 +
5197 +class generic_stage_target(generic_target):
5198 + """
5199 + This class does all of the chroot setup, copying of files, etc. It is
5200 + the driver class for pretty much everything that Catalyst does.
5201 + """
5202 + def __init__(self,myspec,addlargs):
5203 + self.required_values.extend(["version_stamp","target","subarch",\
5204 + "rel_type","profile","snapshot","source_subpath"])
5205 +
5206 + self.valid_values.extend(["version_stamp","target","subarch",\
5207 + "rel_type","profile","snapshot","source_subpath","portage_confdir",\
5208 + "cflags","cxxflags","ldflags","cbuild","hostuse","portage_overlay",\
5209 + "distcc_hosts","makeopts","pkgcache_path","kerncache_path"])
5210 +
5211 + self.set_valid_build_kernel_vars(addlargs)
5212 + generic_target.__init__(self,myspec,addlargs)
5213 +
5214 + """
5215 + The semantics of subarchmap and machinemap changed a bit in 2.0.3 to
5216 + work better with vapier's CBUILD stuff. I've removed the "monolithic"
5217 + machinemap from this file and split up its contents amongst the
5218 + various arch/foo.py files.
5219 +
5220 + When register() is called on each module in the arch/ dir, it now
5221 + returns a tuple instead of acting on the subarchmap dict that is
5222 + passed to it. The tuple contains the values that were previously
5223 + added to subarchmap as well as a new list of CHOSTs that go along
5224 + with that arch. This allows us to build machinemap on the fly based
5225 + on the keys in subarchmap and the values of the 2nd list returned
5226 + (tmpmachinemap).
5227 +
5228 + Also, after talking with vapier. I have a slightly better idea of what
5229 + certain variables are used for and what they should be set to. Neither
5230 + 'buildarch' or 'hostarch' are used directly, so their value doesn't
5231 + really matter. They are just compared to determine if we are
5232 + cross-compiling. Because of this, they are just set to the name of the
5233 + module in arch/ that the subarch is part of to make things simpler.
5234 + The entire build process is still based off of 'subarch' like it was
5235 + previously. -agaffney
5236 + """
5237 +
5238 + self.archmap = {}
5239 + self.subarchmap = {}
5240 + machinemap = {}
5241 + arch_dir = self.settings["PythonDir"] + "/arch/"
5242 + for x in [x[:-3] for x in os.listdir(arch_dir) if x.endswith(".py")]:
5243 + try:
5244 + fh=open(arch_dir + x + ".py")
5245 + """
5246 + This next line loads the plugin as a module and assigns it to
5247 + archmap[x]
5248 + """
5249 + self.archmap[x]=imp.load_module(x,fh,"../arch/" + x + ".py",
5250 + (".py", "r", imp.PY_SOURCE))
5251 + """
5252 + This next line registers all the subarches supported in the
5253 + plugin
5254 + """
5255 + tmpsubarchmap, tmpmachinemap = self.archmap[x].register()
5256 + self.subarchmap.update(tmpsubarchmap)
5257 + for machine in tmpmachinemap:
5258 + machinemap[machine] = x
5259 + for subarch in tmpsubarchmap:
5260 + machinemap[subarch] = x
5261 + fh.close()
5262 + except IOError:
5263 + """
5264 + This message should probably change a bit, since everything in
5265 + the dir should load just fine. If it doesn't, it's probably a
5266 + syntax error in the module
5267 + """
5268 + msg("Can't find/load " + x + ".py plugin in " + arch_dir)
5269 +
5270 + if "chost" in self.settings:
5271 + hostmachine = self.settings["chost"].split("-")[0]
5272 + if hostmachine not in machinemap:
5273 + raise CatalystError, "Unknown host machine type "+hostmachine
5274 + self.settings["hostarch"]=machinemap[hostmachine]
5275 + else:
5276 + hostmachine = self.settings["subarch"]
5277 + if hostmachine in machinemap:
5278 + hostmachine = machinemap[hostmachine]
5279 + self.settings["hostarch"]=hostmachine
5280 + if "cbuild" in self.settings:
5281 + buildmachine = self.settings["cbuild"].split("-")[0]
5282 + else:
5283 + buildmachine = os.uname()[4]
5284 + if buildmachine not in machinemap:
5285 + raise CatalystError, "Unknown build machine type "+buildmachine
5286 + self.settings["buildarch"]=machinemap[buildmachine]
5287 + self.settings["crosscompile"]=(self.settings["hostarch"]!=\
5288 + self.settings["buildarch"])
5289 +
5290 + """ Call arch constructor, pass our settings """
5291 + try:
5292 + self.arch=self.subarchmap[self.settings["subarch"]](self.settings)
5293 + except KeyError:
5294 + print "Invalid subarch: "+self.settings["subarch"]
5295 + print "Choose one of the following:",
5296 + for x in self.subarchmap:
5297 + print x,
5298 + print
5299 + sys.exit(2)
5300 +
5301 + print "Using target:",self.settings["target"]
5302 + """ Print a nice informational message """
5303 + if self.settings["buildarch"]==self.settings["hostarch"]:
5304 + print "Building natively for",self.settings["hostarch"]
5305 + elif self.settings["crosscompile"]:
5306 + print "Cross-compiling on",self.settings["buildarch"],\
5307 + "for different machine type",self.settings["hostarch"]
5308 + else:
5309 + print "Building on",self.settings["buildarch"],\
5310 + "for alternate personality type",self.settings["hostarch"]
5311 +
5312 + """ This must be set first as other set_ options depend on this """
5313 + self.set_spec_prefix()
5314 +
5315 + """ Define all of our core variables """
5316 + self.set_target_profile()
5317 + self.set_target_subpath()
5318 + self.set_source_subpath()
5319 +
5320 + """ Set paths """
5321 + self.set_snapshot_path()
5322 + self.set_root_path()
5323 + self.set_source_path()
5324 + self.set_snapcache_path()
5325 + self.set_chroot_path()
5326 + self.set_autoresume_path()
5327 + self.set_dest_path()
5328 + self.set_stage_path()
5329 + self.set_target_path()
5330 +
5331 + self.set_controller_file()
5332 + self.set_action_sequence()
5333 + self.set_use()
5334 + self.set_cleanables()
5335 + self.set_iso_volume_id()
5336 + self.set_build_kernel_vars()
5337 + self.set_fsscript()
5338 + self.set_install_mask()
5339 + self.set_rcadd()
5340 + self.set_rcdel()
5341 + self.set_cdtar()
5342 + self.set_fstype()
5343 + self.set_fsops()
5344 + self.set_iso()
5345 + self.set_packages()
5346 + self.set_rm()
5347 + self.set_linuxrc()
5348 + self.set_busybox_config()
5349 + self.set_overlay()
5350 + self.set_portage_overlay()
5351 + self.set_root_overlay()
5352 +
5353 + """
5354 + This next line checks to make sure that the specified variables exist
5355 + on disk.
5356 + """
5357 + #pdb.set_trace()
5358 + file_locate(self.settings,["source_path","snapshot_path","distdir"],\
5359 + expand=0)
5360 + """ If we are using portage_confdir, check that as well. """
5361 + if "portage_confdir" in self.settings:
5362 + file_locate(self.settings,["portage_confdir"],expand=0)
5363 +
5364 + """ Setup our mount points """
5365 + if "SNAPCACHE" in self.settings:
5366 + self.mounts=["/proc","/dev","/usr/portage","/usr/portage/distfiles","/var/tmp/portage"]
5367 + self.mountmap={"/proc":"/proc","/dev":"/dev","/dev/pts":"/dev/pts",\
5368 + "/usr/portage":self.settings["snapshot_cache_path"]+"/portage",\
5369 + "/usr/portage/distfiles":self.settings["distdir"],"/var/tmp/portage":"tmpfs"}
5370 + else:
5371 + self.mounts=["proc","dev", "distdir", "port_tmpdir"]
5372 + self.mountmap={"proc":"/proc", "dev":"/dev", "pts":"/dev/pts",
5373 + "distdir":self.settings["distdir"], "port_tmpdir":"tmpfs"}
5374 + if os.uname()[0] == "Linux":
5375 + self.mounts.append("pts")
5376 +
5377 + self.set_mounts()
5378 +
5379 + """
5380 + Configure any user specified options (either in catalyst.conf or on
5381 + the command line).
5382 + """
5383 + if "PKGCACHE" in self.settings:
5384 + self.set_pkgcache_path()
5385 + print "Location of the package cache is "+\
5386 + self.settings["pkgcache_path"]
5387 + self.mounts.append("packagedir")
5388 + self.mountmap["packagedir"] = self.settings["pkgcache_path"]
5389 +
5390 + if "KERNCACHE" in self.settings:
5391 + self.set_kerncache_path()
5392 + print "Location of the kerncache is "+\
5393 + self.settings["kerncache_path"]
5394 + self.mounts.append("kerncache")
5395 + self.mountmap["kerncache"]=self.settings["kerncache_path"]
5396 +
5397 + if "CCACHE" in self.settings:
5398 + if "CCACHE_DIR" in os.environ:
5399 + ccdir=os.environ["CCACHE_DIR"]
5400 + del os.environ["CCACHE_DIR"]
5401 + else:
5402 + ccdir="/root/.ccache"
5403 + if not os.path.isdir(ccdir):
5404 + raise CatalystError,\
5405 + "Compiler cache support can't be enabled (can't find "+\
5406 + ccdir+")"
5407 + self.mounts.append("ccache")
5408 + self.mountmap["ccache"]=ccdir
5409 + """ for the chroot: """
5410 + self.env["CCACHE_DIR"]="/var/tmp/ccache"
5411 +
5412 + if "ICECREAM" in self.settings:
5413 + self.mounts.append("/var/cache/icecream")
5414 + self.mountmap["/var/cache/icecream"]="/var/cache/icecream"
5415 + self.env["PATH"]="/usr/lib/icecc/bin:"+self.env["PATH"]
5416 +
5417 + if "port_logdir" in self.settings:
5418 + self.mounts.append("/var/log/portage")
5419 + self.mountmap["/var/log/portage"]=self.settings["port_logdir"]
5420 + self.env["PORT_LOGDIR"]="/var/log/portage"
5421 + self.env["PORT_LOGDIR_CLEAN"]='find "${PORT_LOGDIR}" -type f ! -name "summary.log*" -mtime +30 -delete'
5422 +
5423 + def override_cbuild(self):
5424 + if "CBUILD" in self.makeconf:
5425 + self.settings["CBUILD"]=self.makeconf["CBUILD"]
5426 +
5427 + def override_chost(self):
5428 + if "CHOST" in self.makeconf:
5429 + self.settings["CHOST"]=self.makeconf["CHOST"]
5430 +
5431 + def override_cflags(self):
5432 + if "CFLAGS" in self.makeconf:
5433 + self.settings["CFLAGS"]=self.makeconf["CFLAGS"]
5434 +
5435 + def override_cxxflags(self):
5436 + if "CXXFLAGS" in self.makeconf:
5437 + self.settings["CXXFLAGS"]=self.makeconf["CXXFLAGS"]
5438 +
5439 + def override_ldflags(self):
5440 + if "LDFLAGS" in self.makeconf:
5441 + self.settings["LDFLAGS"]=self.makeconf["LDFLAGS"]
5442 +
5443 + def set_install_mask(self):
5444 + if "install_mask" in self.settings:
5445 + if type(self.settings["install_mask"])!=types.StringType:
5446 + self.settings["install_mask"]=\
5447 + string.join(self.settings["install_mask"])
5448 +
5449 + def set_spec_prefix(self):
5450 + self.settings["spec_prefix"]=self.settings["target"]
5451 +
5452 + def set_target_profile(self):
5453 + self.settings["target_profile"]=self.settings["profile"]
5454 +
5455 + def set_target_subpath(self):
5456 + self.settings["target_subpath"]=self.settings["rel_type"]+"/"+\
5457 + self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
5458 + self.settings["version_stamp"]
5459 +
5460 + def set_source_subpath(self):
5461 + if type(self.settings["source_subpath"])!=types.StringType:
5462 + raise CatalystError,\
5463 + "source_subpath should have been a string. Perhaps you have something wrong in your spec file?"
5464 +
5465 + def set_pkgcache_path(self):
5466 + if "pkgcache_path" in self.settings:
5467 + if type(self.settings["pkgcache_path"])!=types.StringType:
5468 + self.settings["pkgcache_path"]=\
5469 + normpath(string.join(self.settings["pkgcache_path"]))
5470 + else:
5471 + self.settings["pkgcache_path"]=\
5472 + normpath(self.settings["storedir"]+"/packages/"+\
5473 + self.settings["target_subpath"]+"/")
5474 +
5475 + def set_kerncache_path(self):
5476 + if "kerncache_path" in self.settings:
5477 + if type(self.settings["kerncache_path"])!=types.StringType:
5478 + self.settings["kerncache_path"]=\
5479 + normpath(string.join(self.settings["kerncache_path"]))
5480 + else:
5481 + self.settings["kerncache_path"]=normpath(self.settings["storedir"]+\
5482 + "/kerncache/"+self.settings["target_subpath"]+"/")
5483 +
5484 + def set_target_path(self):
5485 + self.settings["target_path"]=normpath(self.settings["storedir"]+\
5486 + "/builds/"+self.settings["target_subpath"]+".tar.bz2")
5487 + if "AUTORESUME" in self.settings\
5488 + and os.path.exists(self.settings["autoresume_path"]+\
5489 + "setup_target_path"):
5490 + print \
5491 + "Resume point detected, skipping target path setup operation..."
5492 + else:
5493 + """ First clean up any existing target stuff """
5494 + # XXX WTF are we removing the old tarball before we start building the
5495 + # XXX new one? If the build fails, you don't want to be left with
5496 + # XXX nothing at all
5497 +# if os.path.isfile(self.settings["target_path"]):
5498 +# cmd("rm -f "+self.settings["target_path"],\
5499 +# "Could not remove existing file: "\
5500 +# +self.settings["target_path"],env=self.env)
5501 + touch(self.settings["autoresume_path"]+"setup_target_path")
5502 +
5503 + if not os.path.exists(self.settings["storedir"]+"/builds/"):
5504 + os.makedirs(self.settings["storedir"]+"/builds/")
5505 +
5506 + def set_fsscript(self):
5507 + if self.settings["spec_prefix"]+"/fsscript" in self.settings:
5508 + self.settings["fsscript"]=\
5509 + self.settings[self.settings["spec_prefix"]+"/fsscript"]
5510 + del self.settings[self.settings["spec_prefix"]+"/fsscript"]
5511 +
5512 + def set_rcadd(self):
5513 + if self.settings["spec_prefix"]+"/rcadd" in self.settings:
5514 + self.settings["rcadd"]=\
5515 + self.settings[self.settings["spec_prefix"]+"/rcadd"]
5516 + del self.settings[self.settings["spec_prefix"]+"/rcadd"]
5517 +
5518 + def set_rcdel(self):
5519 + if self.settings["spec_prefix"]+"/rcdel" in self.settings:
5520 + self.settings["rcdel"]=\
5521 + self.settings[self.settings["spec_prefix"]+"/rcdel"]
5522 + del self.settings[self.settings["spec_prefix"]+"/rcdel"]
5523 +
5524 + def set_cdtar(self):
5525 + if self.settings["spec_prefix"]+"/cdtar" in self.settings:
5526 + self.settings["cdtar"]=\
5527 + normpath(self.settings[self.settings["spec_prefix"]+"/cdtar"])
5528 + del self.settings[self.settings["spec_prefix"]+"/cdtar"]
5529 +
5530 + def set_iso(self):
5531 + if self.settings["spec_prefix"]+"/iso" in self.settings:
5532 + if self.settings[self.settings["spec_prefix"]+"/iso"].startswith('/'):
5533 + self.settings["iso"]=\
5534 + normpath(self.settings[self.settings["spec_prefix"]+"/iso"])
5535 + else:
5536 + # This automatically prepends the build dir to the ISO output path
5537 + # if it doesn't start with a /
5538 + self.settings["iso"] = normpath(self.settings["storedir"] + \
5539 + "/builds/" + self.settings["rel_type"] + "/" + \
5540 + self.settings[self.settings["spec_prefix"]+"/iso"])
5541 + del self.settings[self.settings["spec_prefix"]+"/iso"]
5542 +
5543 + def set_fstype(self):
5544 + if self.settings["spec_prefix"]+"/fstype" in self.settings:
5545 + self.settings["fstype"]=\
5546 + self.settings[self.settings["spec_prefix"]+"/fstype"]
5547 + del self.settings[self.settings["spec_prefix"]+"/fstype"]
5548 +
5549 + if "fstype" not in self.settings:
5550 + self.settings["fstype"]="normal"
5551 + for x in self.valid_values:
5552 + if x == self.settings["spec_prefix"]+"/fstype":
5553 + print "\n"+self.settings["spec_prefix"]+\
5554 + "/fstype is being set to the default of \"normal\"\n"
5555 +
5556 + def set_fsops(self):
5557 + if "fstype" in self.settings:
5558 + self.valid_values.append("fsops")
5559 + if self.settings["spec_prefix"]+"/fsops" in self.settings:
5560 + self.settings["fsops"]=\
5561 + self.settings[self.settings["spec_prefix"]+"/fsops"]
5562 + del self.settings[self.settings["spec_prefix"]+"/fsops"]
5563 +
5564 + def set_source_path(self):
5565 + if "SEEDCACHE" in self.settings\
5566 + and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+\
5567 + self.settings["source_subpath"]+"/")):
5568 + self.settings["source_path"]=normpath(self.settings["storedir"]+\
5569 + "/tmp/"+self.settings["source_subpath"]+"/")
5570 + else:
5571 + self.settings["source_path"]=normpath(self.settings["storedir"]+\
5572 + "/builds/"+self.settings["source_subpath"]+".tar.bz2")
5573 + if os.path.isfile(self.settings["source_path"]):
5574 + # XXX: Is this even necessary if the previous check passes?
5575 + if os.path.exists(self.settings["source_path"]):
5576 + self.settings["source_path_hash"]=\
5577 + generate_hash(self.settings["source_path"],\
5578 + hash_function=self.settings["hash_function"],\
5579 + verbose=False)
5580 + print "Source path set to "+self.settings["source_path"]
5581 + if os.path.isdir(self.settings["source_path"]):
5582 + print "\tIf this is not desired, remove this directory or turn off"
5583 + print "\tseedcache in the options of catalyst.conf the source path"
5584 + print "\twill then be "+\
5585 + normpath(self.settings["storedir"]+"/builds/"+\
5586 + self.settings["source_subpath"]+".tar.bz2\n")
5587 +
5588 + def set_dest_path(self):
5589 + if "root_path" in self.settings:
5590 + self.settings["destpath"]=normpath(self.settings["chroot_path"]+\
5591 + self.settings["root_path"])
5592 + else:
5593 + self.settings["destpath"]=normpath(self.settings["chroot_path"])
5594 +
5595 + def set_cleanables(self):
5596 + self.settings["cleanables"]=["/etc/resolv.conf","/var/tmp/*","/tmp/*",\
5597 + "/root/*", self.settings["portdir"]]
5598 +
5599 + def set_snapshot_path(self):
5600 + self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
5601 + "/snapshots/" + self.settings["snapshot_name"] +
5602 + self.settings["snapshot"]+".tar.xz")
5603 +
5604 + if os.path.exists(self.settings["snapshot_path"]):
5605 + self.settings["snapshot_path_hash"]=\
5606 + generate_hash(self.settings["snapshot_path"],\
5607 + hash_function=self.settings["hash_function"],verbose=False)
5608 + else:
5609 + self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
5610 + "/snapshots/" + self.settings["snapshot_name"] +
5611 + self.settings["snapshot"]+".tar.bz2")
5612 +
5613 + if os.path.exists(self.settings["snapshot_path"]):
5614 + self.settings["snapshot_path_hash"]=\
5615 + generate_hash(self.settings["snapshot_path"],\
5616 + hash_function=self.settings["hash_function"],verbose=False)
5617 +
5618 + def set_snapcache_path(self):
5619 + if "SNAPCACHE" in self.settings:
5620 + self.settings["snapshot_cache_path"]=\
5621 + normpath(self.settings["snapshot_cache"]+"/"+\
5622 + self.settings["snapshot"]+"/")
5623 + self.snapcache_lock=\
5624 + catalyst_lock.LockDir(self.settings["snapshot_cache_path"])
5625 + print "Caching snapshot to "+self.settings["snapshot_cache_path"]
5626 +
5627 + def set_chroot_path(self):
5628 + """
5629 + NOTE: the trailing slash is very important!
5630 + Things *will* break without it!
5631 + """
5632 + self.settings["chroot_path"]=normpath(self.settings["storedir"]+\
5633 + "/tmp/"+self.settings["target_subpath"]+"/")
5634 + self.chroot_lock=catalyst_lock.LockDir(self.settings["chroot_path"])
5635 +
5636 + def set_autoresume_path(self):
5637 + self.settings["autoresume_path"]=normpath(self.settings["storedir"]+\
5638 + "/tmp/"+self.settings["rel_type"]+"/"+".autoresume-"+\
5639 + self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
5640 + self.settings["version_stamp"]+"/")
5641 + if "AUTORESUME" in self.settings:
5642 + print "The autoresume path is " + self.settings["autoresume_path"]
5643 + if not os.path.exists(self.settings["autoresume_path"]):
5644 + os.makedirs(self.settings["autoresume_path"],0755)
5645 +
5646 + def set_controller_file(self):
5647 + self.settings["controller_file"]=normpath(self.settings["sharedir"]+\
5648 + "/targets/"+self.settings["target"]+"/"+self.settings["target"]+\
5649 + "-controller.sh")
5650 +
5651 + def set_iso_volume_id(self):
5652 + if self.settings["spec_prefix"]+"/volid" in self.settings:
5653 + self.settings["iso_volume_id"]=\
5654 + self.settings[self.settings["spec_prefix"]+"/volid"]
5655 + if len(self.settings["iso_volume_id"])>32:
5656 + raise CatalystError,\
5657 + "ISO volume ID must not exceed 32 characters."
5658 + else:
5659 + self.settings["iso_volume_id"]="catalyst "+self.settings["snapshot"]
5660 +
5661 + def set_action_sequence(self):
5662 + """ Default action sequence for run method """
5663 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
5664 + "setup_confdir","portage_overlay",\
5665 + "base_dirs","bind","chroot_setup","setup_environment",\
5666 + "run_local","preclean","unbind","clean"]
5667 +# if "TARBALL" in self.settings or \
5668 +# "FETCH" not in self.settings:
5669 + if "FETCH" not in self.settings:
5670 + self.settings["action_sequence"].append("capture")
5671 + self.settings["action_sequence"].append("clear_autoresume")
5672 +
5673 + def set_use(self):
5674 + if self.settings["spec_prefix"]+"/use" in self.settings:
5675 + self.settings["use"]=\
5676 + self.settings[self.settings["spec_prefix"]+"/use"]
5677 + del self.settings[self.settings["spec_prefix"]+"/use"]
5678 + if "use" not in self.settings:
5679 + self.settings["use"]=""
5680 + if type(self.settings["use"])==types.StringType:
5681 + self.settings["use"]=self.settings["use"].split()
5682 +
5683 + # Force bindist when options ask for it
5684 + if "BINDIST" in self.settings:
5685 + self.settings["use"].append("bindist")
5686 +
5687 + def set_stage_path(self):
5688 + self.settings["stage_path"]=normpath(self.settings["chroot_path"])
5689 +
5690 + def set_mounts(self):
5691 + pass
5692 +
5693 + def set_packages(self):
5694 + pass
5695 +
5696 + def set_rm(self):
5697 + if self.settings["spec_prefix"]+"/rm" in self.settings:
5698 + if type(self.settings[self.settings["spec_prefix"]+\
5699 + "/rm"])==types.StringType:
5700 + self.settings[self.settings["spec_prefix"]+"/rm"]=\
5701 + self.settings[self.settings["spec_prefix"]+"/rm"].split()
5702 +
5703 + def set_linuxrc(self):
5704 + if self.settings["spec_prefix"]+"/linuxrc" in self.settings:
5705 + if type(self.settings[self.settings["spec_prefix"]+\
5706 + "/linuxrc"])==types.StringType:
5707 + self.settings["linuxrc"]=\
5708 + self.settings[self.settings["spec_prefix"]+"/linuxrc"]
5709 + del self.settings[self.settings["spec_prefix"]+"/linuxrc"]
5710 +
5711 + def set_busybox_config(self):
5712 + if self.settings["spec_prefix"]+"/busybox_config" in self.settings:
5713 + if type(self.settings[self.settings["spec_prefix"]+\
5714 + "/busybox_config"])==types.StringType:
5715 + self.settings["busybox_config"]=\
5716 + self.settings[self.settings["spec_prefix"]+"/busybox_config"]
5717 + del self.settings[self.settings["spec_prefix"]+"/busybox_config"]
5718 +
5719 + def set_portage_overlay(self):
5720 + if "portage_overlay" in self.settings:
5721 + if type(self.settings["portage_overlay"])==types.StringType:
5722 + self.settings["portage_overlay"]=\
5723 + self.settings["portage_overlay"].split()
5724 + print "portage_overlay directories are set to: \""+\
5725 + string.join(self.settings["portage_overlay"])+"\""
5726 +
5727 + def set_overlay(self):
5728 + if self.settings["spec_prefix"]+"/overlay" in self.settings:
5729 + if type(self.settings[self.settings["spec_prefix"]+\
5730 + "/overlay"])==types.StringType:
5731 + self.settings[self.settings["spec_prefix"]+"/overlay"]=\
5732 + self.settings[self.settings["spec_prefix"]+\
5733 + "/overlay"].split()
5734 +
5735 + def set_root_overlay(self):
5736 + if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
5737 + if type(self.settings[self.settings["spec_prefix"]+\
5738 + "/root_overlay"])==types.StringType:
5739 + self.settings[self.settings["spec_prefix"]+"/root_overlay"]=\
5740 + self.settings[self.settings["spec_prefix"]+\
5741 + "/root_overlay"].split()
5742 +
5743 + def set_root_path(self):
5744 + """ ROOT= variable for emerges """
5745 + self.settings["root_path"]="/"
5746 +
5747 + def set_valid_build_kernel_vars(self,addlargs):
5748 + if "boot/kernel" in addlargs:
5749 + if type(addlargs["boot/kernel"])==types.StringType:
5750 + loopy=[addlargs["boot/kernel"]]
5751 + else:
5752 + loopy=addlargs["boot/kernel"]
5753 +
5754 + for x in loopy:
5755 + self.valid_values.append("boot/kernel/"+x+"/aliases")
5756 + self.valid_values.append("boot/kernel/"+x+"/config")
5757 + self.valid_values.append("boot/kernel/"+x+"/console")
5758 + self.valid_values.append("boot/kernel/"+x+"/extraversion")
5759 + self.valid_values.append("boot/kernel/"+x+"/gk_action")
5760 + self.valid_values.append("boot/kernel/"+x+"/gk_kernargs")
5761 + self.valid_values.append("boot/kernel/"+x+"/initramfs_overlay")
5762 + self.valid_values.append("boot/kernel/"+x+"/machine_type")
5763 + self.valid_values.append("boot/kernel/"+x+"/sources")
5764 + self.valid_values.append("boot/kernel/"+x+"/softlevel")
5765 + self.valid_values.append("boot/kernel/"+x+"/use")
5766 + self.valid_values.append("boot/kernel/"+x+"/packages")
5767 + if "boot/kernel/"+x+"/packages" in addlargs:
5768 + if type(addlargs["boot/kernel/"+x+\
5769 + "/packages"])==types.StringType:
5770 + addlargs["boot/kernel/"+x+"/packages"]=\
5771 + [addlargs["boot/kernel/"+x+"/packages"]]
5772 +
5773 + def set_build_kernel_vars(self):
5774 + if self.settings["spec_prefix"]+"/gk_mainargs" in self.settings:
5775 + self.settings["gk_mainargs"]=\
5776 + self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
5777 + del self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
5778 +
5779 + def kill_chroot_pids(self):
5780 + print "Checking for processes running in chroot and killing them."
5781 +
5782 + """
5783 + Force environment variables to be exported so script can see them
5784 + """
5785 + self.setup_environment()
5786 +
5787 + if os.path.exists(self.settings["sharedir"]+\
5788 + "/targets/support/kill-chroot-pids.sh"):
5789 + cmd("/bin/bash "+self.settings["sharedir"]+\
5790 + "/targets/support/kill-chroot-pids.sh",\
5791 + "kill-chroot-pids script failed.",env=self.env)
5792 +
5793 + def mount_safety_check(self):
5794 + mypath=self.settings["chroot_path"]
5795 +
5796 + """
5797 + Check and verify that none of our paths in mypath are mounted. We don't
5798 + want to clean up with things still mounted, and this allows us to check.
5799 + Returns 1 on ok, 0 on "something is still mounted" case.
5800 + """
5801 +
5802 + if not os.path.exists(mypath):
5803 + return
5804 +
5805 + for x in self.mounts:
5806 + if not os.path.exists(mypath + self.mountmap[x]):
5807 + continue
5808 +
5809 + if ismount(mypath +self.mountmap[x]):
5810 + """ Something is still mounted """
5811 + try:
5812 + print self.mountmap[x] + " is still mounted; performing auto-bind-umount...",
5813 + """ Try to umount stuff ourselves """
5814 + self.unbind()
5815 + if ismount(mypath + self.mountmap[x]):
5816 + raise CatalystError, "Auto-unbind failed for " + self.mountmap[x]
5817 + else:
5818 + print "Auto-unbind successful..."
5819 + except CatalystError:
5820 + raise CatalystError, "Unable to auto-unbind " + self.mountmap[x]
5821 +
5822 + def unpack(self):
5823 + unpack=True
5824 +
5825 + clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+\
5826 + "unpack")
5827 +
5828 + if "SEEDCACHE" in self.settings:
5829 + if os.path.isdir(self.settings["source_path"]):
5830 + """ SEEDCACHE Is a directory, use rsync """
5831 + unpack_cmd="rsync -a --delete "+self.settings["source_path"]+\
5832 + " "+self.settings["chroot_path"]
5833 + display_msg="\nStarting rsync from "+\
5834 + self.settings["source_path"]+"\nto "+\
5835 + self.settings["chroot_path"]+\
5836 + " (This may take some time) ...\n"
5837 + error_msg="Rsync of "+self.settings["source_path"]+" to "+\
5838 + self.settings["chroot_path"]+" failed."
5839 + else:
5840 + """ SEEDCACHE is not a directory, try untar'ing """
5841 + print "Referenced SEEDCACHE does not appear to be a directory, trying to untar..."
5842 + display_msg="\nStarting tar extract from "+\
5843 + self.settings["source_path"]+"\nto "+\
5844 + self.settings["chroot_path"]+\
5845 + " (This may take some time) ...\n"
5846 + if "bz2" == self.settings["chroot_path"][-3:]:
5847 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5848 + self.settings["chroot_path"]
5849 + else:
5850 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5851 + self.settings["chroot_path"]
5852 + error_msg="Tarball extraction of "+\
5853 + self.settings["source_path"]+" to "+\
5854 + self.settings["chroot_path"]+" failed."
5855 + else:
5856 + """ No SEEDCACHE, use tar """
5857 + display_msg="\nStarting tar extract from "+\
5858 + self.settings["source_path"]+"\nto "+\
5859 + self.settings["chroot_path"]+\
5860 + " (This may take some time) ...\n"
5861 + if "bz2" == self.settings["chroot_path"][-3:]:
5862 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5863 + self.settings["chroot_path"]
5864 + else:
5865 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5866 + self.settings["chroot_path"]
5867 + error_msg="Tarball extraction of "+self.settings["source_path"]+\
5868 + " to "+self.settings["chroot_path"]+" failed."
5869 +
5870 + if "AUTORESUME" in self.settings:
5871 + if os.path.isdir(self.settings["source_path"]) \
5872 + and os.path.exists(self.settings["autoresume_path"]+"unpack"):
5873 + """ Autoresume is valid, SEEDCACHE is valid """
5874 + unpack=False
5875 + invalid_snapshot=False
5876 +
5877 + elif os.path.isfile(self.settings["source_path"]) \
5878 + and self.settings["source_path_hash"]==clst_unpack_hash:
5879 + """ Autoresume is valid, tarball is valid """
5880 + unpack=False
5881 + invalid_snapshot=True
5882 +
5883 + elif os.path.isdir(self.settings["source_path"]) \
5884 + and not os.path.exists(self.settings["autoresume_path"]+\
5885 + "unpack"):
5886 + """ Autoresume is invalid, SEEDCACHE """
5887 + unpack=True
5888 + invalid_snapshot=False
5889 +
5890 + elif os.path.isfile(self.settings["source_path"]) \
5891 + and self.settings["source_path_hash"]!=clst_unpack_hash:
5892 + """ Autoresume is invalid, tarball """
5893 + unpack=True
5894 + invalid_snapshot=True
5895 + else:
5896 + """ No autoresume, SEEDCACHE """
5897 + if "SEEDCACHE" in self.settings:
5898 + """ SEEDCACHE so let's run rsync and let it clean up """
5899 + if os.path.isdir(self.settings["source_path"]):
5900 + unpack=True
5901 + invalid_snapshot=False
5902 + elif os.path.isfile(self.settings["source_path"]):
5903 + """ Tarball so unpack and remove anything already there """
5904 + unpack=True
5905 + invalid_snapshot=True
5906 + """ No autoresume, no SEEDCACHE """
5907 + else:
5908 + """ Tarball so unpack and remove anything already there """
5909 + if os.path.isfile(self.settings["source_path"]):
5910 + unpack=True
5911 + invalid_snapshot=True
5912 + elif os.path.isdir(self.settings["source_path"]):
5913 + """ We should never reach this, so something is very wrong """
5914 + raise CatalystError,\
5915 + "source path is a dir but seedcache is not enabled"
5916 +
5917 + if unpack:
5918 + self.mount_safety_check()
5919 +
5920 + if invalid_snapshot:
5921 + if "AUTORESUME" in self.settings:
5922 + print "No Valid Resume point detected, cleaning up..."
5923 +
5924 + self.clear_autoresume()
5925 + self.clear_chroot()
5926 +
5927 + if not os.path.exists(self.settings["chroot_path"]):
5928 + os.makedirs(self.settings["chroot_path"])
5929 +
5930 + if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
5931 + os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
5932 +
5933 + if "PKGCACHE" in self.settings:
5934 + if not os.path.exists(self.settings["pkgcache_path"]):
5935 + os.makedirs(self.settings["pkgcache_path"],0755)
5936 +
5937 + if "KERNCACHE" in self.settings:
5938 + if not os.path.exists(self.settings["kerncache_path"]):
5939 + os.makedirs(self.settings["kerncache_path"],0755)
5940 +
5941 + print display_msg
5942 + cmd(unpack_cmd,error_msg,env=self.env)
5943 +
5944 + if "source_path_hash" in self.settings:
5945 + myf=open(self.settings["autoresume_path"]+"unpack","w")
5946 + myf.write(self.settings["source_path_hash"])
5947 + myf.close()
5948 + else:
5949 + touch(self.settings["autoresume_path"]+"unpack")
5950 + else:
5951 + print "Resume point detected, skipping unpack operation..."
5952 +
5953 + def unpack_snapshot(self):
5954 + unpack=True
5955 + snapshot_hash=read_from_clst(self.settings["autoresume_path"]+\
5956 + "unpack_portage")
5957 +
5958 + if "SNAPCACHE" in self.settings:
5959 + snapshot_cache_hash=\
5960 + read_from_clst(self.settings["snapshot_cache_path"]+\
5961 + "catalyst-hash")
5962 + destdir=self.settings["snapshot_cache_path"]
5963 + if "bz2" == self.settings["chroot_path"][-3:]:
5964 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+destdir
5965 + else:
5966 + unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+destdir
5967 + unpack_errmsg="Error unpacking snapshot"
5968 + cleanup_msg="Cleaning up invalid snapshot cache at \n\t"+\
5969 + self.settings["snapshot_cache_path"]+\
5970 + " (This can take a long time)..."
5971 + cleanup_errmsg="Error removing existing snapshot cache directory."
5972 + self.snapshot_lock_object=self.snapcache_lock
5973 +
5974 + if self.settings["snapshot_path_hash"]==snapshot_cache_hash:
5975 + print "Valid snapshot cache, skipping unpack of portage tree..."
5976 + unpack=False
5977 + else:
5978 + destdir=normpath(self.settings["chroot_path"] + self.settings["portdir"])
5979 + cleanup_errmsg="Error removing existing snapshot directory."
5980 + cleanup_msg=\
5981 + "Cleaning up existing portage tree (This can take a long time)..."
5982 + if "bz2" == self.settings["chroot_path"][-3:]:
5983 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+\
5984 + self.settings["chroot_path"]+"/usr"
5985 + else:
5986 + unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+\
5987 + self.settings["chroot_path"]+"/usr"
5988 + unpack_errmsg="Error unpacking snapshot"
5989 +
5990 + if "AUTORESUME" in self.settings \
5991 + and os.path.exists(self.settings["chroot_path"]+\
5992 + self.settings["portdir"]) \
5993 + and os.path.exists(self.settings["autoresume_path"]\
5994 + +"unpack_portage") \
5995 + and self.settings["snapshot_path_hash"] == snapshot_hash:
5996 + print \
5997 + "Valid Resume point detected, skipping unpack of portage tree..."
5998 + unpack=False
5999 +
6000 + if unpack:
6001 + if "SNAPCACHE" in self.settings:
6002 + self.snapshot_lock_object.write_lock()
6003 + if os.path.exists(destdir):
6004 + print cleanup_msg
6005 + cleanup_cmd="rm -rf "+destdir
6006 + cmd(cleanup_cmd,cleanup_errmsg,env=self.env)
6007 + if not os.path.exists(destdir):
6008 + os.makedirs(destdir,0755)
6009 +
6010 + print "Unpacking portage tree (This can take a long time) ..."
6011 + cmd(unpack_cmd,unpack_errmsg,env=self.env)
6012 +
6013 + if "SNAPCACHE" in self.settings:
6014 + myf=open(self.settings["snapshot_cache_path"]+"catalyst-hash","w")
6015 + myf.write(self.settings["snapshot_path_hash"])
6016 + myf.close()
6017 + else:
6018 + print "Setting snapshot autoresume point"
6019 + myf=open(self.settings["autoresume_path"]+"unpack_portage","w")
6020 + myf.write(self.settings["snapshot_path_hash"])
6021 + myf.close()
6022 +
6023 + if "SNAPCACHE" in self.settings:
6024 + self.snapshot_lock_object.unlock()
6025 +
6026 + def config_profile_link(self):
6027 + if "AUTORESUME" in self.settings \
6028 + and os.path.exists(self.settings["autoresume_path"]+\
6029 + "config_profile_link"):
6030 + print \
6031 + "Resume point detected, skipping config_profile_link operation..."
6032 + else:
6033 + # TODO: zmedico and I discussed making this a directory and pushing
6034 + # in a parent file, as well as other user-specified configuration.
6035 + print "Configuring profile link..."
6036 + cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.profile",\
6037 + "Error zapping profile link",env=self.env)
6038 + cmd("mkdir -p "+self.settings["chroot_path"]+"/etc/portage/")
6039 + cmd("ln -sf ../.." + self.settings["portdir"] + "/profiles/"+\
6040 + self.settings["target_profile"]+" "+\
6041 + self.settings["chroot_path"]+"/etc/portage/make.profile",\
6042 + "Error creating profile link",env=self.env)
6043 + touch(self.settings["autoresume_path"]+"config_profile_link")
6044 +
6045 + def setup_confdir(self):
6046 + if "AUTORESUME" in self.settings \
6047 + and os.path.exists(self.settings["autoresume_path"]+\
6048 + "setup_confdir"):
6049 + print "Resume point detected, skipping setup_confdir operation..."
6050 + else:
6051 + if "portage_confdir" in self.settings:
6052 + print "Configuring /etc/portage..."
6053 + cmd("rsync -a "+self.settings["portage_confdir"]+"/ "+\
6054 + self.settings["chroot_path"]+"/etc/portage/",\
6055 + "Error copying /etc/portage",env=self.env)
6056 + touch(self.settings["autoresume_path"]+"setup_confdir")
6057 +
6058 + def portage_overlay(self):
6059 + """ We copy the contents of our overlays to /usr/local/portage """
6060 + if "portage_overlay" in self.settings:
6061 + for x in self.settings["portage_overlay"]:
6062 + if os.path.exists(x):
6063 + print "Copying overlay dir " +x
6064 + cmd("mkdir -p "+self.settings["chroot_path"]+\
6065 + self.settings["local_overlay"],\
6066 + "Could not make portage_overlay dir",env=self.env)
6067 + cmd("cp -R "+x+"/* "+self.settings["chroot_path"]+\
6068 + self.settings["local_overlay"],\
6069 + "Could not copy portage_overlay",env=self.env)
6070 +
6071 + def root_overlay(self):
6072 + """ Copy over the root_overlay """
6073 + if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
6074 + for x in self.settings[self.settings["spec_prefix"]+\
6075 + "/root_overlay"]:
6076 + if os.path.exists(x):
6077 + print "Copying root_overlay: "+x
6078 + cmd("rsync -a "+x+"/ "+\
6079 + self.settings["chroot_path"],\
6080 + self.settings["spec_prefix"]+"/root_overlay: "+x+\
6081 + " copy failed.",env=self.env)
6082 +
6083 + def base_dirs(self):
6084 + pass
6085 +
    def bind(self):
        """Bind-mount every path in self.mounts into the chroot.

        Uses mount_nullfs/devfs on FreeBSD, --bind (or tmpfs for
        var_tmpfs_portage) on Linux. Takes the snapshot read lock before
        mounting /usr/portage under SNAPCACHE. On any mount failure,
        unbinds everything and raises CatalystError.
        """
        for x in self.mounts:
            # NOTE(review): existence is checked against mountmap[x] but the
            # directory is created at x — these differ for the tmpfs entry;
            # confirm this asymmetry is intended.
            if not os.path.exists(self.settings["chroot_path"] + self.mountmap[x]):
                os.makedirs(self.settings["chroot_path"]+x,0755)

            if not os.path.exists(self.mountmap[x]):
                if not self.mountmap[x] == "tmpfs":
                    os.makedirs(self.mountmap[x],0755)

            src=self.mountmap[x]
            # Guard the shared snapshot cache against concurrent writers.
            if "SNAPCACHE" in self.settings and x == "/usr/portage":
                self.snapshot_lock_object.read_lock()
            if os.uname()[0] == "FreeBSD":
                if src == "/dev":
                    retval=os.system("mount -t devfs none " +
                        self.settings["chroot_path"] + src)
                else:
                    retval=os.system("mount_nullfs " + src + " " +
                        self.settings["chroot_path"] + src)
            else:
                if src == "tmpfs":
                    # NOTE(review): if src is "tmpfs" but var_tmpfs_portage is
                    # unset, retval keeps its value from a prior iteration (or
                    # is unbound on the first) — looks fragile; verify.
                    if "var_tmpfs_portage" in self.settings:
                        retval=os.system("mount -t tmpfs -o size="+\
                            self.settings["var_tmpfs_portage"]+"G "+src+" "+\
                            self.settings["chroot_path"]+x)
                else:
                    retval=os.system("mount --bind " + src + " " +
                        self.settings["chroot_path"] + src)
            if retval!=0:
                self.unbind()
                raise CatalystError,"Couldn't bind mount " + src
6117 +
    def unbind(self):
        """Unmount all chroot bind-mounts in reverse mount order.

        A failed umount triggers killing leftover chroot pids and one
        retry; if any mount still cannot be released, raises
        CatalystError so later cleanup scripts cannot wipe live mounts.
        """
        ouch=0
        mypath=self.settings["chroot_path"]
        myrevmounts=self.mounts[:]
        myrevmounts.reverse()
        """ Unmount in reverse order for nested bind-mounts """
        for x in myrevmounts:
            # Skip paths that were never created or never mounted.
            if not os.path.exists(mypath + self.mountmap[x]):
                continue

            if not ismount(mypath + self.mountmap[x]):
                continue

            retval=os.system("umount "+\
                os.path.join(mypath, self.mountmap[x].lstrip(os.path.sep)))

            if retval!=0:
                warn("First attempt to unmount: " + mypath +
                    self.mountmap[x] +" failed.")
                warn("Killing any pids still running in the chroot")

                self.kill_chroot_pids()

                # Second (and last) attempt after killing chroot processes.
                retval2=os.system("umount " + mypath + self.mountmap[x])
                if retval2!=0:
                    ouch=1
                    warn("Couldn't umount bind mount: " + mypath + self.mountmap[x])

            if "SNAPCACHE" in self.settings and x == "/usr/portage":
                try:
                    """
                    It's possible the snapshot lock object isn't created yet.
                    This is because mount safety check calls unbind before the
                    target is fully initialized
                    """
                    self.snapshot_lock_object.unlock()
                except:
                    pass
        if ouch:
            """
            if any bind mounts really failed, then we need to raise
            this to potentially prevent an upcoming bash stage cleanup script
            from wiping our bind mounts.
            """
            raise CatalystError,\
                "Couldn't umount one or more bind-mounts; aborting for safety."
6164 +
    def chroot_setup(self):
        """Prepare the chroot for building.

        Reads the chroot's make.conf, applies CBUILD/CHOST/*FLAGS
        overrides, then (unless autoresumed) copies resolv.conf, the
        optional ENVSCRIPT and /etc/hosts into the chroot and rewrites
        /etc/portage/make.conf from the spec settings.
        """
        self.makeconf=read_makeconf(self.settings["chroot_path"]+\
            "/etc/portage/make.conf")
        # Overrides must run even on resume so self.settings is consistent.
        self.override_cbuild()
        self.override_chost()
        self.override_cflags()
        self.override_cxxflags()
        self.override_ldflags()
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"chroot_setup"):
            print "Resume point detected, skipping chroot_setup operation..."
        else:
            print "Setting up chroot..."

            #self.makeconf=read_makeconf(self.settings["chroot_path"]+"/etc/portage/make.conf")

            cmd("cp /etc/resolv.conf "+self.settings["chroot_path"]+"/etc",\
                "Could not copy resolv.conf into place.",env=self.env)

            """ Copy over the envscript, if applicable """
            if "ENVSCRIPT" in self.settings:
                if not os.path.exists(self.settings["ENVSCRIPT"]):
                    raise CatalystError,\
                        "Can't find envscript "+self.settings["ENVSCRIPT"]

                print "\nWarning!!!!"
                print "\tOverriding certain env variables may cause catastrophic failure."
                print "\tIf your build fails look here first as the possible problem."
                print "\tCatalyst assumes you know what you are doing when setting"
                print "\t\tthese variables."
                print "\tCatalyst Maintainers use VERY minimal envscripts if used at all"
                print "\tYou have been warned\n"

                cmd("cp "+self.settings["ENVSCRIPT"]+" "+\
                    self.settings["chroot_path"]+"/tmp/envscript",\
                    "Could not copy envscript into place.",env=self.env)

            """
            Copy over /etc/hosts from the host in case there are any
            specialties in there
            """
            # Original is preserved as hosts.catalyst and restored by clean().
            if os.path.exists(self.settings["chroot_path"]+"/etc/hosts"):
                cmd("mv "+self.settings["chroot_path"]+"/etc/hosts "+\
                    self.settings["chroot_path"]+"/etc/hosts.catalyst",\
                    "Could not backup /etc/hosts",env=self.env)
                cmd("cp /etc/hosts "+self.settings["chroot_path"]+"/etc/hosts",\
                    "Could not copy /etc/hosts",env=self.env)

            """ Modify and write out make.conf (for the chroot) """
            cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.conf",\
                "Could not remove "+self.settings["chroot_path"]+\
                "/etc/portage/make.conf",env=self.env)
            myf=open(self.settings["chroot_path"]+"/etc/portage/make.conf","w")
            myf.write("# These settings were set by the catalyst build script that automatically\n# built this stage.\n")
            myf.write("# Please consult /usr/share/portage/config/make.conf.example for a more\n# detailed example.\n")
            if "CFLAGS" in self.settings:
                myf.write('CFLAGS="'+self.settings["CFLAGS"]+'"\n')
            # CXXFLAGS mirrors CFLAGS unless explicitly different.
            if "CXXFLAGS" in self.settings:
                if self.settings["CXXFLAGS"]!=self.settings["CFLAGS"]:
                    myf.write('CXXFLAGS="'+self.settings["CXXFLAGS"]+'"\n')
                else:
                    myf.write('CXXFLAGS="${CFLAGS}"\n')
            else:
                myf.write('CXXFLAGS="${CFLAGS}"\n')

            if "LDFLAGS" in self.settings:
                myf.write("# LDFLAGS is unsupported. USE AT YOUR OWN RISK!\n")
                myf.write('LDFLAGS="'+self.settings["LDFLAGS"]+'"\n')
            if "CBUILD" in self.settings:
                myf.write("# This should not be changed unless you know exactly what you are doing. You\n# should probably be using a different stage, instead.\n")
                myf.write('CBUILD="'+self.settings["CBUILD"]+'"\n')

            myf.write("# WARNING: Changing your CHOST is not something that should be done lightly.\n# Please consult http://www.gentoo.org/doc/en/change-chost.xml before changing.\n")
            myf.write('CHOST="'+self.settings["CHOST"]+'"\n')

            """ Figure out what our USE vars are for building """
            myusevars=[]
            if "HOSTUSE" in self.settings:
                myusevars.extend(self.settings["HOSTUSE"])

            if "use" in self.settings:
                myusevars.extend(self.settings["use"])

            if myusevars:
                myf.write("# These are the USE flags that were used in addition to what is provided by the\n# profile used for building.\n")
                # Deduplicate and sort for a stable make.conf.
                myusevars = sorted(set(myusevars))
                myf.write('USE="'+string.join(myusevars)+'"\n')
                if '-*' in myusevars:
                    print "\nWarning!!! "
                    print "\tThe use of -* in "+self.settings["spec_prefix"]+\
                        "/use will cause portage to ignore"
                    print "\tpackage.use in the profile and portage_confdir. You've been warned!"

            myf.write('PORTDIR="%s"\n' % self.settings['portdir'])
            myf.write('DISTDIR="%s"\n' % self.settings['distdir'])
            myf.write('PKGDIR="%s"\n' % self.settings['packagedir'])

            """ Setup the portage overlay """
            if "portage_overlay" in self.settings:
                myf.write('PORTDIR_OVERLAY="/usr/local/portage"\n')

            myf.close()
            # Keep a pristine copy; clean() strips PORTDIR_OVERLAY later.
            cmd("cp "+self.settings["chroot_path"]+"/etc/portage/make.conf "+\
                self.settings["chroot_path"]+"/etc/portage/make.conf.catalyst",\
                "Could not backup /etc/portage/make.conf",env=self.env)
            touch(self.settings["autoresume_path"]+"chroot_setup")
6271 +
6272 + def fsscript(self):
6273 + if "AUTORESUME" in self.settings \
6274 + and os.path.exists(self.settings["autoresume_path"]+"fsscript"):
6275 + print "Resume point detected, skipping fsscript operation..."
6276 + else:
6277 + if "fsscript" in self.settings:
6278 + if os.path.exists(self.settings["controller_file"]):
6279 + cmd("/bin/bash "+self.settings["controller_file"]+\
6280 + " fsscript","fsscript script failed.",env=self.env)
6281 + touch(self.settings["autoresume_path"]+"fsscript")
6282 +
6283 + def rcupdate(self):
6284 + if "AUTORESUME" in self.settings \
6285 + and os.path.exists(self.settings["autoresume_path"]+"rcupdate"):
6286 + print "Resume point detected, skipping rcupdate operation..."
6287 + else:
6288 + if os.path.exists(self.settings["controller_file"]):
6289 + cmd("/bin/bash "+self.settings["controller_file"]+" rc-update",\
6290 + "rc-update script failed.",env=self.env)
6291 + touch(self.settings["autoresume_path"]+"rcupdate")
6292 +
    def clean(self):
        """Remove cleanable paths from the stage, restore /etc/hosts,
        strip the build overlay, and run the controller "clean" stage.

        Only the cleanables loop honors the autoresume point; the
        restore/overlay/controller steps always run.
        """
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"clean"):
            print "Resume point detected, skipping clean operation..."
        else:
            for x in self.settings["cleanables"]:
                print "Cleaning chroot: "+x+"... "
                cmd("rm -rf "+self.settings["destpath"]+x,"Couldn't clean "+\
                    x,env=self.env)

        """ Put /etc/hosts back into place """
        # Reverses the backup made in chroot_setup().
        if os.path.exists(self.settings["chroot_path"]+"/etc/hosts.catalyst"):
            cmd("mv -f "+self.settings["chroot_path"]+"/etc/hosts.catalyst "+\
                self.settings["chroot_path"]+"/etc/hosts",\
                "Could not replace /etc/hosts",env=self.env)

        """ Remove our overlay """
        if os.path.exists(self.settings["chroot_path"] + self.settings["local_overlay"]):
            cmd("rm -rf " + self.settings["chroot_path"] + self.settings["local_overlay"],
                "Could not remove " + self.settings["local_overlay"], env=self.env)
            cmd("sed -i '/^PORTDIR_OVERLAY/d' "+self.settings["chroot_path"]+\
                "/etc/portage/make.conf",\
                "Could not remove PORTDIR_OVERLAY from make.conf",env=self.env)

        """ Clean up old and obsoleted files in /etc """
        if os.path.exists(self.settings["stage_path"]+"/etc"):
            cmd("find "+self.settings["stage_path"]+\
                "/etc -maxdepth 1 -name \"*-\" | xargs rm -f",\
                "Could not remove stray files in /etc",env=self.env)

        if os.path.exists(self.settings["controller_file"]):
            cmd("/bin/bash "+self.settings["controller_file"]+" clean",\
                "clean script failed.",env=self.env)
        touch(self.settings["autoresume_path"]+"clean")
6327 +
    def empty(self):
        """Empty each directory listed in <spec_prefix>/empty inside the
        stage, preserving each directory's mode and ownership."""
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"empty"):
            print "Resume point detected, skipping empty operation..."
        else:
            if self.settings["spec_prefix"]+"/empty" in self.settings:
                # Normalize a whitespace-separated string into a list.
                if type(self.settings[self.settings["spec_prefix"]+\
                    "/empty"])==types.StringType:
                    self.settings[self.settings["spec_prefix"]+"/empty"]=\
                        self.settings[self.settings["spec_prefix"]+\
                        "/empty"].split()
                for x in self.settings[self.settings["spec_prefix"]+"/empty"]:
                    myemp=self.settings["destpath"]+x
                    if not os.path.isdir(myemp) or os.path.islink(myemp):
                        print x,"not a directory or does not exist, skipping 'empty' operation."
                        continue
                    print "Emptying directory",x
                    """
                    stat the dir, delete the dir, recreate the dir and set
                    the proper perms and ownership
                    """
                    mystat=os.stat(myemp)
                    shutil.rmtree(myemp)
                    os.makedirs(myemp,0755)
                    os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
                    os.chmod(myemp,mystat[ST_MODE])
            touch(self.settings["autoresume_path"]+"empty")
6355 +
    def remove(self):
        """Delete each path listed in <spec_prefix>/rm from the chroot,
        then run the controller "clean" stage.

        On any failure the chroot is unbound before re-raising.
        """
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"remove"):
            print "Resume point detected, skipping remove operation..."
        else:
            if self.settings["spec_prefix"]+"/rm" in self.settings:
                for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
                    """
                    We're going to shell out for all these cleaning
                    operations, so we get easy glob handling.
                    """
                    print "livecd: removing "+x
                    os.system("rm -rf "+self.settings["chroot_path"]+x)
            try:
                if os.path.exists(self.settings["controller_file"]):
                    cmd("/bin/bash "+self.settings["controller_file"]+\
                        " clean","Clean failed.",env=self.env)
                    touch(self.settings["autoresume_path"]+"remove")
            except:
                self.unbind()
                raise
6377 +
    def preclean(self):
        """Run the controller "preclean" stage; on failure unbind the
        chroot and raise CatalystError."""
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"preclean"):
            print "Resume point detected, skipping preclean operation..."
        else:
            try:
                if os.path.exists(self.settings["controller_file"]):
                    cmd("/bin/bash "+self.settings["controller_file"]+\
                        " preclean","preclean script failed.",env=self.env)
                    touch(self.settings["autoresume_path"]+"preclean")

            except:
                self.unbind()
                raise CatalystError, "Build failed, could not execute preclean"
6392 +
    def capture(self):
        """Tar up the finished stage (lbzip2-compressed) into
        target_path and emit CONTENTS/DIGESTS files for it."""
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"capture"):
            print "Resume point detected, skipping capture operation..."
        else:
            """ Capture target in a tarball """
            mypath=self.settings["target_path"].split("/")
            """ Remove filename from path """
            mypath=string.join(mypath[:-1],"/")

            """ Now make sure path exists """
            if not os.path.exists(mypath):
                os.makedirs(mypath)

            print "Creating stage tarball..."

            # NOTE(review): always compresses with lbzip2 regardless of the
            # target_path suffix — confirm that is intended.
            cmd("tar -I lbzip2 -cpf "+self.settings["target_path"]+" -C "+\
                self.settings["stage_path"]+" .",\
                "Couldn't create stage tarball",env=self.env)

            self.gen_contents_file(self.settings["target_path"])
            self.gen_digest_file(self.settings["target_path"])

            touch(self.settings["autoresume_path"]+"capture")
6417 +
    def run_local(self):
        """Run the controller "run" stage (the target's main build);
        on CatalystError unbind the chroot and abort."""
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"run_local"):
            print "Resume point detected, skipping run_local operation..."
        else:
            try:
                if os.path.exists(self.settings["controller_file"]):
                    cmd("/bin/bash "+self.settings["controller_file"]+" run",\
                        "run script failed.",env=self.env)
                    touch(self.settings["autoresume_path"]+"run_local")

            except CatalystError:
                self.unbind()
                raise CatalystError,"Stage build aborting due to error."
6432 +
6433 + def setup_environment(self):
6434 + """
6435 + Modify the current environment. This is an ugly hack that should be
6436 + fixed. We need this to use the os.system() call since we can't
6437 + specify our own environ
6438 + """
6439 + for x in self.settings.keys():
6440 + """ Sanitize var names by doing "s|/-.|_|g" """
6441 + varname="clst_"+string.replace(x,"/","_")
6442 + varname=string.replace(varname,"-","_")
6443 + varname=string.replace(varname,".","_")
6444 + if type(self.settings[x])==types.StringType:
6445 + """ Prefix to prevent namespace clashes """
6446 + #os.environ[varname]=self.settings[x]
6447 + self.env[varname]=self.settings[x]
6448 + elif type(self.settings[x])==types.ListType:
6449 + #os.environ[varname]=string.join(self.settings[x])
6450 + self.env[varname]=string.join(self.settings[x])
6451 + elif type(self.settings[x])==types.BooleanType:
6452 + if self.settings[x]:
6453 + self.env[varname]="true"
6454 + else:
6455 + self.env[varname]="false"
6456 + if "makeopts" in self.settings:
6457 + self.env["MAKEOPTS"]=self.settings["makeopts"]
6458 +
    def run(self):
        """Main driver: lock the chroot, sanity-check mounts, handle the
        PURGE* modes, then execute each method named in action_sequence.

        A purge-only mode (PURGETMPONLY/PURGEONLY) returns early.
        """
        self.chroot_lock.write_lock()

        """ Kill any pids in the chroot "" """
        self.kill_chroot_pids()

        """ Check for mounts right away and abort if we cannot unmount them """
        self.mount_safety_check()

        if "CLEAR_AUTORESUME" in self.settings:
            self.clear_autoresume()

        if "PURGETMPONLY" in self.settings:
            self.purge()
            return

        if "PURGEONLY" in self.settings:
            self.purge()
            return

        if "PURGE" in self.settings:
            self.purge()

        for x in self.settings["action_sequence"]:
            print "--- Running action sequence: "+x
            sys.stdout.flush()
            try:
                # Dispatch to the method whose name matches the action.
                apply(getattr(self,x))
            except:
                # Make sure no bind-mounts survive a failed action.
                self.mount_safety_check()
                raise

        self.chroot_lock.unlock()
6492 +
    def unmerge(self):
        """Unmerge the packages listed in <spec_prefix>/unmerge via the
        controller script, quoting each atom for bash."""
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"unmerge"):
            print "Resume point detected, skipping unmerge operation..."
        else:
            if self.settings["spec_prefix"]+"/unmerge" in self.settings:
                # Normalize a single string into a one-element list.
                if type(self.settings[self.settings["spec_prefix"]+\
                    "/unmerge"])==types.StringType:
                    self.settings[self.settings["spec_prefix"]+"/unmerge"]=\
                        [self.settings[self.settings["spec_prefix"]+"/unmerge"]]
                myunmerge=\
                    self.settings[self.settings["spec_prefix"]+"/unmerge"][:]

                for x in range(0,len(myunmerge)):
                    """
                    Surround args with quotes for passing to bash, allows
                    things like "<" to remain intact
                    """
                    myunmerge[x]="'"+myunmerge[x]+"'"
                myunmerge=string.join(myunmerge)

                """ Before cleaning, unmerge stuff """
                try:
                    cmd("/bin/bash "+self.settings["controller_file"]+\
                        " unmerge "+ myunmerge,"Unmerge script failed.",\
                        env=self.env)
                    print "unmerge shell script"
                except CatalystError:
                    self.unbind()
                    raise
                touch(self.settings["autoresume_path"]+"unmerge")
6524 +
6525 + def target_setup(self):
6526 + if "AUTORESUME" in self.settings \
6527 + and os.path.exists(self.settings["autoresume_path"]+"target_setup"):
6528 + print "Resume point detected, skipping target_setup operation..."
6529 + else:
6530 + print "Setting up filesystems per filesystem type"
6531 + cmd("/bin/bash "+self.settings["controller_file"]+\
6532 + " target_image_setup "+ self.settings["target_path"],\
6533 + "target_image_setup script failed.",env=self.env)
6534 + touch(self.settings["autoresume_path"]+"target_setup")
6535 +
6536 + def setup_overlay(self):
6537 + if "AUTORESUME" in self.settings \
6538 + and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
6539 + print "Resume point detected, skipping setup_overlay operation..."
6540 + else:
6541 + if self.settings["spec_prefix"]+"/overlay" in self.settings:
6542 + for x in self.settings[self.settings["spec_prefix"]+"/overlay"]:
6543 + if os.path.exists(x):
6544 + cmd("rsync -a "+x+"/ "+\
6545 + self.settings["target_path"],\
6546 + self.settings["spec_prefix"]+"overlay: "+x+\
6547 + " copy failed.",env=self.env)
6548 + touch(self.settings["autoresume_path"]+"setup_overlay")
6549 +
    def create_iso(self):
        """Run the controller "iso" stage to build the ISO named by the
        "iso" setting, then generate CONTENTS/DIGESTS for it; warn and
        skip if no ISO name was configured."""
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"create_iso"):
            print "Resume point detected, skipping create_iso operation..."
        else:
            """ Create the ISO """
            if "iso" in self.settings:
                cmd("/bin/bash "+self.settings["controller_file"]+" iso "+\
                    self.settings["iso"],"ISO creation script failed.",\
                    env=self.env)
                self.gen_contents_file(self.settings["iso"])
                self.gen_digest_file(self.settings["iso"])
                touch(self.settings["autoresume_path"]+"create_iso")
            else:
                print "WARNING: livecd/iso was not defined."
                print "An ISO Image will not be created."
6566 +
6567 + def build_packages(self):
6568 + if "AUTORESUME" in self.settings \
6569 + and os.path.exists(self.settings["autoresume_path"]+\
6570 + "build_packages"):
6571 + print "Resume point detected, skipping build_packages operation..."
6572 + else:
6573 + if self.settings["spec_prefix"]+"/packages" in self.settings:
6574 + if "AUTORESUME" in self.settings \
6575 + and os.path.exists(self.settings["autoresume_path"]+\
6576 + "build_packages"):
6577 + print "Resume point detected, skipping build_packages operation..."
6578 + else:
6579 + mypack=\
6580 + list_bashify(self.settings[self.settings["spec_prefix"]\
6581 + +"/packages"])
6582 + try:
6583 + cmd("/bin/bash "+self.settings["controller_file"]+\
6584 + " build_packages "+mypack,\
6585 + "Error in attempt to build packages",env=self.env)
6586 + touch(self.settings["autoresume_path"]+"build_packages")
6587 + except CatalystError:
6588 + self.unbind()
6589 + raise CatalystError,self.settings["spec_prefix"]+\
6590 + "build aborting due to error."
6591 +
    def build_kernel(self):
        """Build all configured kernels.

        Runs the controller "pre-kmerge" stage once, then builds each
        kernel named under boot/kernel via _build_kernel(); on failure
        unbinds the chroot and raises CatalystError.
        """
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]+"build_kernel"):
            print "Resume point detected, skipping build_kernel operation..."
        else:
            if "boot/kernel" in self.settings:
                try:
                    mynames=self.settings["boot/kernel"]
                    # Normalize a single kernel name into a list.
                    if type(mynames)==types.StringType:
                        mynames=[mynames]
                    """
                    Execute the script that sets up the kernel build environment
                    """
                    cmd("/bin/bash "+self.settings["controller_file"]+\
                        " pre-kmerge ","Runscript pre-kmerge failed",\
                        env=self.env)
                    for kname in mynames:
                        self._build_kernel(kname=kname)
                    touch(self.settings["autoresume_path"]+"build_kernel")
                except CatalystError:
                    self.unbind()
                    raise CatalystError,\
                        "build aborting due to kernel build error."
6616 +
    def _build_kernel(self, kname):
        """Build a single configured kernel by name.

        Copies its config and optional initramfs overlay into the
        chroot, exports per-kernel env vars, then runs the controller
        "kernel" and "post-kmerge" stages. Per-kernel autoresume point:
        build_kernel_<kname>.
        """
        if "AUTORESUME" in self.settings \
            and os.path.exists(self.settings["autoresume_path"]\
                +"build_kernel_"+kname):
            print "Resume point detected, skipping build_kernel for "+kname+" operation..."
            return
        self._copy_kernel_config(kname=kname)

        """
        If we need to pass special options to the bootloader
        for this kernel put them into the environment
        """
        if "boot/kernel/"+kname+"/kernelopts" in self.settings:
            myopts=self.settings["boot/kernel/"+kname+\
                "/kernelopts"]

            # A list of options is joined into one space-separated string.
            if type(myopts) != types.StringType:
                myopts = string.join(myopts)
            self.env[kname+"_kernelopts"]=myopts

        else:
            self.env[kname+"_kernelopts"]=""

        # Default extraversion to empty so the env var below always exists.
        if "boot/kernel/"+kname+"/extraversion" not in self.settings:
            self.settings["boot/kernel/"+kname+\
                "/extraversion"]=""

        self.env["clst_kextraversion"]=\
            self.settings["boot/kernel/"+kname+\
                "/extraversion"]

        self._copy_initramfs_overlay(kname=kname)

        """ Execute the script that builds the kernel """
        cmd("/bin/bash "+self.settings["controller_file"]+\
            " kernel "+kname,\
            "Runscript kernel build failed",env=self.env)

        if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
            if os.path.exists(self.settings["chroot_path"]+\
                "/tmp/initramfs_overlay/"):
                print "Cleaning up temporary overlay dir"
                cmd("rm -R "+self.settings["chroot_path"]+\
                    "/tmp/initramfs_overlay/",env=self.env)

        touch(self.settings["autoresume_path"]+\
            "build_kernel_"+kname)

        """
        Execute the script that cleans up the kernel build
        environment
        """
        cmd("/bin/bash "+self.settings["controller_file"]+\
            " post-kmerge ",
            "Runscript post-kmerge failed",env=self.env)
6673 +
    def _copy_kernel_config(self, kname):
        """Copy the spec's kernel config for <kname> into the chroot as
        /var/tmp/<kname>.config; missing config file is fatal."""
        if "boot/kernel/"+kname+"/config" in self.settings:
            if not os.path.exists(self.settings["boot/kernel/"+kname+"/config"]):
                self.unbind()
                raise CatalystError,\
                    "Can't find kernel config: "+\
                    self.settings["boot/kernel/"+kname+\
                    "/config"]

            try:
                cmd("cp "+self.settings["boot/kernel/"+kname+\
                    "/config"]+" "+\
                    self.settings["chroot_path"]+"/var/tmp/"+\
                    kname+".config",\
                    "Couldn't copy kernel config: "+\
                    self.settings["boot/kernel/"+kname+\
                    "/config"],env=self.env)

            except CatalystError:
                # NOTE(review): the error is swallowed after unbinding —
                # no re-raise, so a failed copy continues the build.
                # Confirm this is intentional.
                self.unbind()
6694 +
    def _copy_initramfs_overlay(self, kname):
        """Copy the spec's initramfs overlay for <kname> into the
        chroot under /tmp/initramfs_overlay/<overlay_path>."""
        if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
            if os.path.exists(self.settings["boot/kernel/"+\
                kname+"/initramfs_overlay"]):
                print "Copying initramfs_overlay dir "+\
                    self.settings["boot/kernel/"+kname+\
                    "/initramfs_overlay"]

                cmd("mkdir -p "+\
                    self.settings["chroot_path"]+\
                    "/tmp/initramfs_overlay/"+\
                    self.settings["boot/kernel/"+kname+\
                    "/initramfs_overlay"],env=self.env)

                cmd("cp -R "+self.settings["boot/kernel/"+\
                    kname+"/initramfs_overlay"]+"/* "+\
                    self.settings["chroot_path"]+\
                    "/tmp/initramfs_overlay/"+\
                    self.settings["boot/kernel/"+kname+\
                    "/initramfs_overlay"],env=self.env)
6715 +
6716 + def bootloader(self):
6717 + if "AUTORESUME" in self.settings \
6718 + and os.path.exists(self.settings["autoresume_path"]+"bootloader"):
6719 + print "Resume point detected, skipping bootloader operation..."
6720 + else:
6721 + try:
6722 + cmd("/bin/bash "+self.settings["controller_file"]+\
6723 + " bootloader " + self.settings["target_path"],\
6724 + "Bootloader script failed.",env=self.env)
6725 + touch(self.settings["autoresume_path"]+"bootloader")
6726 + except CatalystError:
6727 + self.unbind()
6728 + raise CatalystError,"Script aborting due to error."
6729 +
6730 + def livecd_update(self):
6731 + if "AUTORESUME" in self.settings \
6732 + and os.path.exists(self.settings["autoresume_path"]+\
6733 + "livecd_update"):
6734 + print "Resume point detected, skipping build_packages operation..."
6735 + else:
6736 + try:
6737 + cmd("/bin/bash "+self.settings["controller_file"]+\
6738 + " livecd-update","livecd-update failed.",env=self.env)
6739 + touch(self.settings["autoresume_path"]+"livecd_update")
6740 +
6741 + except CatalystError:
6742 + self.unbind()
6743 + raise CatalystError,"build aborting due to livecd_update error."
6744 +
    def clear_chroot(self):
        """Delete and recreate the chroot directory, restoring its
        original mode and ownership."""
        myemp=self.settings["chroot_path"]
        if os.path.isdir(myemp):
            print "Emptying directory",myemp
            """
            stat the dir, delete the dir, recreate the dir and set
            the proper perms and ownership
            """
            mystat=os.stat(myemp)
            #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
            """ There's no easy way to change flags recursively in python """
            # FreeBSD schg-flagged files would make rmtree fail.
            if os.uname()[0] == "FreeBSD":
                os.system("chflags -R noschg "+myemp)
            shutil.rmtree(myemp)
            os.makedirs(myemp,0755)
            os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
            os.chmod(myemp,mystat[ST_MODE])
6762 +
    def clear_packages(self):
        """When PKGCACHE is enabled, delete and recreate pkgcache_path,
        restoring its original mode and ownership."""
        if "PKGCACHE" in self.settings:
            print "purging the pkgcache ..."

            myemp=self.settings["pkgcache_path"]
            if os.path.isdir(myemp):
                print "Emptying directory",myemp
                """
                stat the dir, delete the dir, recreate the dir and set
                the proper perms and ownership
                """
                mystat=os.stat(myemp)
                #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
                shutil.rmtree(myemp)
                os.makedirs(myemp,0755)
                os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
                os.chmod(myemp,mystat[ST_MODE])
6780 +
    def clear_kerncache(self):
        """When KERNCACHE is enabled, delete and recreate
        kerncache_path, restoring its original mode and ownership."""
        if "KERNCACHE" in self.settings:
            print "purging the kerncache ..."

            myemp=self.settings["kerncache_path"]
            if os.path.isdir(myemp):
                print "Emptying directory",myemp
                """
                stat the dir, delete the dir, recreate the dir and set
                the proper perms and ownership
                """
                mystat=os.stat(myemp)
                #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
                shutil.rmtree(myemp)
                os.makedirs(myemp,0755)
                os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
                os.chmod(myemp,mystat[ST_MODE])
6798 +
    def clear_autoresume(self):
        """ Clean resume points since they are no longer needed """
        # The directory is purged even when AUTORESUME is off; the
        # AUTORESUME checks below only gate the progress messages.
        if "AUTORESUME" in self.settings:
            print "Removing AutoResume Points: ..."
        myemp=self.settings["autoresume_path"]
        if os.path.isdir(myemp):
            if "AUTORESUME" in self.settings:
                print "Emptying directory",myemp
            """
            stat the dir, delete the dir, recreate the dir and set
            the proper perms and ownership
            """
            mystat=os.stat(myemp)
            if os.uname()[0] == "FreeBSD":
                cmd("chflags -R noschg "+myemp,\
                    "Could not remove immutable flag for file "\
                    +myemp)
            #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env-self.env)
            shutil.rmtree(myemp)
            os.makedirs(myemp,0755)
            os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
            os.chmod(myemp,mystat[ST_MODE])
6821 +
6822 + def gen_contents_file(self,file):
6823 + if os.path.exists(file+".CONTENTS"):
6824 + os.remove(file+".CONTENTS")
6825 + if "contents" in self.settings:
6826 + if os.path.exists(file):
6827 + myf=open(file+".CONTENTS","w")
6828 + keys={}
6829 + for i in self.settings["contents"].split():
6830 + keys[i]=1
6831 + array=keys.keys()
6832 + array.sort()
6833 + for j in array:
6834 + contents=generate_contents(file,contents_function=j,\
6835 + verbose="VERBOSE" in self.settings)
6836 + if contents:
6837 + myf.write(contents)
6838 + myf.close()
6839 +
6840 + def gen_digest_file(self,file):
6841 + if os.path.exists(file+".DIGESTS"):
6842 + os.remove(file+".DIGESTS")
6843 + if "digests" in self.settings:
6844 + if os.path.exists(file):
6845 + myf=open(file+".DIGESTS","w")
6846 + keys={}
6847 + for i in self.settings["digests"].split():
6848 + keys[i]=1
6849 + array=keys.keys()
6850 + array.sort()
6851 + for f in [file, file+'.CONTENTS']:
6852 + if os.path.exists(f):
6853 + if "all" in array:
6854 + for k in hash_map.keys():
6855 + hash=generate_hash(f,hash_function=k,verbose=\
6856 + "VERBOSE" in self.settings)
6857 + myf.write(hash)
6858 + else:
6859 + for j in array:
6860 + hash=generate_hash(f,hash_function=j,verbose=\
6861 + "VERBOSE" in self.settings)
6862 + myf.write(hash)
6863 + myf.close()
6864 +
6865 + def purge(self):
6866 + countdown(10,"Purging Caches ...")
6867 + if any(k in self.settings for k in ("PURGE","PURGEONLY","PURGETMPONLY")):
6868 + print "clearing autoresume ..."
6869 + self.clear_autoresume()
6870 +
6871 + print "clearing chroot ..."
6872 + self.clear_chroot()
6873 +
6874 + if "PURGETMPONLY" not in self.settings:
6875 + print "clearing package cache ..."
6876 + self.clear_packages()
6877 +
6878 + print "clearing kerncache ..."
6879 + self.clear_kerncache()
6880 +
6881 +# vim: ts=4 sw=4 sta et sts=4 ai
6882 diff --git a/catalyst/modules/generic_target.py b/catalyst/modules/generic_target.py
6883 new file mode 100644
6884 index 0000000..fe96bd7
6885 --- /dev/null
6886 +++ b/catalyst/modules/generic_target.py
6887 @@ -0,0 +1,11 @@
6888 +from catalyst_support import *
6889 +
6890 +class generic_target:
6891 + """
6892 + The toplevel class for generic_stage_target. This is about as generic as we get.
6893 + """
6894 + def __init__(self,myspec,addlargs):
6895 + addl_arg_parse(myspec,addlargs,self.required_values,self.valid_values)
6896 + self.settings=myspec
6897 + self.env={}
6898 + self.env["PATH"]="/bin:/sbin:/usr/bin:/usr/sbin"
6899 diff --git a/catalyst/modules/grp_target.py b/catalyst/modules/grp_target.py
6900 new file mode 100644
6901 index 0000000..6941522
6902 --- /dev/null
6903 +++ b/catalyst/modules/grp_target.py
6904 @@ -0,0 +1,118 @@
6905 +"""
6906 +Gentoo Reference Platform (GRP) target
6907 +"""
6908 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
6909 +
6910 +import os,types,glob
6911 +from catalyst_support import *
6912 +from generic_stage_target import *
6913 +
6914 +class grp_target(generic_stage_target):
6915 + """
6916 + The builder class for GRP (Gentoo Reference Platform) builds.
6917 + """
6918 + def __init__(self,spec,addlargs):
6919 + self.required_values=["version_stamp","target","subarch",\
6920 + "rel_type","profile","snapshot","source_subpath"]
6921 +
6922 + self.valid_values=self.required_values[:]
6923 + self.valid_values.extend(["grp/use"])
6924 + if "grp" not in addlargs:
6925 + raise CatalystError,"Required value \"grp\" not specified in spec."
6926 +
6927 + self.required_values.extend(["grp"])
6928 + if type(addlargs["grp"])==types.StringType:
6929 + addlargs["grp"]=[addlargs["grp"]]
6930 +
6931 + if "grp/use" in addlargs:
6932 + if type(addlargs["grp/use"])==types.StringType:
6933 + addlargs["grp/use"]=[addlargs["grp/use"]]
6934 +
6935 + for x in addlargs["grp"]:
6936 + self.required_values.append("grp/"+x+"/packages")
6937 + self.required_values.append("grp/"+x+"/type")
6938 +
6939 + generic_stage_target.__init__(self,spec,addlargs)
6940 +
6941 + def set_target_path(self):
6942 + self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
6943 + if "AUTORESUME" in self.settings \
6944 + and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
6945 + print "Resume point detected, skipping target path setup operation..."
6946 + else:
6947 + # first clean up any existing target stuff
6948 + #if os.path.isdir(self.settings["target_path"]):
6949 + #cmd("rm -rf "+self.settings["target_path"],
6950 + #"Could not remove existing directory: "+self.settings["target_path"],env=self.env)
6951 + if not os.path.exists(self.settings["target_path"]):
6952 + os.makedirs(self.settings["target_path"])
6953 +
6954 + touch(self.settings["autoresume_path"]+"setup_target_path")
6955 +
6956 + def run_local(self):
6957 + for pkgset in self.settings["grp"]:
6958 + # example call: "grp.sh run pkgset cd1 xmms vim sys-apps/gleep"
6959 + mypackages=list_bashify(self.settings["grp/"+pkgset+"/packages"])
6960 + try:
6961 + cmd("/bin/bash "+self.settings["controller_file"]+" run "+self.settings["grp/"+pkgset+"/type"]\
6962 + +" "+pkgset+" "+mypackages,env=self.env)
6963 +
6964 + except CatalystError:
6965 + self.unbind()
6966 + raise CatalystError,"GRP build aborting due to error."
6967 +
6968 + def set_use(self):
6969 + generic_stage_target.set_use(self)
6970 + if "BINDIST" in self.settings:
6971 + if "use" in self.settings:
6972 + self.settings["use"].append("bindist")
6973 + else:
6974 + self.settings["use"]=["bindist"]
6975 +
6976 + def set_mounts(self):
6977 + self.mounts.append("/tmp/grp")
6978 + self.mountmap["/tmp/grp"]=self.settings["target_path"]
6979 +
6980 + def generate_digests(self):
6981 + for pkgset in self.settings["grp"]:
6982 + if self.settings["grp/"+pkgset+"/type"] == "pkgset":
6983 + destdir=normpath(self.settings["target_path"]+"/"+pkgset+"/All")
6984 + print "Digesting files in the pkgset....."
6985 + digests=glob.glob(destdir+'/*.DIGESTS')
6986 + for i in digests:
6987 + if os.path.exists(i):
6988 + os.remove(i)
6989 +
6990 + files=os.listdir(destdir)
6991 + #ignore files starting with '.' using list comprehension
6992 + files=[filename for filename in files if filename[0] != '.']
6993 + for i in files:
6994 + if os.path.isfile(normpath(destdir+"/"+i)):
6995 + self.gen_contents_file(normpath(destdir+"/"+i))
6996 + self.gen_digest_file(normpath(destdir+"/"+i))
6997 + else:
6998 + destdir=normpath(self.settings["target_path"]+"/"+pkgset)
6999 + print "Digesting files in the srcset....."
7000 +
7001 + digests=glob.glob(destdir+'/*.DIGESTS')
7002 + for i in digests:
7003 + if os.path.exists(i):
7004 + os.remove(i)
7005 +
7006 + files=os.listdir(destdir)
7007 + #ignore files starting with '.' using list comprehension
7008 + files=[filename for filename in files if filename[0] != '.']
7009 + for i in files:
7010 + if os.path.isfile(normpath(destdir+"/"+i)):
7011 + #self.gen_contents_file(normpath(destdir+"/"+i))
7012 + self.gen_digest_file(normpath(destdir+"/"+i))
7013 +
7014 + def set_action_sequence(self):
7015 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
7016 + "config_profile_link","setup_confdir","portage_overlay","bind","chroot_setup",\
7017 + "setup_environment","run_local","unbind",\
7018 + "generate_digests","clear_autoresume"]
7019 +
7020 +def register(foo):
7021 + foo.update({"grp":grp_target})
7022 + return foo
7023 diff --git a/catalyst/modules/livecd_stage1_target.py b/catalyst/modules/livecd_stage1_target.py
7024 new file mode 100644
7025 index 0000000..59de9bb
7026 --- /dev/null
7027 +++ b/catalyst/modules/livecd_stage1_target.py
7028 @@ -0,0 +1,75 @@
7029 +"""
7030 +LiveCD stage1 target
7031 +"""
7032 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7033 +
7034 +from catalyst_support import *
7035 +from generic_stage_target import *
7036 +
7037 +class livecd_stage1_target(generic_stage_target):
7038 + """
7039 + Builder class for LiveCD stage1.
7040 + """
7041 + def __init__(self,spec,addlargs):
7042 + self.required_values=["livecd/packages"]
7043 + self.valid_values=self.required_values[:]
7044 +
7045 + self.valid_values.extend(["livecd/use"])
7046 + generic_stage_target.__init__(self,spec,addlargs)
7047 +
7048 + def set_action_sequence(self):
7049 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
7050 + "config_profile_link","setup_confdir","portage_overlay",\
7051 + "bind","chroot_setup","setup_environment","build_packages",\
7052 + "unbind", "clean","clear_autoresume"]
7053 +
7054 + def set_target_path(self):
7055 + self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"])
7056 + if "AUTORESUME" in self.settings \
7057 + and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
7058 + print "Resume point detected, skipping target path setup operation..."
7059 + else:
7060 + # first clean up any existing target stuff
7061 + if os.path.exists(self.settings["target_path"]):
7062 + cmd("rm -rf "+self.settings["target_path"],\
7063 + "Could not remove existing directory: "+self.settings["target_path"],env=self.env)
7064 + touch(self.settings["autoresume_path"]+"setup_target_path")
7065 +
7066 + if not os.path.exists(self.settings["target_path"]):
7067 + os.makedirs(self.settings["target_path"])
7068 +
7069 + def set_target_path(self):
7070 + pass
7071 +
7072 + def set_spec_prefix(self):
7073 + self.settings["spec_prefix"]="livecd"
7074 +
7075 + def set_use(self):
7076 + generic_stage_target.set_use(self)
7077 + if "use" in self.settings:
7078 + self.settings["use"].append("livecd")
7079 + if "BINDIST" in self.settings:
7080 + self.settings["use"].append("bindist")
7081 + else:
7082 + self.settings["use"]=["livecd"]
7083 + if "BINDIST" in self.settings:
7084 + self.settings["use"].append("bindist")
7085 +
7086 + def set_packages(self):
7087 + generic_stage_target.set_packages(self)
7088 + if self.settings["spec_prefix"]+"/packages" in self.settings:
7089 + if type(self.settings[self.settings["spec_prefix"]+"/packages"]) == types.StringType:
7090 + self.settings[self.settings["spec_prefix"]+"/packages"] = \
7091 + self.settings[self.settings["spec_prefix"]+"/packages"].split()
7092 + self.settings[self.settings["spec_prefix"]+"/packages"].append("app-misc/livecd-tools")
7093 +
7094 + def set_pkgcache_path(self):
7095 + if "pkgcache_path" in self.settings:
7096 + if type(self.settings["pkgcache_path"]) != types.StringType:
7097 + self.settings["pkgcache_path"]=normpath(string.join(self.settings["pkgcache_path"]))
7098 + else:
7099 + generic_stage_target.set_pkgcache_path(self)
7100 +
7101 +def register(foo):
7102 + foo.update({"livecd-stage1":livecd_stage1_target})
7103 + return foo
7104 diff --git a/catalyst/modules/livecd_stage2_target.py b/catalyst/modules/livecd_stage2_target.py
7105 new file mode 100644
7106 index 0000000..5be8fd2
7107 --- /dev/null
7108 +++ b/catalyst/modules/livecd_stage2_target.py
7109 @@ -0,0 +1,146 @@
7110 +"""
7111 +LiveCD stage2 target, builds upon previous LiveCD stage1 tarball
7112 +"""
7113 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7114 +
7115 +import os,string,types,stat,shutil
7116 +from catalyst_support import *
7117 +from generic_stage_target import *
7118 +
7119 +class livecd_stage2_target(generic_stage_target):
7120 + """
7121 + Builder class for a LiveCD stage2 build.
7122 + """
7123 + def __init__(self,spec,addlargs):
7124 + self.required_values=["boot/kernel"]
7125 +
7126 + self.valid_values=[]
7127 +
7128 + self.valid_values.extend(self.required_values)
7129 + self.valid_values.extend(["livecd/cdtar","livecd/empty","livecd/rm",\
7130 + "livecd/unmerge","livecd/iso","livecd/gk_mainargs","livecd/type",\
7131 + "livecd/readme","livecd/motd","livecd/overlay",\
7132 + "livecd/modblacklist","livecd/splash_theme","livecd/rcadd",\
7133 + "livecd/rcdel","livecd/fsscript","livecd/xinitrc",\
7134 + "livecd/root_overlay","livecd/users","portage_overlay",\
7135 + "livecd/fstype","livecd/fsops","livecd/linuxrc","livecd/bootargs",\
7136 + "gamecd/conf","livecd/xdm","livecd/xsession","livecd/volid"])
7137 +
7138 + generic_stage_target.__init__(self,spec,addlargs)
7139 + if "livecd/type" not in self.settings:
7140 + self.settings["livecd/type"] = "generic-livecd"
7141 +
7142 + file_locate(self.settings, ["cdtar","controller_file"])
7143 +
7144 + def set_source_path(self):
7145 + self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
7146 + if os.path.isfile(self.settings["source_path"]):
7147 + self.settings["source_path_hash"]=generate_hash(self.settings["source_path"])
7148 + else:
7149 + self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/")
7150 + if not os.path.exists(self.settings["source_path"]):
7151 + raise CatalystError,"Source Path: "+self.settings["source_path"]+" does not exist."
7152 +
7153 + def set_spec_prefix(self):
7154 + self.settings["spec_prefix"]="livecd"
7155 +
7156 + def set_target_path(self):
7157 + self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
7158 + if "AUTORESUME" in self.settings \
7159 + and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
7160 + print "Resume point detected, skipping target path setup operation..."
7161 + else:
7162 + # first clean up any existing target stuff
7163 + if os.path.isdir(self.settings["target_path"]):
7164 + cmd("rm -rf "+self.settings["target_path"],
7165 + "Could not remove existing directory: "+self.settings["target_path"],env=self.env)
7166 + touch(self.settings["autoresume_path"]+"setup_target_path")
7167 + if not os.path.exists(self.settings["target_path"]):
7168 + os.makedirs(self.settings["target_path"])
7169 +
7170 + def run_local(self):
7171 + # what modules do we want to blacklist?
7172 + if "livecd/modblacklist" in self.settings:
7173 + try:
7174 + myf=open(self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf","a")
7175 + except:
7176 + self.unbind()
7177 + raise CatalystError,"Couldn't open "+self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf."
7178 +
7179 + myf.write("\n#Added by Catalyst:")
7180 + # workaround until config.py is using configparser
7181 + if isinstance(self.settings["livecd/modblacklist"], str):
7182 + self.settings["livecd/modblacklist"] = self.settings["livecd/modblacklist"].split()
7183 + for x in self.settings["livecd/modblacklist"]:
7184 + myf.write("\nblacklist "+x)
7185 + myf.close()
7186 +
7187 + def unpack(self):
7188 + unpack=True
7189 + display_msg=None
7190 +
7191 + clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+"unpack")
7192 +
7193 + if os.path.isdir(self.settings["source_path"]):
7194 + unpack_cmd="rsync -a --delete "+self.settings["source_path"]+" "+self.settings["chroot_path"]
7195 + display_msg="\nStarting rsync from "+self.settings["source_path"]+"\nto "+\
7196 + self.settings["chroot_path"]+" (This may take some time) ...\n"
7197 + error_msg="Rsync of "+self.settings["source_path"]+" to "+self.settings["chroot_path"]+" failed."
7198 + invalid_snapshot=False
7199 +
7200 + if "AUTORESUME" in self.settings:
7201 + if os.path.isdir(self.settings["source_path"]) and \
7202 + os.path.exists(self.settings["autoresume_path"]+"unpack"):
7203 + print "Resume point detected, skipping unpack operation..."
7204 + unpack=False
7205 + elif "source_path_hash" in self.settings:
7206 + if self.settings["source_path_hash"] != clst_unpack_hash:
7207 + invalid_snapshot=True
7208 +
7209 + if unpack:
7210 + self.mount_safety_check()
7211 + if invalid_snapshot:
7212 + print "No Valid Resume point detected, cleaning up ..."
7213 + #os.remove(self.settings["autoresume_path"]+"dir_setup")
7214 + self.clear_autoresume()
7215 + self.clear_chroot()
7216 + #self.dir_setup()
7217 +
7218 + if not os.path.exists(self.settings["chroot_path"]):
7219 + os.makedirs(self.settings["chroot_path"])
7220 +
7221 + if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
7222 + os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
7223 +
7224 + if "PKGCACHE" in self.settings:
7225 + if not os.path.exists(self.settings["pkgcache_path"]):
7226 + os.makedirs(self.settings["pkgcache_path"],0755)
7227 +
7228 + if not display_msg:
7229 + raise CatalystError,"Could not find appropriate source. Please check the 'source_subpath' setting in the spec file."
7230 +
7231 + print display_msg
7232 + cmd(unpack_cmd,error_msg,env=self.env)
7233 +
7234 + if "source_path_hash" in self.settings:
7235 + myf=open(self.settings["autoresume_path"]+"unpack","w")
7236 + myf.write(self.settings["source_path_hash"])
7237 + myf.close()
7238 + else:
7239 + touch(self.settings["autoresume_path"]+"unpack")
7240 +
7241 + def set_action_sequence(self):
7242 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
7243 + "config_profile_link","setup_confdir","portage_overlay",\
7244 + "bind","chroot_setup","setup_environment","run_local",\
7245 + "build_kernel"]
7246 + if "FETCH" not in self.settings:
7247 + self.settings["action_sequence"] += ["bootloader","preclean",\
7248 + "livecd_update","root_overlay","fsscript","rcupdate","unmerge",\
7249 + "unbind","remove","empty","target_setup",\
7250 + "setup_overlay","create_iso"]
7251 + self.settings["action_sequence"].append("clear_autoresume")
7252 +
7253 +def register(foo):
7254 + foo.update({"livecd-stage2":livecd_stage2_target})
7255 + return foo
7256 diff --git a/catalyst/modules/netboot2_target.py b/catalyst/modules/netboot2_target.py
7257 new file mode 100644
7258 index 0000000..1ab7e7d
7259 --- /dev/null
7260 +++ b/catalyst/modules/netboot2_target.py
7261 @@ -0,0 +1,166 @@
7262 +"""
7263 +netboot target, version 2
7264 +"""
7265 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7266 +
7267 +import os,string,types
7268 +from catalyst_support import *
7269 +from generic_stage_target import *
7270 +
7271 +class netboot2_target(generic_stage_target):
7272 + """
7273 + Builder class for a netboot build, version 2
7274 + """
7275 + def __init__(self,spec,addlargs):
7276 + self.required_values=[
7277 + "boot/kernel"
7278 + ]
7279 + self.valid_values=self.required_values[:]
7280 + self.valid_values.extend([
7281 + "netboot2/packages",
7282 + "netboot2/use",
7283 + "netboot2/extra_files",
7284 + "netboot2/overlay",
7285 + "netboot2/busybox_config",
7286 + "netboot2/root_overlay",
7287 + "netboot2/linuxrc"
7288 + ])
7289 +
7290 + try:
7291 + if "netboot2/packages" in addlargs:
7292 + if type(addlargs["netboot2/packages"]) == types.StringType:
7293 + loopy=[addlargs["netboot2/packages"]]
7294 + else:
7295 + loopy=addlargs["netboot2/packages"]
7296 +
7297 + for x in loopy:
7298 + self.valid_values.append("netboot2/packages/"+x+"/files")
7299 + except:
7300 + raise CatalystError,"configuration error in netboot2/packages."
7301 +
7302 + generic_stage_target.__init__(self,spec,addlargs)
7303 + self.set_build_kernel_vars()
7304 + self.settings["merge_path"]=normpath("/tmp/image/")
7305 +
7306 + def set_target_path(self):
7307 + self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+\
7308 + self.settings["target_subpath"]+"/")
7309 + if "AUTORESUME" in self.settings \
7310 + and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
7311 + print "Resume point detected, skipping target path setup operation..."
7312 + else:
7313 + # first clean up any existing target stuff
7314 + if os.path.isfile(self.settings["target_path"]):
7315 + cmd("rm -f "+self.settings["target_path"], \
7316 + "Could not remove existing file: "+self.settings["target_path"],env=self.env)
7317 + touch(self.settings["autoresume_path"]+"setup_target_path")
7318 +
7319 + if not os.path.exists(self.settings["storedir"]+"/builds/"):
7320 + os.makedirs(self.settings["storedir"]+"/builds/")
7321 +
7322 + def copy_files_to_image(self):
7323 + # copies specific files from the buildroot to merge_path
7324 + myfiles=[]
7325 +
7326 + # check for autoresume point
7327 + if "AUTORESUME" in self.settings \
7328 + and os.path.exists(self.settings["autoresume_path"]+"copy_files_to_image"):
7329 + print "Resume point detected, skipping target path setup operation..."
7330 + else:
7331 + if "netboot2/packages" in self.settings:
7332 + if type(self.settings["netboot2/packages"]) == types.StringType:
7333 + loopy=[self.settings["netboot2/packages"]]
7334 + else:
7335 + loopy=self.settings["netboot2/packages"]
7336 +
7337 + for x in loopy:
7338 + if "netboot2/packages/"+x+"/files" in self.settings:
7339 + if type(self.settings["netboot2/packages/"+x+"/files"]) == types.ListType:
7340 + myfiles.extend(self.settings["netboot2/packages/"+x+"/files"])
7341 + else:
7342 + myfiles.append(self.settings["netboot2/packages/"+x+"/files"])
7343 +
7344 + if "netboot2/extra_files" in self.settings:
7345 + if type(self.settings["netboot2/extra_files"]) == types.ListType:
7346 + myfiles.extend(self.settings["netboot2/extra_files"])
7347 + else:
7348 + myfiles.append(self.settings["netboot2/extra_files"])
7349 +
7350 + try:
7351 + cmd("/bin/bash "+self.settings["controller_file"]+\
7352 + " image " + list_bashify(myfiles),env=self.env)
7353 + except CatalystError:
7354 + self.unbind()
7355 + raise CatalystError,"Failed to copy files to image!"
7356 +
7357 + touch(self.settings["autoresume_path"]+"copy_files_to_image")
7358 +
7359 + def setup_overlay(self):
7360 + if "AUTORESUME" in self.settings \
7361 + and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
7362 + print "Resume point detected, skipping setup_overlay operation..."
7363 + else:
7364 + if "netboot2/overlay" in self.settings:
7365 + for x in self.settings["netboot2/overlay"]:
7366 + if os.path.exists(x):
7367 + cmd("rsync -a "+x+"/ "+\
7368 + self.settings["chroot_path"] + self.settings["merge_path"], "netboot2/overlay: "+x+" copy failed.",env=self.env)
7369 + touch(self.settings["autoresume_path"]+"setup_overlay")
7370 +
7371 + def move_kernels(self):
7372 + # we're done, move the kernels to builds/*
7373 + # no auto resume here as we always want the
7374 + # freshest images moved
7375 + try:
7376 + cmd("/bin/bash "+self.settings["controller_file"]+\
7377 + " final",env=self.env)
7378 + print ">>> Netboot Build Finished!"
7379 + except CatalystError:
7380 + self.unbind()
7381 + raise CatalystError,"Failed to move kernel images!"
7382 +
7383 + def remove(self):
7384 + if "AUTORESUME" in self.settings \
7385 + and os.path.exists(self.settings["autoresume_path"]+"remove"):
7386 + print "Resume point detected, skipping remove operation..."
7387 + else:
7388 + if self.settings["spec_prefix"]+"/rm" in self.settings:
7389 + for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
7390 + # we're going to shell out for all these cleaning operations,
7391 + # so we get easy glob handling
7392 + print "netboot2: removing " + x
7393 + os.system("rm -rf " + self.settings["chroot_path"] + self.settings["merge_path"] + x)
7394 +
7395 + def empty(self):
7396 + if "AUTORESUME" in self.settings \
7397 + and os.path.exists(self.settings["autoresume_path"]+"empty"):
7398 + print "Resume point detected, skipping empty operation..."
7399 + else:
7400 + if "netboot2/empty" in self.settings:
7401 + if type(self.settings["netboot2/empty"])==types.StringType:
7402 + self.settings["netboot2/empty"]=self.settings["netboot2/empty"].split()
7403 + for x in self.settings["netboot2/empty"]:
7404 + myemp=self.settings["chroot_path"] + self.settings["merge_path"] + x
7405 + if not os.path.isdir(myemp):
7406 + print x,"not a directory or does not exist, skipping 'empty' operation."
7407 + continue
7408 + print "Emptying directory", x
7409 + # stat the dir, delete the dir, recreate the dir and set
7410 + # the proper perms and ownership
7411 + mystat=os.stat(myemp)
7412 + shutil.rmtree(myemp)
7413 + os.makedirs(myemp,0755)
7414 + os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
7415 + os.chmod(myemp,mystat[ST_MODE])
7416 + touch(self.settings["autoresume_path"]+"empty")
7417 +
7418 + def set_action_sequence(self):
7419 + self.settings["action_sequence"]=["unpack","unpack_snapshot","config_profile_link",
7420 + "setup_confdir","portage_overlay","bind","chroot_setup",\
7421 + "setup_environment","build_packages","root_overlay",\
7422 + "copy_files_to_image","setup_overlay","build_kernel","move_kernels",\
7423 + "remove","empty","unbind","clean","clear_autoresume"]
7424 +
7425 +def register(foo):
7426 + foo.update({"netboot2":netboot2_target})
7427 + return foo
7428 diff --git a/catalyst/modules/netboot_target.py b/catalyst/modules/netboot_target.py
7429 new file mode 100644
7430 index 0000000..ff2c81f
7431 --- /dev/null
7432 +++ b/catalyst/modules/netboot_target.py
7433 @@ -0,0 +1,128 @@
7434 +"""
7435 +netboot target, version 1
7436 +"""
7437 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7438 +
7439 +import os,string,types
7440 +from catalyst_support import *
7441 +from generic_stage_target import *
7442 +
7443 +class netboot_target(generic_stage_target):
7444 + """
7445 + Builder class for a netboot build.
7446 + """
7447 + def __init__(self,spec,addlargs):
7448 + self.valid_values = [
7449 + "netboot/kernel/sources",
7450 + "netboot/kernel/config",
7451 + "netboot/kernel/prebuilt",
7452 +
7453 + "netboot/busybox_config",
7454 +
7455 + "netboot/extra_files",
7456 + "netboot/packages"
7457 + ]
7458 + self.required_values=[]
7459 +
7460 + try:
7461 + if "netboot/packages" in addlargs:
7462 + if type(addlargs["netboot/packages"]) == types.StringType:
7463 + loopy=[addlargs["netboot/packages"]]
7464 + else:
7465 + loopy=addlargs["netboot/packages"]
7466 +
7467 + # for x in loopy:
7468 + # self.required_values.append("netboot/packages/"+x+"/files")
7469 + except:
7470 + raise CatalystError,"configuration error in netboot/packages."
7471 +
7472 + generic_stage_target.__init__(self,spec,addlargs)
7473 + self.set_build_kernel_vars(addlargs)
7474 + if "netboot/busybox_config" in addlargs:
7475 + file_locate(self.settings, ["netboot/busybox_config"])
7476 +
7477 + # Custom Kernel Tarball --- use that instead ...
7478 +
7479 + # unless the user wants specific CFLAGS/CXXFLAGS, let's use -Os
7480 +
7481 + for envvar in "CFLAGS", "CXXFLAGS":
7482 + if envvar not in os.environ and envvar not in addlargs:
7483 + self.settings[envvar] = "-Os -pipe"
7484 +
7485 + def set_root_path(self):
7486 + # ROOT= variable for emerges
7487 + self.settings["root_path"]=normpath("/tmp/image")
7488 + print "netboot root path is "+self.settings["root_path"]
7489 +
7490 +# def build_packages(self):
7491 +# # build packages
7492 +# if "netboot/packages" in self.settings:
7493 +# mypack=list_bashify(self.settings["netboot/packages"])
7494 +# try:
7495 +# cmd("/bin/bash "+self.settings["controller_file"]+" packages "+mypack,env=self.env)
7496 +# except CatalystError:
7497 +# self.unbind()
7498 +# raise CatalystError,"netboot build aborting due to error."
7499 +
7500 + def build_busybox(self):
7501 + # build busybox
7502 + if "netboot/busybox_config" in self.settings:
7503 + mycmd = self.settings["netboot/busybox_config"]
7504 + else:
7505 + mycmd = ""
7506 + try:
7507 + cmd("/bin/bash "+self.settings["controller_file"]+" busybox "+ mycmd,env=self.env)
7508 + except CatalystError:
7509 + self.unbind()
7510 + raise CatalystError,"netboot build aborting due to error."
7511 +
7512 + def copy_files_to_image(self):
7513 + # create image
7514 + myfiles=[]
7515 + if "netboot/packages" in self.settings:
7516 + if type(self.settings["netboot/packages"]) == types.StringType:
7517 + loopy=[self.settings["netboot/packages"]]
7518 + else:
7519 + loopy=self.settings["netboot/packages"]
7520 +
7521 + for x in loopy:
7522 + if "netboot/packages/"+x+"/files" in self.settings:
7523 + if type(self.settings["netboot/packages/"+x+"/files"]) == types.ListType:
7524 + myfiles.extend(self.settings["netboot/packages/"+x+"/files"])
7525 + else:
7526 + myfiles.append(self.settings["netboot/packages/"+x+"/files"])
7527 +
7528 + if "netboot/extra_files" in self.settings:
7529 + if type(self.settings["netboot/extra_files"]) == types.ListType:
7530 + myfiles.extend(self.settings["netboot/extra_files"])
7531 + else:
7532 + myfiles.append(self.settings["netboot/extra_files"])
7533 +
7534 + try:
7535 + cmd("/bin/bash "+self.settings["controller_file"]+\
7536 + " image " + list_bashify(myfiles),env=self.env)
7537 + except CatalystError:
7538 + self.unbind()
7539 + raise CatalystError,"netboot build aborting due to error."
7540 +
7541 + def create_netboot_files(self):
7542 + # finish it all up
7543 + try:
7544 + cmd("/bin/bash "+self.settings["controller_file"]+" finish",env=self.env)
7545 + except CatalystError:
7546 + self.unbind()
7547 + raise CatalystError,"netboot build aborting due to error."
7548 +
7549 + # end
7550 + print "netboot: build finished !"
7551 +
7552 + def set_action_sequence(self):
7553 + self.settings["action_sequence"]=["unpack","unpack_snapshot",
7554 + "config_profile_link","setup_confdir","bind","chroot_setup",\
7555 + "setup_environment","build_packages","build_busybox",\
7556 + "build_kernel","copy_files_to_image",\
7557 + "clean","create_netboot_files","unbind","clear_autoresume"]
7558 +
7559 +def register(foo):
7560 + foo.update({"netboot":netboot_target})
7561 + return foo
7562 diff --git a/catalyst/modules/snapshot_target.py b/catalyst/modules/snapshot_target.py
7563 new file mode 100644
7564 index 0000000..29d6e87
7565 --- /dev/null
7566 +++ b/catalyst/modules/snapshot_target.py
7567 @@ -0,0 +1,91 @@
7568 +"""
7569 +Snapshot target
7570 +"""
7571 +
7572 +import os
7573 +from catalyst_support import *
7574 +from generic_stage_target import *
7575 +
7576 +class snapshot_target(generic_stage_target):
7577 + """
7578 + Builder class for snapshots.
7579 + """
7580 + def __init__(self,myspec,addlargs):
7581 + self.required_values=["version_stamp","target"]
7582 + self.valid_values=["version_stamp","target"]
7583 +
7584 + generic_target.__init__(self,myspec,addlargs)
7585 + self.settings=myspec
7586 + self.settings["target_subpath"]="portage"
7587 + st=self.settings["storedir"]
7588 + self.settings["snapshot_path"]=normpath(st + "/snapshots/"
7589 + + self.settings["snapshot_name"]
7590 + + self.settings["version_stamp"] + ".tar.bz2")
7591 + self.settings["tmp_path"]=normpath(st+"/tmp/"+self.settings["target_subpath"])
7592 +
7593 + def setup(self):
7594 + x=normpath(self.settings["storedir"]+"/snapshots")
7595 + if not os.path.exists(x):
7596 + os.makedirs(x)
7597 +
7598 + def mount_safety_check(self):
7599 + pass
7600 +
7601 + def run(self):
7602 + if "PURGEONLY" in self.settings:
7603 + self.purge()
7604 + return
7605 +
7606 + if "PURGE" in self.settings:
7607 + self.purge()
7608 +
7609 + self.setup()
7610 + print "Creating Portage tree snapshot "+self.settings["version_stamp"]+\
7611 + " from "+self.settings["portdir"]+"..."
7612 +
7613 + mytmp=self.settings["tmp_path"]
7614 + if not os.path.exists(mytmp):
7615 + os.makedirs(mytmp)
7616 +
7617 + cmd("rsync -a --delete --exclude /packages/ --exclude /distfiles/ " +
7618 + "--exclude /local/ --exclude CVS/ --exclude .svn --filter=H_**/files/digest-* " +
7619 + self.settings["portdir"] + "/ " + mytmp + "/%s/" % self.settings["repo_name"],
7620 + "Snapshot failure",env=self.env)
7621 +
7622 + print "Compressing Portage snapshot tarball..."
7623 + cmd("tar -I lbzip2 -cf " + self.settings["snapshot_path"] + " -C " +
7624 + mytmp + " %s" % self.settings["repo_name"],
7625 + "Snapshot creation failure",env=self.env)
7626 +
7627 + self.gen_contents_file(self.settings["snapshot_path"])
7628 + self.gen_digest_file(self.settings["snapshot_path"])
7629 +
7630 + self.cleanup()
7631 + print "snapshot: complete!"
7632 +
7633 + def kill_chroot_pids(self):
7634 + pass
7635 +
7636 + def cleanup(self):
7637 + print "Cleaning up..."
7638 +
7639 + def purge(self):
7640 + myemp=self.settings["tmp_path"]
7641 + if os.path.isdir(myemp):
7642 + print "Emptying directory",myemp
7643 + """
7644 + stat the dir, delete the dir, recreate the dir and set
7645 + the proper perms and ownership
7646 + """
7647 + mystat=os.stat(myemp)
7648 + """ There's no easy way to change flags recursively in python """
7649 + if os.uname()[0] == "FreeBSD":
7650 + os.system("chflags -R noschg "+myemp)
7651 + shutil.rmtree(myemp)
7652 + os.makedirs(myemp,0755)
7653 + os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
7654 + os.chmod(myemp,mystat[ST_MODE])
7655 +
7656 +def register(foo):
7657 + foo.update({"snapshot":snapshot_target})
7658 + return foo
7659 diff --git a/catalyst/modules/stage1_target.py b/catalyst/modules/stage1_target.py
7660 new file mode 100644
7661 index 0000000..aa43926
7662 --- /dev/null
7663 +++ b/catalyst/modules/stage1_target.py
7664 @@ -0,0 +1,96 @@
7665 +"""
7666 +stage1 target
7667 +"""
7668 +# NOTE: That^^ docstring has influence on catalyst-spec(5) man page generation.
7669 +
7670 +from catalyst_support import *
7671 +from generic_stage_target import *
7672 +
7673 +class stage1_target(generic_stage_target):
7674 + """
7675 + Builder class for a stage1 installation tarball build.
7676 + """
7677 + def __init__(self,spec,addlargs):
7678 + self.required_values=[]
7679 + self.valid_values=["chost"]
7680 + self.valid_values.extend(["update_seed","update_seed_command"])
7681 + generic_stage_target.__init__(self,spec,addlargs)
7682 +
7683 + def set_stage_path(self):
7684 + self.settings["stage_path"]=normpath(self.settings["chroot_path"]+self.settings["root_path"])
7685 + print "stage1 stage path is "+self.settings["stage_path"]
7686 +
7687 + def set_root_path(self):
7688 + # sets the root path, relative to 'chroot_path', of the stage1 root
7689 + self.settings["root_path"]=normpath("/tmp/stage1root")
7690 + print "stage1 root path is "+self.settings["root_path"]
7691 +
7692 + def set_cleanables(self):
7693 + generic_stage_target.set_cleanables(self)
7694 + self.settings["cleanables"].extend([\
7695 + "/usr/share/zoneinfo", "/etc/portage/package*"])
7696 +
7697 + # XXX: How do these override_foo() functions differ from the ones in generic_stage_target and why aren't they in stage3_target?
7698 +
7699 + def override_chost(self):
7700 + if "chost" in self.settings:
7701 + self.settings["CHOST"]=list_to_string(self.settings["chost"])
7702 +
7703 + def override_cflags(self):
7704 + if "cflags" in self.settings:
7705 + self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
7706 +
7707 + def override_cxxflags(self):
7708 + if "cxxflags" in self.settings:
7709 + self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
7710 +
7711 + def override_ldflags(self):
7712 + if "ldflags" in self.settings:
7713 + self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
7714 +
7715 + def set_portage_overlay(self):
7716 + generic_stage_target.set_portage_overlay(self)
7717 + if "portage_overlay" in self.settings:
7718 + print "\nWARNING !!!!!"
7719 + print "\tUsing an portage overlay for earlier stages could cause build issues."
7720 + print "\tIf you break it, you buy it. Don't complain to us about it."
7721 + print "\tDont say we did not warn you\n"
7722 +
7723 + def base_dirs(self):
7724 + if os.uname()[0] == "FreeBSD":
7725 + # baselayout no longer creates the .keep files in proc and dev for FreeBSD as it
7726 + # would create them too late...we need them earlier before bind mounting filesystems
7727 + # since proc and dev are not writeable, so...create them here
7728 + if not os.path.exists(self.settings["stage_path"]+"/proc"):
7729 + os.makedirs(self.settings["stage_path"]+"/proc")
7730 + if not os.path.exists(self.settings["stage_path"]+"/dev"):
7731 + os.makedirs(self.settings["stage_path"]+"/dev")
7732 + if not os.path.isfile(self.settings["stage_path"]+"/proc/.keep"):
7733 + try:
7734 + proc_keepfile = open(self.settings["stage_path"]+"/proc/.keep","w")
7735 + proc_keepfile.write('')
7736 + proc_keepfile.close()
7737 + except IOError:
7738 + print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
7739 + if not os.path.isfile(self.settings["stage_path"]+"/dev/.keep"):
7740 + try:
7741 + dev_keepfile = open(self.settings["stage_path"]+"/dev/.keep","w")
7742 + dev_keepfile.write('')
7743 + dev_keepfile.close()
7744 + except IOError:
7745 + print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
7746 + else:
7747 + pass
7748 +
7749 + def set_mounts(self):
7750 + # stage_path/proc probably doesn't exist yet, so create it
7751 + if not os.path.exists(self.settings["stage_path"]+"/proc"):
7752 + os.makedirs(self.settings["stage_path"]+"/proc")
7753 +
7754 + # alter the mount mappings to bind mount proc onto it
7755 + self.mounts.append("/tmp/stage1root/proc")
7756 + self.mountmap["/tmp/stage1root/proc"]="/proc"
7757 +
7758 +def register(foo):
7759 + foo.update({"stage1":stage1_target})
7760 + return foo
7761 diff --git a/catalyst/modules/stage2_target.py b/catalyst/modules/stage2_target.py
7762 new file mode 100644
7763 index 0000000..6083e2b
7764 --- /dev/null
7765 +++ b/catalyst/modules/stage2_target.py
7766 @@ -0,0 +1,62 @@
7767 +"""
7768 +stage2 target, builds upon previous stage1 tarball
7769 +"""
7770 +# NOTE: That^^ docstring has influence on catalyst-spec(5) man page generation.
7771 +
7772 +from catalyst_support import *
7773 +from generic_stage_target import *
7774 +
7775 +class stage2_target(generic_stage_target):
7776 + """
7777 + Builder class for a stage2 installation tarball build.
7778 + """
7779 + def __init__(self,spec,addlargs):
7780 + self.required_values=[]
7781 + self.valid_values=["chost"]
7782 + generic_stage_target.__init__(self,spec,addlargs)
7783 +
7784 + def set_source_path(self):
7785 + if "SEEDCACHE" in self.settings and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
7786 + self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")
7787 + else:
7788 + self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
7789 + if os.path.isfile(self.settings["source_path"]):
7790 + if os.path.exists(self.settings["source_path"]):
7791 + # XXX: Is this even necessary if the previous check passes?
7792 + self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
7793 + hash_function=self.settings["hash_function"],verbose=False)
7794 + print "Source path set to "+self.settings["source_path"]
7795 + if os.path.isdir(self.settings["source_path"]):
7796 + print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"
7797 + print "\tthe source path will then be "+normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2\n")
7798 +
7799 + # XXX: How do these override_foo() functions differ from the ones in
7800 + # generic_stage_target and why aren't they in stage3_target?
7801 +
7802 + def override_chost(self):
7803 + if "chost" in self.settings:
7804 + self.settings["CHOST"]=list_to_string(self.settings["chost"])
7805 +
7806 + def override_cflags(self):
7807 + if "cflags" in self.settings:
7808 + self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
7809 +
7810 + def override_cxxflags(self):
7811 + if "cxxflags" in self.settings:
7812 + self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
7813 +
7814 + def override_ldflags(self):
7815 + if "ldflags" in self.settings:
7816 + self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
7817 +
7818 + def set_portage_overlay(self):
7819 + generic_stage_target.set_portage_overlay(self)
7820 + if "portage_overlay" in self.settings:
7821 + print "\nWARNING !!!!!"
7822 + print "\tUsing an portage overlay for earlier stages could cause build issues."
7823 + print "\tIf you break it, you buy it. Don't complain to us about it."
7824 + print "\tDont say we did not warn you\n"
7825 +
7826 +def register(foo):
7827 + foo.update({"stage2":stage2_target})
7828 + return foo
7829 diff --git a/catalyst/modules/stage3_target.py b/catalyst/modules/stage3_target.py
7830 new file mode 100644
7831 index 0000000..4d3a008
7832 --- /dev/null
7833 +++ b/catalyst/modules/stage3_target.py
7834 @@ -0,0 +1,31 @@
7835 +"""
7836 +stage3 target, builds upon previous stage2/stage3 tarball
7837 +"""
7838 +# NOTE: That^^ docstring has influence on catalyst-spec(5) man page generation.
7839 +
7840 +from catalyst_support import *
7841 +from generic_stage_target import *
7842 +
7843 +class stage3_target(generic_stage_target):
7844 + """
7845 + Builder class for a stage3 installation tarball build.
7846 + """
7847 + def __init__(self,spec,addlargs):
7848 + self.required_values=[]
7849 + self.valid_values=[]
7850 + generic_stage_target.__init__(self,spec,addlargs)
7851 +
7852 + def set_portage_overlay(self):
7853 + generic_stage_target.set_portage_overlay(self)
7854 + if "portage_overlay" in self.settings:
7855 + print "\nWARNING !!!!!"
7856 + print "\tUsing an overlay for earlier stages could cause build issues."
7857 + print "\tIf you break it, you buy it. Don't complain to us about it."
7858 + print "\tDont say we did not warn you\n"
7859 +
7860 + def set_cleanables(self):
7861 + generic_stage_target.set_cleanables(self)
7862 +
7863 +def register(foo):
7864 + foo.update({"stage3":stage3_target})
7865 + return foo
7866 diff --git a/catalyst/modules/stage4_target.py b/catalyst/modules/stage4_target.py
7867 new file mode 100644
7868 index 0000000..ce41b2d
7869 --- /dev/null
7870 +++ b/catalyst/modules/stage4_target.py
7871 @@ -0,0 +1,43 @@
7872 +"""
7873 +stage4 target, builds upon previous stage3/stage4 tarball
7874 +"""
7875 +# NOTE: That^^ docstring has influence on catalyst-spec(5) man page generation.
7876 +
7877 +from catalyst_support import *
7878 +from generic_stage_target import *
7879 +
7880 +class stage4_target(generic_stage_target):
7881 + """
7882 + Builder class for stage4.
7883 + """
7884 + def __init__(self,spec,addlargs):
7885 + self.required_values=["stage4/packages"]
7886 + self.valid_values=self.required_values[:]
7887 + self.valid_values.extend(["stage4/use","boot/kernel",\
7888 + "stage4/root_overlay","stage4/fsscript",\
7889 + "stage4/gk_mainargs","splash_theme",\
7890 + "portage_overlay","stage4/rcadd","stage4/rcdel",\
7891 + "stage4/linuxrc","stage4/unmerge","stage4/rm","stage4/empty"])
7892 + generic_stage_target.__init__(self,spec,addlargs)
7893 +
7894 + def set_cleanables(self):
7895 + self.settings["cleanables"]=["/var/tmp/*","/tmp/*"]
7896 +
7897 + def set_action_sequence(self):
7898 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
7899 + "config_profile_link","setup_confdir","portage_overlay",\
7900 + "bind","chroot_setup","setup_environment","build_packages",\
7901 + "build_kernel","bootloader","root_overlay","fsscript",\
7902 + "preclean","rcupdate","unmerge","unbind","remove","empty",\
7903 + "clean"]
7904 +
7905 +# if "TARBALL" in self.settings or \
7906 +# "FETCH" not in self.settings:
7907 + if "FETCH" not in self.settings:
7908 + self.settings["action_sequence"].append("capture")
7909 + self.settings["action_sequence"].append("clear_autoresume")
7910 +
7911 +def register(foo):
7912 + foo.update({"stage4":stage4_target})
7913 + return foo
7914 +
7915 diff --git a/catalyst/modules/tinderbox_target.py b/catalyst/modules/tinderbox_target.py
7916 new file mode 100644
7917 index 0000000..d6d3ea3
7918 --- /dev/null
7919 +++ b/catalyst/modules/tinderbox_target.py
7920 @@ -0,0 +1,44 @@
7921 +"""
7922 +Tinderbox target
7923 +"""
7924 +# NOTE: That^^ docstring has influence on catalyst-spec(5) man page generation.
7925 +
7926 +from catalyst_support import *
7927 +from generic_stage_target import *
7928 +
7929 +class tinderbox_target(generic_stage_target):
7930 + """
7931 + Builder class for the tinderbox target
7932 + """
7933 + def __init__(self,spec,addlargs):
7934 + self.required_values=["tinderbox/packages"]
7935 + self.valid_values=self.required_values[:]
7936 + self.valid_values.extend(["tinderbox/use"])
7937 + generic_stage_target.__init__(self,spec,addlargs)
7938 +
7939 + def run_local(self):
7940 + # tinderbox
7941 + # example call: "grp.sh run xmms vim sys-apps/gleep"
7942 + try:
7943 + if os.path.exists(self.settings["controller_file"]):
7944 + cmd("/bin/bash "+self.settings["controller_file"]+" run "+\
7945 + list_bashify(self.settings["tinderbox/packages"]),"run script failed.",env=self.env)
7946 +
7947 + except CatalystError:
7948 + self.unbind()
7949 + raise CatalystError,"Tinderbox aborting due to error."
7950 +
7951 + def set_cleanables(self):
7952 + self.settings["cleanables"]=["/etc/resolv.conf","/var/tmp/*","/root/*",
7953 + self.settings['portdir']]
7954 +
7955 + def set_action_sequence(self):
7956 + #Default action sequence for run method
7957 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
7958 + "config_profile_link","setup_confdir","bind","chroot_setup",\
7959 + "setup_environment","run_local","preclean","unbind","clean",\
7960 + "clear_autoresume"]
7961 +
7962 +def register(foo):
7963 + foo.update({"tinderbox":tinderbox_target})
7964 + return foo
7965 diff --git a/catalyst/util.py b/catalyst/util.py
7966 new file mode 100644
7967 index 0000000..ff12086
7968 --- /dev/null
7969 +++ b/catalyst/util.py
7970 @@ -0,0 +1,14 @@
7971 +"""
7972 +Collection of utility functions for catalyst
7973 +"""
7974 +
7975 +import sys, traceback
7976 +
7977 +def capture_traceback():
7978 + etype, value, tb = sys.exc_info()
7979 + s = [x.strip() for x in traceback.format_exception(etype, value, tb)]
7980 + return s
7981 +
7982 +def print_traceback():
7983 + for x in capture_traceback():
7984 + print x
7985 diff --git a/modules/builder.py b/modules/builder.py
7986 deleted file mode 100644
7987 index ad27d78..0000000
7988 --- a/modules/builder.py
7989 +++ /dev/null
7990 @@ -1,20 +0,0 @@
7991 -
7992 -class generic:
7993 - def __init__(self,myspec):
7994 - self.settings=myspec
7995 -
7996 - def mount_safety_check(self):
7997 - """
7998 - Make sure that no bind mounts exist in chrootdir (to use before
7999 - cleaning the directory, to make sure we don't wipe the contents of
8000 - a bind mount
8001 - """
8002 - pass
8003 -
8004 - def mount_all(self):
8005 - """do all bind mounts"""
8006 - pass
8007 -
8008 - def umount_all(self):
8009 - """unmount all bind mounts"""
8010 - pass
8011 diff --git a/modules/catalyst/__init__.py b/modules/catalyst/__init__.py
8012 deleted file mode 100644
8013 index e69de29..0000000
8014 diff --git a/modules/catalyst/config.py b/modules/catalyst/config.py
8015 deleted file mode 100644
8016 index 726bf74..0000000
8017 --- a/modules/catalyst/config.py
8018 +++ /dev/null
8019 @@ -1,122 +0,0 @@
8020 -import re
8021 -from modules.catalyst_support import *
8022 -
8023 -class ParserBase:
8024 -
8025 - filename = ""
8026 - lines = None
8027 - values = None
8028 - key_value_separator = "="
8029 - multiple_values = False
8030 - empty_values = True
8031 -
8032 - def __getitem__(self, key):
8033 - return self.values[key]
8034 -
8035 - def get_values(self):
8036 - return self.values
8037 -
8038 - def dump(self):
8039 - dump = ""
8040 - for x in self.values.keys():
8041 - dump += x + " = " + repr(self.values[x]) + "\n"
8042 - return dump
8043 -
8044 - def parse_file(self, filename):
8045 - try:
8046 - myf = open(filename, "r")
8047 - except:
8048 - raise CatalystError, "Could not open file " + filename
8049 - self.lines = myf.readlines()
8050 - myf.close()
8051 - self.filename = filename
8052 - self.parse()
8053 -
8054 - def parse_lines(self, lines):
8055 - self.lines = lines
8056 - self.parse()
8057 -
8058 - def parse(self):
8059 - values = {}
8060 - cur_array = []
8061 -
8062 - trailing_comment=re.compile('\s*#.*$')
8063 - white_space=re.compile('\s+')
8064 -
8065 - for x, myline in enumerate(self.lines):
8066 - myline = myline.strip()
8067 -
8068 - # Force the line to be clean
8069 - # Remove Comments ( anything following # )
8070 - myline = trailing_comment.sub("", myline)
8071 -
8072 - # Skip any blank lines
8073 - if not myline: continue
8074 -
8075 - # Look for separator
8076 - msearch = myline.find(self.key_value_separator)
8077 -
8078 - # If separator found assume its a new key
8079 - if msearch != -1:
8080 - # Split on the first occurence of the separator creating two strings in the array mobjs
8081 - mobjs = myline.split(self.key_value_separator, 1)
8082 - mobjs[1] = mobjs[1].strip().strip('"')
8083 -
8084 -# # Check that this key doesn't exist already in the spec
8085 -# if mobjs[0] in values:
8086 -# raise Exception("You have a duplicate key (" + mobjs[0] + ") in your spec. Please fix it")
8087 -
8088 - # Start a new array using the first element of mobjs
8089 - cur_array = [mobjs[0]]
8090 - if mobjs[1]:
8091 - if self.multiple_values:
8092 - # split on white space creating additional array elements
8093 -# subarray = white_space.split(mobjs[1])
8094 - subarray = mobjs[1].split()
8095 - cur_array += subarray
8096 - else:
8097 - cur_array += [mobjs[1]]
8098 -
8099 - # Else add on to the last key we were working on
8100 - else:
8101 - if self.multiple_values:
8102 -# mobjs = white_space.split(myline)
8103 -# cur_array += mobjs
8104 - cur_array += myline.split()
8105 - else:
8106 - raise CatalystError, "Syntax error: " + x
8107 -
8108 - # XXX: Do we really still need this "single value is a string" behavior?
8109 - if len(cur_array) == 2:
8110 - values[cur_array[0]] = cur_array[1]
8111 - else:
8112 - values[cur_array[0]] = cur_array[1:]
8113 -
8114 - if not self.empty_values:
8115 - for x in values.keys():
8116 - # Delete empty key pairs
8117 - if not values[x]:
8118 - print "\n\tWARNING: No value set for key " + x + "...deleting"
8119 - del values[x]
8120 -
8121 - self.values = values
8122 -
8123 -class SpecParser(ParserBase):
8124 -
8125 - key_value_separator = ':'
8126 - multiple_values = True
8127 - empty_values = False
8128 -
8129 - def __init__(self, filename=""):
8130 - if filename:
8131 - self.parse_file(filename)
8132 -
8133 -class ConfigParser(ParserBase):
8134 -
8135 - key_value_separator = '='
8136 - multiple_values = False
8137 - empty_values = True
8138 -
8139 - def __init__(self, filename=""):
8140 - if filename:
8141 - self.parse_file(filename)
8142 diff --git a/modules/catalyst/util.py b/modules/catalyst/util.py
8143 deleted file mode 100644
8144 index ff12086..0000000
8145 --- a/modules/catalyst/util.py
8146 +++ /dev/null
8147 @@ -1,14 +0,0 @@
8148 -"""
8149 -Collection of utility functions for catalyst
8150 -"""
8151 -
8152 -import sys, traceback
8153 -
8154 -def capture_traceback():
8155 - etype, value, tb = sys.exc_info()
8156 - s = [x.strip() for x in traceback.format_exception(etype, value, tb)]
8157 - return s
8158 -
8159 -def print_traceback():
8160 - for x in capture_traceback():
8161 - print x
8162 diff --git a/modules/catalyst_lock.py b/modules/catalyst_lock.py
8163 deleted file mode 100644
8164 index 5311cf8..0000000
8165 --- a/modules/catalyst_lock.py
8166 +++ /dev/null
8167 @@ -1,468 +0,0 @@
8168 -#!/usr/bin/python
8169 -import os
8170 -import fcntl
8171 -import errno
8172 -import sys
8173 -import string
8174 -import time
8175 -from catalyst_support import *
8176 -
8177 -def writemsg(mystr):
8178 - sys.stderr.write(mystr)
8179 - sys.stderr.flush()
8180 -
8181 -class LockDir:
8182 - locking_method=fcntl.flock
8183 - lock_dirs_in_use=[]
8184 - die_on_failed_lock=True
8185 - def __del__(self):
8186 - self.clean_my_hardlocks()
8187 - self.delete_lock_from_path_list()
8188 - if self.islocked():
8189 - self.fcntl_unlock()
8190 -
8191 - def __init__(self,lockdir):
8192 - self.locked=False
8193 - self.myfd=None
8194 - self.set_gid(250)
8195 - self.locking_method=LockDir.locking_method
8196 - self.set_lockdir(lockdir)
8197 - self.set_lockfilename(".catalyst_lock")
8198 - self.set_lockfile()
8199 -
8200 - if LockDir.lock_dirs_in_use.count(lockdir)>0:
8201 - raise "This directory already associated with a lock object"
8202 - else:
8203 - LockDir.lock_dirs_in_use.append(lockdir)
8204 -
8205 - self.hardlock_paths={}
8206 -
8207 - def delete_lock_from_path_list(self):
8208 - i=0
8209 - try:
8210 - if LockDir.lock_dirs_in_use:
8211 - for x in LockDir.lock_dirs_in_use:
8212 - if LockDir.lock_dirs_in_use[i] == self.lockdir:
8213 - del LockDir.lock_dirs_in_use[i]
8214 - break
8215 - i=i+1
8216 - except AttributeError:
8217 - pass
8218 -
8219 - def islocked(self):
8220 - if self.locked:
8221 - return True
8222 - else:
8223 - return False
8224 -
8225 - def set_gid(self,gid):
8226 - if not self.islocked():
8227 -# if "DEBUG" in self.settings:
8228 -# print "setting gid to", gid
8229 - self.gid=gid
8230 -
8231 - def set_lockdir(self,lockdir):
8232 - if not os.path.exists(lockdir):
8233 - os.makedirs(lockdir)
8234 - if os.path.isdir(lockdir):
8235 - if not self.islocked():
8236 - if lockdir[-1] == "/":
8237 - lockdir=lockdir[:-1]
8238 - self.lockdir=normpath(lockdir)
8239 -# if "DEBUG" in self.settings:
8240 -# print "setting lockdir to", self.lockdir
8241 - else:
8242 - raise "the lock object needs a path to a dir"
8243 -
8244 - def set_lockfilename(self,lockfilename):
8245 - if not self.islocked():
8246 - self.lockfilename=lockfilename
8247 -# if "DEBUG" in self.settings:
8248 -# print "setting lockfilename to", self.lockfilename
8249 -
8250 - def set_lockfile(self):
8251 - if not self.islocked():
8252 - self.lockfile=normpath(self.lockdir+'/'+self.lockfilename)
8253 -# if "DEBUG" in self.settings:
8254 -# print "setting lockfile to", self.lockfile
8255 -
8256 - def read_lock(self):
8257 - if not self.locking_method == "HARDLOCK":
8258 - self.fcntl_lock("read")
8259 - else:
8260 - print "HARDLOCKING doesnt support shared-read locks"
8261 - print "using exclusive write locks"
8262 - self.hard_lock()
8263 -
8264 - def write_lock(self):
8265 - if not self.locking_method == "HARDLOCK":
8266 - self.fcntl_lock("write")
8267 - else:
8268 - self.hard_lock()
8269 -
8270 - def unlock(self):
8271 - if not self.locking_method == "HARDLOCK":
8272 - self.fcntl_unlock()
8273 - else:
8274 - self.hard_unlock()
8275 -
8276 - def fcntl_lock(self,locktype):
8277 - if self.myfd==None:
8278 - if not os.path.exists(os.path.dirname(self.lockdir)):
8279 - raise DirectoryNotFound, os.path.dirname(self.lockdir)
8280 - if not os.path.exists(self.lockfile):
8281 - old_mask=os.umask(000)
8282 - self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
8283 - try:
8284 - if os.stat(self.lockfile).st_gid != self.gid:
8285 - os.chown(self.lockfile,os.getuid(),self.gid)
8286 - except SystemExit, e:
8287 - raise
8288 - except OSError, e:
8289 - if e[0] == 2: #XXX: No such file or directory
8290 - return self.fcntl_locking(locktype)
8291 - else:
8292 - writemsg("Cannot chown a lockfile. This could cause inconvenience later.\n")
8293 -
8294 - os.umask(old_mask)
8295 - else:
8296 - self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
8297 -
8298 - try:
8299 - if locktype == "read":
8300 - self.locking_method(self.myfd,fcntl.LOCK_SH|fcntl.LOCK_NB)
8301 - else:
8302 - self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
8303 - except IOError, e:
8304 - if "errno" not in dir(e):
8305 - raise
8306 - if e.errno == errno.EAGAIN:
8307 - if not LockDir.die_on_failed_lock:
8308 - # Resource temp unavailable; eg, someone beat us to the lock.
8309 - writemsg("waiting for lock on %s\n" % self.lockfile)
8310 -
8311 - # Try for the exclusive or shared lock again.
8312 - if locktype == "read":
8313 - self.locking_method(self.myfd,fcntl.LOCK_SH)
8314 - else:
8315 - self.locking_method(self.myfd,fcntl.LOCK_EX)
8316 - else:
8317 - raise LockInUse,self.lockfile
8318 - elif e.errno == errno.ENOLCK:
8319 - pass
8320 - else:
8321 - raise
8322 - if not os.path.exists(self.lockfile):
8323 - os.close(self.myfd)
8324 - self.myfd=None
8325 - #writemsg("lockfile recurse\n")
8326 - self.fcntl_lock(locktype)
8327 - else:
8328 - self.locked=True
8329 - #writemsg("Lockfile obtained\n")
8330 -
8331 - def fcntl_unlock(self):
8332 - import fcntl
8333 - unlinkfile = 1
8334 - if not os.path.exists(self.lockfile):
8335 - print "lockfile does not exist '%s'" % self.lockfile
8336 - if (self.myfd != None):
8337 - try:
8338 - os.close(myfd)
8339 - self.myfd=None
8340 - except:
8341 - pass
8342 - return False
8343 -
8344 - try:
8345 - if self.myfd == None:
8346 - self.myfd = os.open(self.lockfile, os.O_WRONLY,0660)
8347 - unlinkfile = 1
8348 - self.locking_method(self.myfd,fcntl.LOCK_UN)
8349 - except SystemExit, e:
8350 - raise
8351 - except Exception, e:
8352 - os.close(self.myfd)
8353 - self.myfd=None
8354 - raise IOError, "Failed to unlock file '%s'\n" % self.lockfile
8355 - try:
8356 - # This sleep call was added to allow other processes that are
8357 - # waiting for a lock to be able to grab it before it is deleted.
8358 - # lockfile() already accounts for this situation, however, and
8359 - # the sleep here adds more time than is saved overall, so am
8360 - # commenting until it is proved necessary.
8361 - #time.sleep(0.0001)
8362 - if unlinkfile:
8363 - InUse=False
8364 - try:
8365 - self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
8366 - except:
8367 - print "Read lock may be in effect. skipping lockfile delete..."
8368 - InUse=True
8369 - # We won the lock, so there isn't competition for it.
8370 - # We can safely delete the file.
8371 - #writemsg("Got the lockfile...\n")
8372 - #writemsg("Unlinking...\n")
8373 - self.locking_method(self.myfd,fcntl.LOCK_UN)
8374 - if not InUse:
8375 - os.unlink(self.lockfile)
8376 - os.close(self.myfd)
8377 - self.myfd=None
8378 -# if "DEBUG" in self.settings:
8379 -# print "Unlinked lockfile..."
8380 - except SystemExit, e:
8381 - raise
8382 - except Exception, e:
8383 - # We really don't care... Someone else has the lock.
8384 - # So it is their problem now.
8385 - print "Failed to get lock... someone took it."
8386 - print str(e)
8387 -
8388 - # Why test lockfilename? Because we may have been handed an
8389 - # fd originally, and the caller might not like having their
8390 - # open fd closed automatically on them.
8391 - #if type(lockfilename) == types.StringType:
8392 - # os.close(myfd)
8393 -
8394 - if (self.myfd != None):
8395 - os.close(self.myfd)
8396 - self.myfd=None
8397 - self.locked=False
8398 - time.sleep(.0001)
8399 -
8400 - def hard_lock(self,max_wait=14400):
8401 - """Does the NFS, hardlink shuffle to ensure locking on the disk.
8402 - We create a PRIVATE lockfile, that is just a placeholder on the disk.
8403 - Then we HARDLINK the real lockfile to that private file.
8404 - If our file can 2 references, then we have the lock. :)
8405 - Otherwise we lather, rise, and repeat.
8406 - We default to a 4 hour timeout.
8407 - """
8408 -
8409 - self.myhardlock = self.hardlock_name(self.lockdir)
8410 -
8411 - start_time = time.time()
8412 - reported_waiting = False
8413 -
8414 - while(time.time() < (start_time + max_wait)):
8415 - # We only need it to exist.
8416 - self.myfd = os.open(self.myhardlock, os.O_CREAT|os.O_RDWR,0660)
8417 - os.close(self.myfd)
8418 -
8419 - self.add_hardlock_file_to_cleanup()
8420 - if not os.path.exists(self.myhardlock):
8421 - raise FileNotFound, "Created lockfile is missing: %(filename)s" % {"filename":self.myhardlock}
8422 - try:
8423 - res = os.link(self.myhardlock, self.lockfile)
8424 - except SystemExit, e:
8425 - raise
8426 - except Exception, e:
8427 -# if "DEBUG" in self.settings:
8428 -# print "lockfile(): Hardlink: Link failed."
8429 -# print "Exception: ",e
8430 - pass
8431 -
8432 - if self.hardlink_is_mine(self.myhardlock, self.lockfile):
8433 - # We have the lock.
8434 - if reported_waiting:
8435 - print
8436 - return True
8437 -
8438 - if reported_waiting:
8439 - writemsg(".")
8440 - else:
8441 - reported_waiting = True
8442 - print
8443 - print "Waiting on (hardlink) lockfile: (one '.' per 3 seconds)"
8444 - print "Lockfile: " + self.lockfile
8445 - time.sleep(3)
8446 -
8447 - os.unlink(self.myhardlock)
8448 - return False
8449 -
8450 - def hard_unlock(self):
8451 - try:
8452 - if os.path.exists(self.myhardlock):
8453 - os.unlink(self.myhardlock)
8454 - if os.path.exists(self.lockfile):
8455 - os.unlink(self.lockfile)
8456 - except SystemExit, e:
8457 - raise
8458 - except:
8459 - writemsg("Something strange happened to our hardlink locks.\n")
8460 -
8461 - def add_hardlock_file_to_cleanup(self):
8462 - #mypath = self.normpath(path)
8463 - if os.path.isdir(self.lockdir) and os.path.isfile(self.myhardlock):
8464 - self.hardlock_paths[self.lockdir]=self.myhardlock
8465 -
8466 - def remove_hardlock_file_from_cleanup(self):
8467 - if self.lockdir in self.hardlock_paths:
8468 - del self.hardlock_paths[self.lockdir]
8469 - print self.hardlock_paths
8470 -
8471 - def hardlock_name(self, path):
8472 - mypath=path+"/.hardlock-"+os.uname()[1]+"-"+str(os.getpid())
8473 - newpath = os.path.normpath(mypath)
8474 - if len(newpath) > 1:
8475 - if newpath[1] == "/":
8476 - newpath = "/"+newpath.lstrip("/")
8477 - return newpath
8478 -
8479 - def hardlink_is_mine(self,link,lock):
8480 - import stat
8481 - try:
8482 - myhls = os.stat(link)
8483 - mylfs = os.stat(lock)
8484 - except SystemExit, e:
8485 - raise
8486 - except:
8487 - myhls = None
8488 - mylfs = None
8489 -
8490 - if myhls:
8491 - if myhls[stat.ST_NLINK] == 2:
8492 - return True
8493 - if mylfs:
8494 - if mylfs[stat.ST_INO] == myhls[stat.ST_INO]:
8495 - return True
8496 - return False
8497 -
8498 - def hardlink_active(lock):
8499 - if not os.path.exists(lock):
8500 - return False
8501 -
8502 - def clean_my_hardlocks(self):
8503 - try:
8504 - for x in self.hardlock_paths.keys():
8505 - self.hardlock_cleanup(x)
8506 - except AttributeError:
8507 - pass
8508 -
8509 - def hardlock_cleanup(self,path):
8510 - mypid = str(os.getpid())
8511 - myhost = os.uname()[1]
8512 - mydl = os.listdir(path)
8513 - results = []
8514 - mycount = 0
8515 -
8516 - mylist = {}
8517 - for x in mydl:
8518 - filepath=path+"/"+x
8519 - if os.path.isfile(filepath):
8520 - parts = filepath.split(".hardlock-")
8521 - if len(parts) == 2:
8522 - filename = parts[0]
8523 - hostpid = parts[1].split("-")
8524 - host = "-".join(hostpid[:-1])
8525 - pid = hostpid[-1]
8526 - if filename not in mylist:
8527 - mylist[filename] = {}
8528 -
8529 - if host not in mylist[filename]:
8530 - mylist[filename][host] = []
8531 - mylist[filename][host].append(pid)
8532 - mycount += 1
8533 - else:
8534 - mylist[filename][host].append(pid)
8535 - mycount += 1
8536 -
8537 -
8538 - results.append("Found %(count)s locks" % {"count":mycount})
8539 - for x in mylist.keys():
8540 - if myhost in mylist[x]:
8541 - mylockname = self.hardlock_name(x)
8542 - if self.hardlink_is_mine(mylockname, self.lockfile) or \
8543 - not os.path.exists(self.lockfile):
8544 - for y in mylist[x].keys():
8545 - for z in mylist[x][y]:
8546 - filename = x+".hardlock-"+y+"-"+z
8547 - if filename == mylockname:
8548 - self.hard_unlock()
8549 - continue
8550 - try:
8551 - # We're sweeping through, unlinking everyone's locks.
8552 - os.unlink(filename)
8553 - results.append("Unlinked: " + filename)
8554 - except SystemExit, e:
8555 - raise
8556 - except Exception,e:
8557 - pass
8558 - try:
8559 - os.unlink(x)
8560 - results.append("Unlinked: " + x)
8561 - os.unlink(mylockname)
8562 - results.append("Unlinked: " + mylockname)
8563 - except SystemExit, e:
8564 - raise
8565 - except Exception,e:
8566 - pass
8567 - else:
8568 - try:
8569 - os.unlink(mylockname)
8570 - results.append("Unlinked: " + mylockname)
8571 - except SystemExit, e:
8572 - raise
8573 - except Exception,e:
8574 - pass
8575 - return results
8576 -
8577 -if __name__ == "__main__":
8578 -
8579 - def lock_work():
8580 - print
8581 - for i in range(1,6):
8582 - print i,time.time()
8583 - time.sleep(1)
8584 - print
8585 - def normpath(mypath):
8586 - newpath = os.path.normpath(mypath)
8587 - if len(newpath) > 1:
8588 - if newpath[1] == "/":
8589 - newpath = "/"+newpath.lstrip("/")
8590 - return newpath
8591 -
8592 - print "Lock 5 starting"
8593 - import time
8594 - Lock1=LockDir("/tmp/lock_path")
8595 - Lock1.write_lock()
8596 - print "Lock1 write lock"
8597 -
8598 - lock_work()
8599 -
8600 - Lock1.unlock()
8601 - print "Lock1 unlock"
8602 -
8603 - Lock1.read_lock()
8604 - print "Lock1 read lock"
8605 -
8606 - lock_work()
8607 -
8608 - Lock1.unlock()
8609 - print "Lock1 unlock"
8610 -
8611 - Lock1.read_lock()
8612 - print "Lock1 read lock"
8613 -
8614 - Lock1.write_lock()
8615 - print "Lock1 write lock"
8616 -
8617 - lock_work()
8618 -
8619 - Lock1.unlock()
8620 - print "Lock1 unlock"
8621 -
8622 - Lock1.read_lock()
8623 - print "Lock1 read lock"
8624 -
8625 - lock_work()
8626 -
8627 - Lock1.unlock()
8628 - print "Lock1 unlock"
8629 -
8630 -#Lock1.write_lock()
8631 -#time.sleep(2)
8632 -#Lock1.unlock()
8633 - ##Lock1.write_lock()
8634 - #time.sleep(2)
8635 - #Lock1.unlock()
8636 diff --git a/modules/catalyst_support.py b/modules/catalyst_support.py
8637 deleted file mode 100644
8638 index 316dfa3..0000000
8639 --- a/modules/catalyst_support.py
8640 +++ /dev/null
8641 @@ -1,718 +0,0 @@
8642 -
8643 -import sys,string,os,types,re,signal,traceback,time
8644 -#import md5,sha
8645 -selinux_capable = False
8646 -#userpriv_capable = (os.getuid() == 0)
8647 -#fakeroot_capable = False
8648 -BASH_BINARY = "/bin/bash"
8649 -
8650 -try:
8651 - import resource
8652 - max_fd_limit=resource.getrlimit(RLIMIT_NOFILE)
8653 -except SystemExit, e:
8654 - raise
8655 -except:
8656 - # hokay, no resource module.
8657 - max_fd_limit=256
8658 -
8659 -# pids this process knows of.
8660 -spawned_pids = []
8661 -
8662 -try:
8663 - import urllib
8664 -except SystemExit, e:
8665 - raise
8666 -
8667 -def cleanup(pids,block_exceptions=True):
8668 - """function to go through and reap the list of pids passed to it"""
8669 - global spawned_pids
8670 - if type(pids) == int:
8671 - pids = [pids]
8672 - for x in pids:
8673 - try:
8674 - os.kill(x,signal.SIGTERM)
8675 - if os.waitpid(x,os.WNOHANG)[1] == 0:
8676 - # feisty bugger, still alive.
8677 - os.kill(x,signal.SIGKILL)
8678 - os.waitpid(x,0)
8679 -
8680 - except OSError, oe:
8681 - if block_exceptions:
8682 - pass
8683 - if oe.errno not in (10,3):
8684 - raise oe
8685 - except SystemExit:
8686 - raise
8687 - except Exception:
8688 - if block_exceptions:
8689 - pass
8690 - try: spawned_pids.remove(x)
8691 - except IndexError: pass
8692 -
8693 -
8694 -
8695 -# a function to turn a string of non-printable characters into a string of
8696 -# hex characters
8697 -def hexify(str):
8698 - hexStr = string.hexdigits
8699 - r = ''
8700 - for ch in str:
8701 - i = ord(ch)
8702 - r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
8703 - return r
8704 -# hexify()
8705 -
8706 -def generate_contents(file,contents_function="auto",verbose=False):
8707 - try:
8708 - _ = contents_function
8709 - if _ == 'auto' and file.endswith('.iso'):
8710 - _ = 'isoinfo-l'
8711 - if (_ in ['tar-tv','auto']):
8712 - if file.endswith('.tgz') or file.endswith('.tar.gz'):
8713 - _ = 'tar-tvz'
8714 - elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
8715 - _ = 'tar-tvj'
8716 - elif file.endswith('.tar'):
8717 - _ = 'tar-tv'
8718 -
8719 - if _ == 'auto':
8720 - warn('File %r has unknown type for automatic detection.' % (file, ))
8721 - return None
8722 - else:
8723 - contents_function = _
8724 - _ = contents_map[contents_function]
8725 - return _[0](file,_[1],verbose)
8726 - except:
8727 - raise CatalystError,\
8728 - "Error generating contents, is appropriate utility (%s) installed on your system?" \
8729 - % (contents_function, )
8730 -
8731 -def calc_contents(file,cmd,verbose):
8732 - args={ 'file': file }
8733 - cmd=cmd % dict(args)
8734 - a=os.popen(cmd)
8735 - mylines=a.readlines()
8736 - a.close()
8737 - result="".join(mylines)
8738 - if verbose:
8739 - print result
8740 - return result
8741 -
8742 -# This has map must be defined after the function calc_content
8743 -# It is possible to call different functions from this but they must be defined
8744 -# before hash_map
8745 -# Key,function,cmd
8746 -contents_map={
8747 - # 'find' is disabled because it requires the source path, which is not
8748 - # always available
8749 - #"find" :[calc_contents,"find %(path)s"],
8750 - "tar-tv":[calc_contents,"tar tvf %(file)s"],
8751 - "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
8752 - "tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
8753 - "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
8754 - # isoinfo-f should be a last resort only
8755 - "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
8756 -}
8757 -
8758 -def generate_hash(file,hash_function="crc32",verbose=False):
8759 - try:
8760 - return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
8761 - hash_map[hash_function][3],verbose)
8762 - except:
8763 - raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
8764 -
8765 -def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
8766 - a=os.popen(cmd+" "+cmd_args+" "+file)
8767 - mylines=a.readlines()
8768 - a.close()
8769 - mylines=mylines[0].split()
8770 - result=mylines[0]
8771 - if verbose:
8772 - print id_string+" (%s) = %s" % (file, result)
8773 - return result
8774 -
8775 -def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
8776 - a=os.popen(cmd+" "+cmd_args+" "+file)
8777 - header=a.readline()
8778 - mylines=a.readline().split()
8779 - hash=mylines[0]
8780 - short_file=os.path.split(mylines[1])[1]
8781 - a.close()
8782 - result=header+hash+" "+short_file+"\n"
8783 - if verbose:
8784 - print header+" (%s) = %s" % (short_file, result)
8785 - return result
8786 -
8787 -# This has map must be defined after the function calc_hash
8788 -# It is possible to call different functions from this but they must be defined
8789 -# before hash_map
8790 -# Key,function,cmd,cmd_args,Print string
8791 -hash_map={
8792 - "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
8793 - "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
8794 - "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
8795 - "gost":[calc_hash2,"shash","-a GOST","GOST"],\
8796 - "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
8797 - "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
8798 - "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
8799 - "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
8800 - "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
8801 - "md2":[calc_hash2,"shash","-a MD2","MD2"],\
8802 - "md4":[calc_hash2,"shash","-a MD4","MD4"],\
8803 - "md5":[calc_hash2,"shash","-a MD5","MD5"],\
8804 - "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
8805 - "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
8806 - "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
8807 - "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
8808 - "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
8809 - "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
8810 - "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
8811 - "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
8812 - "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
8813 - "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
8814 - "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
8815 - "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
8816 - "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
8817 - "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
8818 - "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
8819 - }
8820 -
8821 -def read_from_clst(file):
8822 - line = ''
8823 - myline = ''
8824 - try:
8825 - myf=open(file,"r")
8826 - except:
8827 - return -1
8828 - #raise CatalystError, "Could not open file "+file
8829 - for line in myf.readlines():
8830 - #line = string.replace(line, "\n", "") # drop newline
8831 - myline = myline + line
8832 - myf.close()
8833 - return myline
8834 -# read_from_clst
8835 -
8836 -# these should never be touched
8837 -required_build_targets=["generic_target","generic_stage_target"]
8838 -
8839 -# new build types should be added here
8840 -valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
8841 - "livecd_stage1_target","livecd_stage2_target","embedded_target",
8842 - "tinderbox_target","snapshot_target","netboot_target","netboot2_target"]
8843 -
8844 -required_config_file_values=["storedir","sharedir","distdir","portdir"]
8845 -valid_config_file_values=required_config_file_values[:]
8846 -valid_config_file_values.append("PKGCACHE")
8847 -valid_config_file_values.append("KERNCACHE")
8848 -valid_config_file_values.append("CCACHE")
8849 -valid_config_file_values.append("DISTCC")
8850 -valid_config_file_values.append("ICECREAM")
8851 -valid_config_file_values.append("ENVSCRIPT")
8852 -valid_config_file_values.append("AUTORESUME")
8853 -valid_config_file_values.append("FETCH")
8854 -valid_config_file_values.append("CLEAR_AUTORESUME")
8855 -valid_config_file_values.append("options")
8856 -valid_config_file_values.append("DEBUG")
8857 -valid_config_file_values.append("VERBOSE")
8858 -valid_config_file_values.append("PURGE")
8859 -valid_config_file_values.append("PURGEONLY")
8860 -valid_config_file_values.append("SNAPCACHE")
8861 -valid_config_file_values.append("snapshot_cache")
8862 -valid_config_file_values.append("hash_function")
8863 -valid_config_file_values.append("digests")
8864 -valid_config_file_values.append("contents")
8865 -valid_config_file_values.append("SEEDCACHE")
8866 -
8867 -verbosity=1
8868 -
8869 -def list_bashify(mylist):
8870 - if type(mylist)==types.StringType:
8871 - mypack=[mylist]
8872 - else:
8873 - mypack=mylist[:]
8874 - for x in range(0,len(mypack)):
8875 - # surround args with quotes for passing to bash,
8876 - # allows things like "<" to remain intact
8877 - mypack[x]="'"+mypack[x]+"'"
8878 - mypack=string.join(mypack)
8879 - return mypack
8880 -
8881 -def list_to_string(mylist):
8882 - if type(mylist)==types.StringType:
8883 - mypack=[mylist]
8884 - else:
8885 - mypack=mylist[:]
8886 - for x in range(0,len(mypack)):
8887 - # surround args with quotes for passing to bash,
8888 - # allows things like "<" to remain intact
8889 - mypack[x]=mypack[x]
8890 - mypack=string.join(mypack)
8891 - return mypack
8892 -
8893 -class CatalystError(Exception):
8894 - def __init__(self, message):
8895 - if message:
8896 - (type,value)=sys.exc_info()[:2]
8897 - if value!=None:
8898 - print
8899 - print traceback.print_exc(file=sys.stdout)
8900 - print
8901 - print "!!! catalyst: "+message
8902 - print
8903 -
8904 -class LockInUse(Exception):
8905 - def __init__(self, message):
8906 - if message:
8907 - #(type,value)=sys.exc_info()[:2]
8908 - #if value!=None:
8909 - #print
8910 - #kprint traceback.print_exc(file=sys.stdout)
8911 - print
8912 - print "!!! catalyst lock file in use: "+message
8913 - print
8914 -
8915 -def die(msg=None):
8916 - warn(msg)
8917 - sys.exit(1)
8918 -
8919 -def warn(msg):
8920 - print "!!! catalyst: "+msg
8921 -
8922 -def find_binary(myc):
8923 - """look through the environmental path for an executable file named whatever myc is"""
8924 - # this sucks. badly.
8925 - p=os.getenv("PATH")
8926 - if p == None:
8927 - return None
8928 - for x in p.split(":"):
8929 - #if it exists, and is executable
8930 - if os.path.exists("%s/%s" % (x,myc)) and os.stat("%s/%s" % (x,myc))[0] & 0x0248:
8931 - return "%s/%s" % (x,myc)
8932 - return None
8933 -
8934 -def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
8935 - """spawn mycommand as an arguement to bash"""
8936 - args=[BASH_BINARY]
8937 - if not opt_name:
8938 - opt_name=mycommand.split()[0]
8939 - if "BASH_ENV" not in env:
8940 - env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
8941 - if debug:
8942 - args.append("-x")
8943 - args.append("-c")
8944 - args.append(mycommand)
8945 - return spawn(args,env=env,opt_name=opt_name,**keywords)
8946 -
8947 -#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
8948 -# collect_fds=[1],fd_pipes=None,**keywords):
8949 -
8950 -def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
8951 - collect_fds=[1],fd_pipes=None,**keywords):
8952 - """call spawn, collecting the output to fd's specified in collect_fds list
8953 - emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
8954 - requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
8955 - 'lets let log only stdin and let stderr slide by'.
8956 -
8957 - emulate_gso was deprecated from the day it was added, so convert your code over.
8958 - spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
8959 - global selinux_capable
8960 - pr,pw=os.pipe()
8961 -
8962 - #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
8963 - # s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
8964 - # raise Exception,s
8965 -
8966 - if fd_pipes==None:
8967 - fd_pipes={}
8968 - fd_pipes[0] = 0
8969 -
8970 - for x in collect_fds:
8971 - fd_pipes[x] = pw
8972 - keywords["returnpid"]=True
8973 -
8974 - mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
8975 - os.close(pw)
8976 - if type(mypid) != types.ListType:
8977 - os.close(pr)
8978 - return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
8979 -
8980 - fd=os.fdopen(pr,"r")
8981 - mydata=fd.readlines()
8982 - fd.close()
8983 - if emulate_gso:
8984 - mydata=string.join(mydata)
8985 - if len(mydata) and mydata[-1] == "\n":
8986 - mydata=mydata[:-1]
8987 - retval=os.waitpid(mypid[0],0)[1]
8988 - cleanup(mypid)
8989 - if raw_exit_code:
8990 - return [retval,mydata]
8991 - retval=process_exit_code(retval)
8992 - return [retval, mydata]
8993 -
8994 -# base spawn function
8995 -def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
8996 - uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
8997 - selinux_context=None, raise_signals=False, func_call=False):
8998 - """base fork/execve function.
8999 - mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
9000 - environment, use the appropriate spawn call. This is a straight fork/exec code path.
9001 - Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
9002 - the forked process to said value. If path_lookup is on, a non-absolute command will be converted
9003 - to an absolute command, otherwise it returns None.
9004 -
9005 - selinux_context is the desired context, dependant on selinux being available.
9006 - opt_name controls the name the processor goes by.
9007 - fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
9008 - current fd's raw fd #, desired #.
9009 -
9010 - func_call is a boolean for specifying to execute a python function- use spawn_func instead.
9011 - raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
9012 - if raw_input is on.
9013 -
9014 - logfile overloads the specified fd's to write to a tee process which logs to logfile
9015 - returnpid returns the relevant pids (a list, including the logging process if logfile is on).
9016 -
9017 - non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
9018 - raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""
9019 -
9020 - myc=''
9021 - if not func_call:
9022 - if type(mycommand)==types.StringType:
9023 - mycommand=mycommand.split()
9024 - myc = mycommand[0]
9025 - if not os.access(myc, os.X_OK):
9026 - if not path_lookup:
9027 - return None
9028 - myc = find_binary(myc)
9029 - if myc == None:
9030 - return None
9031 - mypid=[]
9032 - if logfile:
9033 - pr,pw=os.pipe()
9034 - mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
9035 - retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
9036 - if retval != 0:
9037 - # he's dead jim.
9038 - if raw_exit_code:
9039 - return retval
9040 - return process_exit_code(retval)
9041 -
9042 - if fd_pipes == None:
9043 - fd_pipes={}
9044 - fd_pipes[0] = 0
9045 - fd_pipes[1]=pw
9046 - fd_pipes[2]=pw
9047 -
9048 - if not opt_name:
9049 - opt_name = mycommand[0]
9050 - myargs=[opt_name]
9051 - myargs.extend(mycommand[1:])
9052 - global spawned_pids
9053 - mypid.append(os.fork())
9054 - if mypid[-1] != 0:
9055 - #log the bugger.
9056 - spawned_pids.extend(mypid)
9057 -
9058 - if mypid[-1] == 0:
9059 - if func_call:
9060 - spawned_pids = []
9061 -
9062 - # this may look ugly, but basically it moves file descriptors around to ensure no
9063 - # handles that are needed are accidentally closed during the final dup2 calls.
9064 - trg_fd=[]
9065 - if type(fd_pipes)==types.DictType:
9066 - src_fd=[]
9067 - k=fd_pipes.keys()
9068 - k.sort()
9069 -
9070 - #build list of which fds will be where, and where they are at currently
9071 - for x in k:
9072 - trg_fd.append(x)
9073 - src_fd.append(fd_pipes[x])
9074 -
9075 - # run through said list dup'ing descriptors so that they won't be waxed
9076 - # by other dup calls.
9077 - for x in range(0,len(trg_fd)):
9078 - if trg_fd[x] == src_fd[x]:
9079 - continue
9080 - if trg_fd[x] in src_fd[x+1:]:
9081 - new=os.dup2(trg_fd[x],max(src_fd) + 1)
9082 - os.close(trg_fd[x])
9083 - try:
9084 - while True:
9085 - src_fd[s.index(trg_fd[x])]=new
9086 - except SystemExit, e:
9087 - raise
9088 - except:
9089 - pass
9090 -
9091 - # transfer the fds to their final pre-exec position.
9092 - for x in range(0,len(trg_fd)):
9093 - if trg_fd[x] != src_fd[x]:
9094 - os.dup2(src_fd[x], trg_fd[x])
9095 - else:
9096 - trg_fd=[0,1,2]
9097 -
9098 - # wax all open descriptors that weren't requested be left open.
9099 - for x in range(0,max_fd_limit):
9100 - if x not in trg_fd:
9101 - try:
9102 - os.close(x)
9103 - except SystemExit, e:
9104 - raise
9105 - except:
9106 - pass
9107 -
9108 - # note this order must be preserved- can't change gid/groups if you change uid first.
9109 - if selinux_capable and selinux_context:
9110 - import selinux
9111 - selinux.setexec(selinux_context)
9112 - if gid:
9113 - os.setgid(gid)
9114 - if groups:
9115 - os.setgroups(groups)
9116 - if uid:
9117 - os.setuid(uid)
9118 - if umask:
9119 - os.umask(umask)
9120 - else:
9121 - os.umask(022)
9122 -
9123 - try:
9124 - #print "execing", myc, myargs
9125 - if func_call:
9126 - # either use a passed in func for interpretting the results, or return if no exception.
9127 - # note the passed in list, and dict are expanded.
9128 - if len(mycommand) == 4:
9129 - os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
9130 - try:
9131 - mycommand[0](*mycommand[1],**mycommand[2])
9132 - except Exception,e:
9133 - print "caught exception",e," in forked func",mycommand[0]
9134 - sys.exit(0)
9135 -
9136 - #os.execvp(myc,myargs)
9137 - os.execve(myc,myargs,env)
9138 - except SystemExit, e:
9139 - raise
9140 - except Exception, e:
9141 - if not func_call:
9142 - raise str(e)+":\n "+myc+" "+string.join(myargs)
9143 - print "func call failed"
9144 -
9145 - # If the execve fails, we need to report it, and exit
9146 - # *carefully* --- report error here
9147 - os._exit(1)
9148 - sys.exit(1)
9149 - return # should never get reached
9150 -
9151 - # if we were logging, kill the pipes.
9152 - if logfile:
9153 - os.close(pr)
9154 - os.close(pw)
9155 -
9156 - if returnpid:
9157 - return mypid
9158 -
9159 - # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
9160 - # if the main pid (mycommand) returned badly.
9161 - while len(mypid):
9162 - retval=os.waitpid(mypid[-1],0)[1]
9163 - if retval != 0:
9164 - cleanup(mypid[0:-1],block_exceptions=False)
9165 - # at this point we've killed all other kid pids generated via this call.
9166 - # return now.
9167 - if raw_exit_code:
9168 - return retval
9169 - return process_exit_code(retval,throw_signals=raise_signals)
9170 - else:
9171 - mypid.pop(-1)
9172 - cleanup(mypid)
9173 - return 0
9174 -
9175 -def cmd(mycmd,myexc="",env={}):
9176 - try:
9177 - sys.stdout.flush()
9178 - retval=spawn_bash(mycmd,env)
9179 - if retval != 0:
9180 - raise CatalystError,myexc
9181 - except:
9182 - raise
9183 -
9184 -def process_exit_code(retval,throw_signals=False):
9185 - """process a waitpid returned exit code, returning exit code if it exit'd, or the
9186 - signal if it died from signalling
9187 - if throw_signals is on, it raises a SystemExit if the process was signaled.
9188 - This is intended for usage with threads, although at the moment you can't signal individual
9189 - threads in python, only the master thread, so it's a questionable option."""
9190 - if (retval & 0xff)==0:
9191 - return retval >> 8 # return exit code
9192 - else:
9193 - if throw_signals:
9194 - #use systemexit, since portage is stupid about exception catching.
9195 - raise SystemExit()
9196 - return (retval & 0xff) << 8 # interrupted by signal
9197 -
9198 -def file_locate(settings,filelist,expand=1):
9199 - #if expand=1, non-absolute paths will be accepted and
9200 - # expanded to os.getcwd()+"/"+localpath if file exists
9201 - for myfile in filelist:
9202 - if myfile not in settings:
9203 - #filenames such as cdtar are optional, so we don't assume the variable is defined.
9204 - pass
9205 - else:
9206 - if len(settings[myfile])==0:
9207 - raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
9208 - if settings[myfile][0]=="/":
9209 - if not os.path.exists(settings[myfile]):
9210 - raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
9211 - elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
9212 - settings[myfile]=os.getcwd()+"/"+settings[myfile]
9213 - else:
9214 - raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
9215 -"""
9216 -Spec file format:
9217 -
9218 -The spec file format is a very simple and easy-to-use format for storing data. Here's an example
9219 -file:
9220 -
9221 -item1: value1
9222 -item2: foo bar oni
9223 -item3:
9224 - meep
9225 - bark
9226 - gleep moop
9227 -
9228 -This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
9229 -the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
9230 -would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
9231 -that the order of multiple-value items is preserved, but the order that the items themselves are
9232 -defined are not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
9233 -"item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
9234 -"""
9235 -
9236 -def parse_makeconf(mylines):
9237 - mymakeconf={}
9238 - pos=0
9239 - pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
9240 - while pos<len(mylines):
9241 - if len(mylines[pos])<=1:
9242 - #skip blanks
9243 - pos += 1
9244 - continue
9245 - if mylines[pos][0] in ["#"," ","\t"]:
9246 - #skip indented lines, comments
9247 - pos += 1
9248 - continue
9249 - else:
9250 - myline=mylines[pos]
9251 - mobj=pat.match(myline)
9252 - pos += 1
9253 - if mobj.group(2):
9254 - clean_string = re.sub(r"\"",r"",mobj.group(2))
9255 - mymakeconf[mobj.group(1)]=clean_string
9256 - return mymakeconf
9257 -
9258 -def read_makeconf(mymakeconffile):
9259 - if os.path.exists(mymakeconffile):
9260 - try:
9261 - try:
9262 - import snakeoil.fileutils
9263 - return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
9264 - except ImportError:
9265 - try:
9266 - import portage.util
9267 - return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
9268 - except:
9269 - try:
9270 - import portage_util
9271 - return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
9272 - except ImportError:
9273 - myf=open(mymakeconffile,"r")
9274 - mylines=myf.readlines()
9275 - myf.close()
9276 - return parse_makeconf(mylines)
9277 - except:
9278 - raise CatalystError, "Could not parse make.conf file "+mymakeconffile
9279 - else:
9280 - makeconf={}
9281 - return makeconf
9282 -
9283 -def msg(mymsg,verblevel=1):
9284 - if verbosity>=verblevel:
9285 - print mymsg
9286 -
9287 -def pathcompare(path1,path2):
9288 - # Change double slashes to slash
9289 - path1 = re.sub(r"//",r"/",path1)
9290 - path2 = re.sub(r"//",r"/",path2)
9291 - # Removing ending slash
9292 - path1 = re.sub("/$","",path1)
9293 - path2 = re.sub("/$","",path2)
9294 -
9295 - if path1 == path2:
9296 - return 1
9297 - return 0
9298 -
9299 -def ismount(path):
9300 - "enhanced to handle bind mounts"
9301 - if os.path.ismount(path):
9302 - return 1
9303 - a=os.popen("mount")
9304 - mylines=a.readlines()
9305 - a.close()
9306 - for line in mylines:
9307 - mysplit=line.split()
9308 - if pathcompare(path,mysplit[2]):
9309 - return 1
9310 - return 0
9311 -
9312 -def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
9313 - "helper function to help targets parse additional arguments"
9314 - global valid_config_file_values
9315 -
9316 - messages = []
9317 - for x in addlargs.keys():
9318 - if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
9319 - messages.append("Argument \""+x+"\" not recognized.")
9320 - else:
9321 - myspec[x]=addlargs[x]
9322 -
9323 - for x in requiredspec:
9324 - if x not in myspec:
9325 - messages.append("Required argument \""+x+"\" not specified.")
9326 -
9327 - if messages:
9328 - raise CatalystError, '\n\tAlso: '.join(messages)
9329 -
9330 -def touch(myfile):
9331 - try:
9332 - myf=open(myfile,"w")
9333 - myf.close()
9334 - except IOError:
9335 - raise CatalystError, "Could not touch "+myfile+"."
9336 -
9337 -def countdown(secs=5, doing="Starting"):
9338 - if secs:
9339 - print ">>> Waiting",secs,"seconds before starting..."
9340 - print ">>> (Control-C to abort)...\n"+doing+" in: ",
9341 - ticks=range(secs)
9342 - ticks.reverse()
9343 - for sec in ticks:
9344 - sys.stdout.write(str(sec+1)+" ")
9345 - sys.stdout.flush()
9346 - time.sleep(1)
9347 - print
9348 -
9349 -def normpath(mypath):
9350 - TrailingSlash=False
9351 - if mypath[-1] == "/":
9352 - TrailingSlash=True
9353 - newpath = os.path.normpath(mypath)
9354 - if len(newpath) > 1:
9355 - if newpath[:2] == "//":
9356 - newpath = newpath[1:]
9357 - if TrailingSlash:
9358 - newpath=newpath+'/'
9359 - return newpath
9360 diff --git a/modules/embedded_target.py b/modules/embedded_target.py
9361 deleted file mode 100644
9362 index f38ea00..0000000
9363 --- a/modules/embedded_target.py
9364 +++ /dev/null
9365 @@ -1,51 +0,0 @@
9366 -"""
9367 -Enbedded target, similar to the stage2 target, builds upon a stage2 tarball.
9368 -
9369 -A stage2 tarball is unpacked, but instead
9370 -of building a stage3, it emerges @system into another directory
9371 -inside the stage2 system. This way, we do not have to emerge GCC/portage
9372 -into the staged system.
9373 -It may sound complicated but basically it runs
9374 -ROOT=/tmp/submerge emerge --something foo bar .
9375 -"""
9376 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
9377 -
9378 -import os,string,imp,types,shutil
9379 -from catalyst_support import *
9380 -from generic_stage_target import *
9381 -from stat import *
9382 -
9383 -class embedded_target(generic_stage_target):
9384 - """
9385 - Builder class for embedded target
9386 - """
9387 - def __init__(self,spec,addlargs):
9388 - self.required_values=[]
9389 - self.valid_values=[]
9390 - self.valid_values.extend(["embedded/empty","embedded/rm","embedded/unmerge","embedded/fs-prepare","embedded/fs-finish","embedded/mergeroot","embedded/packages","embedded/fs-type","embedded/runscript","boot/kernel","embedded/linuxrc"])
9391 - self.valid_values.extend(["embedded/use"])
9392 - if "embedded/fs-type" in addlargs:
9393 - self.valid_values.append("embedded/fs-ops")
9394 -
9395 - generic_stage_target.__init__(self,spec,addlargs)
9396 - self.set_build_kernel_vars(addlargs)
9397 -
9398 - def set_action_sequence(self):
9399 - self.settings["action_sequence"]=["dir_setup","unpack","unpack_snapshot",\
9400 - "config_profile_link","setup_confdir",\
9401 - "portage_overlay","bind","chroot_setup",\
9402 - "setup_environment","build_kernel","build_packages",\
9403 - "bootloader","root_overlay","fsscript","unmerge",\
9404 - "unbind","remove","empty","clean","capture","clear_autoresume"]
9405 -
9406 - def set_stage_path(self):
9407 - self.settings["stage_path"]=normpath(self.settings["chroot_path"]+"/tmp/mergeroot")
9408 - print "embedded stage path is "+self.settings["stage_path"]
9409 -
9410 - def set_root_path(self):
9411 - self.settings["root_path"]=normpath("/tmp/mergeroot")
9412 - print "embedded root path is "+self.settings["root_path"]
9413 -
9414 -def register(foo):
9415 - foo.update({"embedded":embedded_target})
9416 - return foo
9417 diff --git a/modules/generic_stage_target.py b/modules/generic_stage_target.py
9418 deleted file mode 100644
9419 index 1fbc733..0000000
9420 --- a/modules/generic_stage_target.py
9421 +++ /dev/null
9422 @@ -1,1692 +0,0 @@
9423 -import os,string,imp,types,shutil
9424 -from catalyst_support import *
9425 -from generic_target import *
9426 -from stat import *
9427 -import catalyst_lock
9428 -
9429 -class generic_stage_target(generic_target):
9430 - """
9431 - This class does all of the chroot setup, copying of files, etc. It is
9432 - the driver class for pretty much everything that Catalyst does.
9433 - """
9434 - def __init__(self,myspec,addlargs):
9435 - self.required_values.extend(["version_stamp","target","subarch",\
9436 - "rel_type","profile","snapshot","source_subpath"])
9437 -
9438 - self.valid_values.extend(["version_stamp","target","subarch",\
9439 - "rel_type","profile","snapshot","source_subpath","portage_confdir",\
9440 - "cflags","cxxflags","ldflags","cbuild","hostuse","portage_overlay",\
9441 - "distcc_hosts","makeopts","pkgcache_path","kerncache_path"])
9442 -
9443 - self.set_valid_build_kernel_vars(addlargs)
9444 - generic_target.__init__(self,myspec,addlargs)
9445 -
9446 - """
9447 - The semantics of subarchmap and machinemap changed a bit in 2.0.3 to
9448 - work better with vapier's CBUILD stuff. I've removed the "monolithic"
9449 - machinemap from this file and split up its contents amongst the
9450 - various arch/foo.py files.
9451 -
9452 - When register() is called on each module in the arch/ dir, it now
9453 - returns a tuple instead of acting on the subarchmap dict that is
9454 - passed to it. The tuple contains the values that were previously
9455 - added to subarchmap as well as a new list of CHOSTs that go along
9456 - with that arch. This allows us to build machinemap on the fly based
9457 - on the keys in subarchmap and the values of the 2nd list returned
9458 - (tmpmachinemap).
9459 -
9460 - Also, after talking with vapier. I have a slightly better idea of what
9461 - certain variables are used for and what they should be set to. Neither
9462 - 'buildarch' or 'hostarch' are used directly, so their value doesn't
9463 - really matter. They are just compared to determine if we are
9464 - cross-compiling. Because of this, they are just set to the name of the
9465 - module in arch/ that the subarch is part of to make things simpler.
9466 - The entire build process is still based off of 'subarch' like it was
9467 - previously. -agaffney
9468 - """
9469 -
9470 - self.archmap = {}
9471 - self.subarchmap = {}
9472 - machinemap = {}
9473 - for x in [x[:-3] for x in os.listdir(self.settings["sharedir"]+\
9474 - "/arch/") if x.endswith(".py")]:
9475 - try:
9476 - fh=open(self.settings["sharedir"]+"/arch/"+x+".py")
9477 - """
9478 - This next line loads the plugin as a module and assigns it to
9479 - archmap[x]
9480 - """
9481 - self.archmap[x]=imp.load_module(x,fh,"arch/"+x+\
9482 - ".py",(".py","r",imp.PY_SOURCE))
9483 - """
9484 - This next line registers all the subarches supported in the
9485 - plugin
9486 - """
9487 - tmpsubarchmap, tmpmachinemap = self.archmap[x].register()
9488 - self.subarchmap.update(tmpsubarchmap)
9489 - for machine in tmpmachinemap:
9490 - machinemap[machine] = x
9491 - for subarch in tmpsubarchmap:
9492 - machinemap[subarch] = x
9493 - fh.close()
9494 - except IOError:
9495 - """
9496 - This message should probably change a bit, since everything in
9497 - the dir should load just fine. If it doesn't, it's probably a
9498 - syntax error in the module
9499 - """
9500 - msg("Can't find/load "+x+".py plugin in "+\
9501 - self.settings["sharedir"]+"/arch/")
9502 -
9503 - if "chost" in self.settings:
9504 - hostmachine = self.settings["chost"].split("-")[0]
9505 - if hostmachine not in machinemap:
9506 - raise CatalystError, "Unknown host machine type "+hostmachine
9507 - self.settings["hostarch"]=machinemap[hostmachine]
9508 - else:
9509 - hostmachine = self.settings["subarch"]
9510 - if hostmachine in machinemap:
9511 - hostmachine = machinemap[hostmachine]
9512 - self.settings["hostarch"]=hostmachine
9513 - if "cbuild" in self.settings:
9514 - buildmachine = self.settings["cbuild"].split("-")[0]
9515 - else:
9516 - buildmachine = os.uname()[4]
9517 - if buildmachine not in machinemap:
9518 - raise CatalystError, "Unknown build machine type "+buildmachine
9519 - self.settings["buildarch"]=machinemap[buildmachine]
9520 - self.settings["crosscompile"]=(self.settings["hostarch"]!=\
9521 - self.settings["buildarch"])
9522 -
9523 - """ Call arch constructor, pass our settings """
9524 - try:
9525 - self.arch=self.subarchmap[self.settings["subarch"]](self.settings)
9526 - except KeyError:
9527 - print "Invalid subarch: "+self.settings["subarch"]
9528 - print "Choose one of the following:",
9529 - for x in self.subarchmap:
9530 - print x,
9531 - print
9532 - sys.exit(2)
9533 -
9534 - print "Using target:",self.settings["target"]
9535 - """ Print a nice informational message """
9536 - if self.settings["buildarch"]==self.settings["hostarch"]:
9537 - print "Building natively for",self.settings["hostarch"]
9538 - elif self.settings["crosscompile"]:
9539 - print "Cross-compiling on",self.settings["buildarch"],\
9540 - "for different machine type",self.settings["hostarch"]
9541 - else:
9542 - print "Building on",self.settings["buildarch"],\
9543 - "for alternate personality type",self.settings["hostarch"]
9544 -
9545 - """ This must be set first as other set_ options depend on this """
9546 - self.set_spec_prefix()
9547 -
9548 - """ Define all of our core variables """
9549 - self.set_target_profile()
9550 - self.set_target_subpath()
9551 - self.set_source_subpath()
9552 -
9553 - """ Set paths """
9554 - self.set_snapshot_path()
9555 - self.set_root_path()
9556 - self.set_source_path()
9557 - self.set_snapcache_path()
9558 - self.set_chroot_path()
9559 - self.set_autoresume_path()
9560 - self.set_dest_path()
9561 - self.set_stage_path()
9562 - self.set_target_path()
9563 -
9564 - self.set_controller_file()
9565 - self.set_action_sequence()
9566 - self.set_use()
9567 - self.set_cleanables()
9568 - self.set_iso_volume_id()
9569 - self.set_build_kernel_vars()
9570 - self.set_fsscript()
9571 - self.set_install_mask()
9572 - self.set_rcadd()
9573 - self.set_rcdel()
9574 - self.set_cdtar()
9575 - self.set_fstype()
9576 - self.set_fsops()
9577 - self.set_iso()
9578 - self.set_packages()
9579 - self.set_rm()
9580 - self.set_linuxrc()
9581 - self.set_busybox_config()
9582 - self.set_overlay()
9583 - self.set_portage_overlay()
9584 - self.set_root_overlay()
9585 -
9586 - """
9587 - This next line checks to make sure that the specified variables exist
9588 - on disk.
9589 - """
9590 - #pdb.set_trace()
9591 - file_locate(self.settings,["source_path","snapshot_path","distdir"],\
9592 - expand=0)
9593 - """ If we are using portage_confdir, check that as well. """
9594 - if "portage_confdir" in self.settings:
9595 - file_locate(self.settings,["portage_confdir"],expand=0)
9596 -
9597 - """ Setup our mount points """
9598 - if "SNAPCACHE" in self.settings:
9599 - self.mounts=["/proc","/dev","/usr/portage","/usr/portage/distfiles","/var/tmp/portage"]
9600 - self.mountmap={"/proc":"/proc","/dev":"/dev","/dev/pts":"/dev/pts",\
9601 - "/usr/portage":self.settings["snapshot_cache_path"]+"/portage",\
9602 - "/usr/portage/distfiles":self.settings["distdir"],"/var/tmp/portage":"tmpfs"}
9603 - else:
9604 - self.mounts=["proc","dev", "distdir", "port_tmpdir"]
9605 - self.mountmap={"proc":"/proc", "dev":"/dev", "pts":"/dev/pts",
9606 - "distdir":self.settings["distdir"], "port_tmpdir":"tmpfs"}
9607 - if os.uname()[0] == "Linux":
9608 - self.mounts.append("pts")
9609 -
9610 - self.set_mounts()
9611 -
9612 - """
9613 - Configure any user specified options (either in catalyst.conf or on
9614 - the command line).
9615 - """
9616 - if "PKGCACHE" in self.settings:
9617 - self.set_pkgcache_path()
9618 - print "Location of the package cache is "+\
9619 - self.settings["pkgcache_path"]
9620 - self.mounts.append("packagedir")
9621 - self.mountmap["packagedir"] = self.settings["pkgcache_path"]
9622 -
9623 - if "KERNCACHE" in self.settings:
9624 - self.set_kerncache_path()
9625 - print "Location of the kerncache is "+\
9626 - self.settings["kerncache_path"]
9627 - self.mounts.append("kerncache")
9628 - self.mountmap["kerncache"]=self.settings["kerncache_path"]
9629 -
9630 - if "CCACHE" in self.settings:
9631 - if "CCACHE_DIR" in os.environ:
9632 - ccdir=os.environ["CCACHE_DIR"]
9633 - del os.environ["CCACHE_DIR"]
9634 - else:
9635 - ccdir="/root/.ccache"
9636 - if not os.path.isdir(ccdir):
9637 - raise CatalystError,\
9638 - "Compiler cache support can't be enabled (can't find "+\
9639 - ccdir+")"
9640 - self.mounts.append("ccache")
9641 - self.mountmap["ccache"]=ccdir
9642 - """ for the chroot: """
9643 - self.env["CCACHE_DIR"]="/var/tmp/ccache"
9644 -
9645 - if "ICECREAM" in self.settings:
9646 - self.mounts.append("/var/cache/icecream")
9647 - self.mountmap["/var/cache/icecream"]="/var/cache/icecream"
9648 - self.env["PATH"]="/usr/lib/icecc/bin:"+self.env["PATH"]
9649 -
9650 - if "port_logdir" in self.settings:
9651 - self.mounts.append("/var/log/portage")
9652 - self.mountmap["/var/log/portage"]=self.settings["port_logdir"]
9653 - self.env["PORT_LOGDIR"]="/var/log/portage"
9654 - self.env["PORT_LOGDIR_CLEAN"]='find "${PORT_LOGDIR}" -type f ! -name "summary.log*" -mtime +30 -delete'
9655 -
9656 - def override_cbuild(self):
9657 - if "CBUILD" in self.makeconf:
9658 - self.settings["CBUILD"]=self.makeconf["CBUILD"]
9659 -
9660 - def override_chost(self):
9661 - if "CHOST" in self.makeconf:
9662 - self.settings["CHOST"]=self.makeconf["CHOST"]
9663 -
9664 - def override_cflags(self):
9665 - if "CFLAGS" in self.makeconf:
9666 - self.settings["CFLAGS"]=self.makeconf["CFLAGS"]
9667 -
9668 - def override_cxxflags(self):
9669 - if "CXXFLAGS" in self.makeconf:
9670 - self.settings["CXXFLAGS"]=self.makeconf["CXXFLAGS"]
9671 -
9672 - def override_ldflags(self):
9673 - if "LDFLAGS" in self.makeconf:
9674 - self.settings["LDFLAGS"]=self.makeconf["LDFLAGS"]
9675 -
9676 - def set_install_mask(self):
9677 - if "install_mask" in self.settings:
9678 - if type(self.settings["install_mask"])!=types.StringType:
9679 - self.settings["install_mask"]=\
9680 - string.join(self.settings["install_mask"])
9681 -
9682 - def set_spec_prefix(self):
9683 - self.settings["spec_prefix"]=self.settings["target"]
9684 -
9685 - def set_target_profile(self):
9686 - self.settings["target_profile"]=self.settings["profile"]
9687 -
9688 - def set_target_subpath(self):
9689 - self.settings["target_subpath"]=self.settings["rel_type"]+"/"+\
9690 - self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
9691 - self.settings["version_stamp"]
9692 -
9693 - def set_source_subpath(self):
9694 - if type(self.settings["source_subpath"])!=types.StringType:
9695 - raise CatalystError,\
9696 - "source_subpath should have been a string. Perhaps you have something wrong in your spec file?"
9697 -
9698 - def set_pkgcache_path(self):
9699 - if "pkgcache_path" in self.settings:
9700 - if type(self.settings["pkgcache_path"])!=types.StringType:
9701 - self.settings["pkgcache_path"]=\
9702 - normpath(string.join(self.settings["pkgcache_path"]))
9703 - else:
9704 - self.settings["pkgcache_path"]=\
9705 - normpath(self.settings["storedir"]+"/packages/"+\
9706 - self.settings["target_subpath"]+"/")
9707 -
9708 - def set_kerncache_path(self):
9709 - if "kerncache_path" in self.settings:
9710 - if type(self.settings["kerncache_path"])!=types.StringType:
9711 - self.settings["kerncache_path"]=\
9712 - normpath(string.join(self.settings["kerncache_path"]))
9713 - else:
9714 - self.settings["kerncache_path"]=normpath(self.settings["storedir"]+\
9715 - "/kerncache/"+self.settings["target_subpath"]+"/")
9716 -
9717 - def set_target_path(self):
9718 - self.settings["target_path"]=normpath(self.settings["storedir"]+\
9719 - "/builds/"+self.settings["target_subpath"]+".tar.bz2")
9720 - if "AUTORESUME" in self.settings\
9721 - and os.path.exists(self.settings["autoresume_path"]+\
9722 - "setup_target_path"):
9723 - print \
9724 - "Resume point detected, skipping target path setup operation..."
9725 - else:
9726 - """ First clean up any existing target stuff """
9727 - # XXX WTF are we removing the old tarball before we start building the
9728 - # XXX new one? If the build fails, you don't want to be left with
9729 - # XXX nothing at all
9730 -# if os.path.isfile(self.settings["target_path"]):
9731 -# cmd("rm -f "+self.settings["target_path"],\
9732 -# "Could not remove existing file: "\
9733 -# +self.settings["target_path"],env=self.env)
9734 - touch(self.settings["autoresume_path"]+"setup_target_path")
9735 -
9736 - if not os.path.exists(self.settings["storedir"]+"/builds/"):
9737 - os.makedirs(self.settings["storedir"]+"/builds/")
9738 -
9739 - def set_fsscript(self):
9740 - if self.settings["spec_prefix"]+"/fsscript" in self.settings:
9741 - self.settings["fsscript"]=\
9742 - self.settings[self.settings["spec_prefix"]+"/fsscript"]
9743 - del self.settings[self.settings["spec_prefix"]+"/fsscript"]
9744 -
9745 - def set_rcadd(self):
9746 - if self.settings["spec_prefix"]+"/rcadd" in self.settings:
9747 - self.settings["rcadd"]=\
9748 - self.settings[self.settings["spec_prefix"]+"/rcadd"]
9749 - del self.settings[self.settings["spec_prefix"]+"/rcadd"]
9750 -
9751 - def set_rcdel(self):
9752 - if self.settings["spec_prefix"]+"/rcdel" in self.settings:
9753 - self.settings["rcdel"]=\
9754 - self.settings[self.settings["spec_prefix"]+"/rcdel"]
9755 - del self.settings[self.settings["spec_prefix"]+"/rcdel"]
9756 -
9757 - def set_cdtar(self):
9758 - if self.settings["spec_prefix"]+"/cdtar" in self.settings:
9759 - self.settings["cdtar"]=\
9760 - normpath(self.settings[self.settings["spec_prefix"]+"/cdtar"])
9761 - del self.settings[self.settings["spec_prefix"]+"/cdtar"]
9762 -
9763 - def set_iso(self):
9764 - if self.settings["spec_prefix"]+"/iso" in self.settings:
9765 - if self.settings[self.settings["spec_prefix"]+"/iso"].startswith('/'):
9766 - self.settings["iso"]=\
9767 - normpath(self.settings[self.settings["spec_prefix"]+"/iso"])
9768 - else:
9769 - # This automatically prepends the build dir to the ISO output path
9770 - # if it doesn't start with a /
9771 - self.settings["iso"] = normpath(self.settings["storedir"] + \
9772 - "/builds/" + self.settings["rel_type"] + "/" + \
9773 - self.settings[self.settings["spec_prefix"]+"/iso"])
9774 - del self.settings[self.settings["spec_prefix"]+"/iso"]
9775 -
9776 - def set_fstype(self):
9777 - if self.settings["spec_prefix"]+"/fstype" in self.settings:
9778 - self.settings["fstype"]=\
9779 - self.settings[self.settings["spec_prefix"]+"/fstype"]
9780 - del self.settings[self.settings["spec_prefix"]+"/fstype"]
9781 -
9782 - if "fstype" not in self.settings:
9783 - self.settings["fstype"]="normal"
9784 - for x in self.valid_values:
9785 - if x == self.settings["spec_prefix"]+"/fstype":
9786 - print "\n"+self.settings["spec_prefix"]+\
9787 - "/fstype is being set to the default of \"normal\"\n"
9788 -
9789 - def set_fsops(self):
9790 - if "fstype" in self.settings:
9791 - self.valid_values.append("fsops")
9792 - if self.settings["spec_prefix"]+"/fsops" in self.settings:
9793 - self.settings["fsops"]=\
9794 - self.settings[self.settings["spec_prefix"]+"/fsops"]
9795 - del self.settings[self.settings["spec_prefix"]+"/fsops"]
9796 -
9797 - def set_source_path(self):
9798 - if "SEEDCACHE" in self.settings\
9799 - and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+\
9800 - self.settings["source_subpath"]+"/")):
9801 - self.settings["source_path"]=normpath(self.settings["storedir"]+\
9802 - "/tmp/"+self.settings["source_subpath"]+"/")
9803 - else:
9804 - self.settings["source_path"]=normpath(self.settings["storedir"]+\
9805 - "/builds/"+self.settings["source_subpath"]+".tar.bz2")
9806 - if os.path.isfile(self.settings["source_path"]):
9807 - # XXX: Is this even necessary if the previous check passes?
9808 - if os.path.exists(self.settings["source_path"]):
9809 - self.settings["source_path_hash"]=\
9810 - generate_hash(self.settings["source_path"],\
9811 - hash_function=self.settings["hash_function"],\
9812 - verbose=False)
9813 - print "Source path set to "+self.settings["source_path"]
9814 - if os.path.isdir(self.settings["source_path"]):
9815 - print "\tIf this is not desired, remove this directory or turn off"
9816 - print "\tseedcache in the options of catalyst.conf the source path"
9817 - print "\twill then be "+\
9818 - normpath(self.settings["storedir"]+"/builds/"+\
9819 - self.settings["source_subpath"]+".tar.bz2\n")
9820 -
9821 - def set_dest_path(self):
9822 - if "root_path" in self.settings:
9823 - self.settings["destpath"]=normpath(self.settings["chroot_path"]+\
9824 - self.settings["root_path"])
9825 - else:
9826 - self.settings["destpath"]=normpath(self.settings["chroot_path"])
9827 -
9828 - def set_cleanables(self):
9829 - self.settings["cleanables"]=["/etc/resolv.conf","/var/tmp/*","/tmp/*",\
9830 - "/root/*", self.settings["portdir"]]
9831 -
9832 - def set_snapshot_path(self):
9833 - self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
9834 - "/snapshots/" + self.settings["snapshot_name"] +
9835 - self.settings["snapshot"]+".tar.xz")
9836 -
9837 - if os.path.exists(self.settings["snapshot_path"]):
9838 - self.settings["snapshot_path_hash"]=\
9839 - generate_hash(self.settings["snapshot_path"],\
9840 - hash_function=self.settings["hash_function"],verbose=False)
9841 - else:
9842 - self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
9843 - "/snapshots/" + self.settings["snapshot_name"] +
9844 - self.settings["snapshot"]+".tar.bz2")
9845 -
9846 - if os.path.exists(self.settings["snapshot_path"]):
9847 - self.settings["snapshot_path_hash"]=\
9848 - generate_hash(self.settings["snapshot_path"],\
9849 - hash_function=self.settings["hash_function"],verbose=False)
9850 -
9851 - def set_snapcache_path(self):
9852 - if "SNAPCACHE" in self.settings:
9853 - self.settings["snapshot_cache_path"]=\
9854 - normpath(self.settings["snapshot_cache"]+"/"+\
9855 - self.settings["snapshot"]+"/")
9856 - self.snapcache_lock=\
9857 - catalyst_lock.LockDir(self.settings["snapshot_cache_path"])
9858 - print "Caching snapshot to "+self.settings["snapshot_cache_path"]
9859 -
9860 - def set_chroot_path(self):
9861 - """
9862 - NOTE: the trailing slash is very important!
9863 - Things *will* break without it!
9864 - """
9865 - self.settings["chroot_path"]=normpath(self.settings["storedir"]+\
9866 - "/tmp/"+self.settings["target_subpath"]+"/")
9867 - self.chroot_lock=catalyst_lock.LockDir(self.settings["chroot_path"])
9868 -
9869 - def set_autoresume_path(self):
9870 - self.settings["autoresume_path"]=normpath(self.settings["storedir"]+\
9871 - "/tmp/"+self.settings["rel_type"]+"/"+".autoresume-"+\
9872 - self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
9873 - self.settings["version_stamp"]+"/")
9874 - if "AUTORESUME" in self.settings:
9875 - print "The autoresume path is " + self.settings["autoresume_path"]
9876 - if not os.path.exists(self.settings["autoresume_path"]):
9877 - os.makedirs(self.settings["autoresume_path"],0755)
9878 -
9879 - def set_controller_file(self):
9880 - self.settings["controller_file"]=normpath(self.settings["sharedir"]+\
9881 - "/targets/"+self.settings["target"]+"/"+self.settings["target"]+\
9882 - "-controller.sh")
9883 -
9884 - def set_iso_volume_id(self):
9885 - if self.settings["spec_prefix"]+"/volid" in self.settings:
9886 - self.settings["iso_volume_id"]=\
9887 - self.settings[self.settings["spec_prefix"]+"/volid"]
9888 - if len(self.settings["iso_volume_id"])>32:
9889 - raise CatalystError,\
9890 - "ISO volume ID must not exceed 32 characters."
9891 - else:
9892 - self.settings["iso_volume_id"]="catalyst "+self.settings["snapshot"]
9893 -
9894 - def set_action_sequence(self):
9895 - """ Default action sequence for run method """
9896 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
9897 - "setup_confdir","portage_overlay",\
9898 - "base_dirs","bind","chroot_setup","setup_environment",\
9899 - "run_local","preclean","unbind","clean"]
9900 -# if "TARBALL" in self.settings or \
9901 -# "FETCH" not in self.settings:
9902 - if "FETCH" not in self.settings:
9903 - self.settings["action_sequence"].append("capture")
9904 - self.settings["action_sequence"].append("clear_autoresume")
9905 -
9906 - def set_use(self):
9907 - if self.settings["spec_prefix"]+"/use" in self.settings:
9908 - self.settings["use"]=\
9909 - self.settings[self.settings["spec_prefix"]+"/use"]
9910 - del self.settings[self.settings["spec_prefix"]+"/use"]
9911 - if "use" not in self.settings:
9912 - self.settings["use"]=""
9913 - if type(self.settings["use"])==types.StringType:
9914 - self.settings["use"]=self.settings["use"].split()
9915 -
9916 - # Force bindist when options ask for it
9917 - if "BINDIST" in self.settings:
9918 - self.settings["use"].append("bindist")
9919 -
9920 - def set_stage_path(self):
9921 - self.settings["stage_path"]=normpath(self.settings["chroot_path"])
9922 -
9923 - def set_mounts(self):
9924 - pass
9925 -
9926 - def set_packages(self):
9927 - pass
9928 -
9929 - def set_rm(self):
9930 - if self.settings["spec_prefix"]+"/rm" in self.settings:
9931 - if type(self.settings[self.settings["spec_prefix"]+\
9932 - "/rm"])==types.StringType:
9933 - self.settings[self.settings["spec_prefix"]+"/rm"]=\
9934 - self.settings[self.settings["spec_prefix"]+"/rm"].split()
9935 -
9936 - def set_linuxrc(self):
9937 - if self.settings["spec_prefix"]+"/linuxrc" in self.settings:
9938 - if type(self.settings[self.settings["spec_prefix"]+\
9939 - "/linuxrc"])==types.StringType:
9940 - self.settings["linuxrc"]=\
9941 - self.settings[self.settings["spec_prefix"]+"/linuxrc"]
9942 - del self.settings[self.settings["spec_prefix"]+"/linuxrc"]
9943 -
9944 - def set_busybox_config(self):
9945 - if self.settings["spec_prefix"]+"/busybox_config" in self.settings:
9946 - if type(self.settings[self.settings["spec_prefix"]+\
9947 - "/busybox_config"])==types.StringType:
9948 - self.settings["busybox_config"]=\
9949 - self.settings[self.settings["spec_prefix"]+"/busybox_config"]
9950 - del self.settings[self.settings["spec_prefix"]+"/busybox_config"]
9951 -
9952 - def set_portage_overlay(self):
9953 - if "portage_overlay" in self.settings:
9954 - if type(self.settings["portage_overlay"])==types.StringType:
9955 - self.settings["portage_overlay"]=\
9956 - self.settings["portage_overlay"].split()
9957 - print "portage_overlay directories are set to: \""+\
9958 - string.join(self.settings["portage_overlay"])+"\""
9959 -
9960 - def set_overlay(self):
9961 - if self.settings["spec_prefix"]+"/overlay" in self.settings:
9962 - if type(self.settings[self.settings["spec_prefix"]+\
9963 - "/overlay"])==types.StringType:
9964 - self.settings[self.settings["spec_prefix"]+"/overlay"]=\
9965 - self.settings[self.settings["spec_prefix"]+\
9966 - "/overlay"].split()
9967 -
9968 - def set_root_overlay(self):
9969 - if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
9970 - if type(self.settings[self.settings["spec_prefix"]+\
9971 - "/root_overlay"])==types.StringType:
9972 - self.settings[self.settings["spec_prefix"]+"/root_overlay"]=\
9973 - self.settings[self.settings["spec_prefix"]+\
9974 - "/root_overlay"].split()
9975 -
9976 - def set_root_path(self):
9977 - """ ROOT= variable for emerges """
9978 - self.settings["root_path"]="/"
9979 -
9980 - def set_valid_build_kernel_vars(self,addlargs):
9981 - if "boot/kernel" in addlargs:
9982 - if type(addlargs["boot/kernel"])==types.StringType:
9983 - loopy=[addlargs["boot/kernel"]]
9984 - else:
9985 - loopy=addlargs["boot/kernel"]
9986 -
9987 - for x in loopy:
9988 - self.valid_values.append("boot/kernel/"+x+"/aliases")
9989 - self.valid_values.append("boot/kernel/"+x+"/config")
9990 - self.valid_values.append("boot/kernel/"+x+"/console")
9991 - self.valid_values.append("boot/kernel/"+x+"/extraversion")
9992 - self.valid_values.append("boot/kernel/"+x+"/gk_action")
9993 - self.valid_values.append("boot/kernel/"+x+"/gk_kernargs")
9994 - self.valid_values.append("boot/kernel/"+x+"/initramfs_overlay")
9995 - self.valid_values.append("boot/kernel/"+x+"/machine_type")
9996 - self.valid_values.append("boot/kernel/"+x+"/sources")
9997 - self.valid_values.append("boot/kernel/"+x+"/softlevel")
9998 - self.valid_values.append("boot/kernel/"+x+"/use")
9999 - self.valid_values.append("boot/kernel/"+x+"/packages")
10000 - if "boot/kernel/"+x+"/packages" in addlargs:
10001 - if type(addlargs["boot/kernel/"+x+\
10002 - "/packages"])==types.StringType:
10003 - addlargs["boot/kernel/"+x+"/packages"]=\
10004 - [addlargs["boot/kernel/"+x+"/packages"]]
10005 -
10006 - def set_build_kernel_vars(self):
10007 - if self.settings["spec_prefix"]+"/gk_mainargs" in self.settings:
10008 - self.settings["gk_mainargs"]=\
10009 - self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
10010 - del self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
10011 -
10012 - def kill_chroot_pids(self):
10013 - print "Checking for processes running in chroot and killing them."
10014 -
10015 - """
10016 - Force environment variables to be exported so script can see them
10017 - """
10018 - self.setup_environment()
10019 -
10020 - if os.path.exists(self.settings["sharedir"]+\
10021 - "/targets/support/kill-chroot-pids.sh"):
10022 - cmd("/bin/bash "+self.settings["sharedir"]+\
10023 - "/targets/support/kill-chroot-pids.sh",\
10024 - "kill-chroot-pids script failed.",env=self.env)
10025 -
10026 - def mount_safety_check(self):
10027 - mypath=self.settings["chroot_path"]
10028 -
10029 - """
10030 - Check and verify that none of our paths in mypath are mounted. We don't
10031 - want to clean up with things still mounted, and this allows us to check.
10032 - Returns 1 on ok, 0 on "something is still mounted" case.
10033 - """
10034 -
10035 - if not os.path.exists(mypath):
10036 - return
10037 -
10038 - for x in self.mounts:
10039 - if not os.path.exists(mypath + self.mountmap[x]):
10040 - continue
10041 -
10042 - if ismount(mypath +self.mountmap[x]):
10043 - """ Something is still mounted "" """
10044 - try:
10045 - print self.mountmap[x] + " is still mounted; performing auto-bind-umount...",
10046 - """ Try to umount stuff ourselves """
10047 - self.unbind()
10048 - if ismount(mypath + self.mountmap[x]):
10049 - raise CatalystError, "Auto-unbind failed for " + self.mountmap[x]
10050 - else:
10051 - print "Auto-unbind successful..."
10052 - except CatalystError:
10053 - raise CatalystError, "Unable to auto-unbind " + self.mountmap[x]
10054 -
10055 - def unpack(self):
10056 - unpack=True
10057 -
10058 - clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+\
10059 - "unpack")
10060 -
10061 - if "SEEDCACHE" in self.settings:
10062 - if os.path.isdir(self.settings["source_path"]):
10063 - """ SEEDCACHE Is a directory, use rsync """
10064 - unpack_cmd="rsync -a --delete "+self.settings["source_path"]+\
10065 - " "+self.settings["chroot_path"]
10066 - display_msg="\nStarting rsync from "+\
10067 - self.settings["source_path"]+"\nto "+\
10068 - self.settings["chroot_path"]+\
10069 - " (This may take some time) ...\n"
10070 - error_msg="Rsync of "+self.settings["source_path"]+" to "+\
10071 - self.settings["chroot_path"]+" failed."
10072 - else:
10073 - """ SEEDCACHE is a not a directory, try untar'ing """
10074 - print "Referenced SEEDCACHE does not appear to be a directory, trying to untar..."
10075 - display_msg="\nStarting tar extract from "+\
10076 - self.settings["source_path"]+"\nto "+\
10077 - self.settings["chroot_path"]+\
10078 - " (This may take some time) ...\n"
10079 - if "bz2" == self.settings["chroot_path"][-3:]:
10080 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10081 - self.settings["chroot_path"]
10082 - else:
10083 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10084 - self.settings["chroot_path"]
10085 - error_msg="Tarball extraction of "+\
10086 - self.settings["source_path"]+" to "+\
10087 - self.settings["chroot_path"]+" failed."
10088 - else:
10089 - """ No SEEDCACHE, use tar """
10090 - display_msg="\nStarting tar extract from "+\
10091 - self.settings["source_path"]+"\nto "+\
10092 - self.settings["chroot_path"]+\
10093 - " (This may take some time) ...\n"
10094 - if "bz2" == self.settings["chroot_path"][-3:]:
10095 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10096 - self.settings["chroot_path"]
10097 - else:
10098 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10099 - self.settings["chroot_path"]
10100 - error_msg="Tarball extraction of "+self.settings["source_path"]+\
10101 - " to "+self.settings["chroot_path"]+" failed."
10102 -
10103 - if "AUTORESUME" in self.settings:
10104 - if os.path.isdir(self.settings["source_path"]) \
10105 - and os.path.exists(self.settings["autoresume_path"]+"unpack"):
10106 - """ Autoresume is valid, SEEDCACHE is valid """
10107 - unpack=False
10108 - invalid_snapshot=False
10109 -
10110 - elif os.path.isfile(self.settings["source_path"]) \
10111 - and self.settings["source_path_hash"]==clst_unpack_hash:
10112 - """ Autoresume is valid, tarball is valid """
10113 - unpack=False
10114 - invalid_snapshot=True
10115 -
10116 - elif os.path.isdir(self.settings["source_path"]) \
10117 - and not os.path.exists(self.settings["autoresume_path"]+\
10118 - "unpack"):
10119 - """ Autoresume is invalid, SEEDCACHE """
10120 - unpack=True
10121 - invalid_snapshot=False
10122 -
10123 - elif os.path.isfile(self.settings["source_path"]) \
10124 - and self.settings["source_path_hash"]!=clst_unpack_hash:
10125 - """ Autoresume is invalid, tarball """
10126 - unpack=True
10127 - invalid_snapshot=True
10128 - else:
10129 - """ No autoresume, SEEDCACHE """
10130 - if "SEEDCACHE" in self.settings:
10131 - """ SEEDCACHE so let's run rsync and let it clean up """
10132 - if os.path.isdir(self.settings["source_path"]):
10133 - unpack=True
10134 - invalid_snapshot=False
10135 - elif os.path.isfile(self.settings["source_path"]):
10136 - """ Tarball so unpack and remove anything already there """
10137 - unpack=True
10138 - invalid_snapshot=True
10139 - """ No autoresume, no SEEDCACHE """
10140 - else:
10141 - """ Tarball so unpack and remove anything already there """
10142 - if os.path.isfile(self.settings["source_path"]):
10143 - unpack=True
10144 - invalid_snapshot=True
10145 - elif os.path.isdir(self.settings["source_path"]):
10146 - """ We should never reach this, so something is very wrong """
10147 - raise CatalystError,\
10148 - "source path is a dir but seedcache is not enabled"
10149 -
10150 - if unpack:
10151 - self.mount_safety_check()
10152 -
10153 - if invalid_snapshot:
10154 - if "AUTORESUME" in self.settings:
10155 - print "No Valid Resume point detected, cleaning up..."
10156 -
10157 - self.clear_autoresume()
10158 - self.clear_chroot()
10159 -
10160 - if not os.path.exists(self.settings["chroot_path"]):
10161 - os.makedirs(self.settings["chroot_path"])
10162 -
10163 - if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
10164 - os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
10165 -
10166 - if "PKGCACHE" in self.settings:
10167 - if not os.path.exists(self.settings["pkgcache_path"]):
10168 - os.makedirs(self.settings["pkgcache_path"],0755)
10169 -
10170 - if "KERNCACHE" in self.settings:
10171 - if not os.path.exists(self.settings["kerncache_path"]):
10172 - os.makedirs(self.settings["kerncache_path"],0755)
10173 -
10174 - print display_msg
10175 - cmd(unpack_cmd,error_msg,env=self.env)
10176 -
10177 - if "source_path_hash" in self.settings:
10178 - myf=open(self.settings["autoresume_path"]+"unpack","w")
10179 - myf.write(self.settings["source_path_hash"])
10180 - myf.close()
10181 - else:
10182 - touch(self.settings["autoresume_path"]+"unpack")
10183 - else:
10184 - print "Resume point detected, skipping unpack operation..."
10185 -
10186 - def unpack_snapshot(self):
10187 - unpack=True
10188 - snapshot_hash=read_from_clst(self.settings["autoresume_path"]+\
10189 - "unpack_portage")
10190 -
10191 - if "SNAPCACHE" in self.settings:
10192 - snapshot_cache_hash=\
10193 - read_from_clst(self.settings["snapshot_cache_path"]+\
10194 - "catalyst-hash")
10195 - destdir=self.settings["snapshot_cache_path"]
10196 - if "bz2" == self.settings["chroot_path"][-3:]:
10197 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+destdir
10198 - else:
10199 - unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+destdir
10200 - unpack_errmsg="Error unpacking snapshot"
10201 - cleanup_msg="Cleaning up invalid snapshot cache at \n\t"+\
10202 - self.settings["snapshot_cache_path"]+\
10203 - " (This can take a long time)..."
10204 - cleanup_errmsg="Error removing existing snapshot cache directory."
10205 - self.snapshot_lock_object=self.snapcache_lock
10206 -
10207 - if self.settings["snapshot_path_hash"]==snapshot_cache_hash:
10208 - print "Valid snapshot cache, skipping unpack of portage tree..."
10209 - unpack=False
10210 - else:
10211 - destdir=normpath(self.settings["chroot_path"] + self.settings["portdir"])
10212 - cleanup_errmsg="Error removing existing snapshot directory."
10213 - cleanup_msg=\
10214 - "Cleaning up existing portage tree (This can take a long time)..."
10215 - if "bz2" == self.settings["chroot_path"][-3:]:
10216 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+\
10217 - self.settings["chroot_path"]+"/usr"
10218 - else:
10219 - unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+\
10220 - self.settings["chroot_path"]+"/usr"
10221 - unpack_errmsg="Error unpacking snapshot"
10222 -
10223 - if "AUTORESUME" in self.settings \
10224 - and os.path.exists(self.settings["chroot_path"]+\
10225 - self.settings["portdir"]) \
10226 - and os.path.exists(self.settings["autoresume_path"]\
10227 - +"unpack_portage") \
10228 - and self.settings["snapshot_path_hash"] == snapshot_hash:
10229 - print \
10230 - "Valid Resume point detected, skipping unpack of portage tree..."
10231 - unpack=False
10232 -
10233 - if unpack:
10234 - if "SNAPCACHE" in self.settings:
10235 - self.snapshot_lock_object.write_lock()
10236 - if os.path.exists(destdir):
10237 - print cleanup_msg
10238 - cleanup_cmd="rm -rf "+destdir
10239 - cmd(cleanup_cmd,cleanup_errmsg,env=self.env)
10240 - if not os.path.exists(destdir):
10241 - os.makedirs(destdir,0755)
10242 -
10243 - print "Unpacking portage tree (This can take a long time) ..."
10244 - cmd(unpack_cmd,unpack_errmsg,env=self.env)
10245 -
10246 - if "SNAPCACHE" in self.settings:
10247 - myf=open(self.settings["snapshot_cache_path"]+"catalyst-hash","w")
10248 - myf.write(self.settings["snapshot_path_hash"])
10249 - myf.close()
10250 - else:
10251 - print "Setting snapshot autoresume point"
10252 - myf=open(self.settings["autoresume_path"]+"unpack_portage","w")
10253 - myf.write(self.settings["snapshot_path_hash"])
10254 - myf.close()
10255 -
10256 - if "SNAPCACHE" in self.settings:
10257 - self.snapshot_lock_object.unlock()
10258 -
10259 - def config_profile_link(self):
10260 - if "AUTORESUME" in self.settings \
10261 - and os.path.exists(self.settings["autoresume_path"]+\
10262 - "config_profile_link"):
10263 - print \
10264 - "Resume point detected, skipping config_profile_link operation..."
10265 - else:
10266 - # TODO: zmedico and I discussed making this a directory and pushing
10267 - # in a parent file, as well as other user-specified configuration.
10268 - print "Configuring profile link..."
10269 - cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.profile",\
10270 - "Error zapping profile link",env=self.env)
10271 - cmd("mkdir -p "+self.settings["chroot_path"]+"/etc/portage/")
10272 - cmd("ln -sf ../.." + self.settings["portdir"] + "/profiles/"+\
10273 - self.settings["target_profile"]+" "+\
10274 - self.settings["chroot_path"]+"/etc/portage/make.profile",\
10275 - "Error creating profile link",env=self.env)
10276 - touch(self.settings["autoresume_path"]+"config_profile_link")
10277 -
10278 - def setup_confdir(self):
10279 - if "AUTORESUME" in self.settings \
10280 - and os.path.exists(self.settings["autoresume_path"]+\
10281 - "setup_confdir"):
10282 - print "Resume point detected, skipping setup_confdir operation..."
10283 - else:
10284 - if "portage_confdir" in self.settings:
10285 - print "Configuring /etc/portage..."
10286 - cmd("rsync -a "+self.settings["portage_confdir"]+"/ "+\
10287 - self.settings["chroot_path"]+"/etc/portage/",\
10288 - "Error copying /etc/portage",env=self.env)
10289 - touch(self.settings["autoresume_path"]+"setup_confdir")
10290 -
10291 - def portage_overlay(self):
10292 - """ We copy the contents of our overlays to /usr/local/portage """
10293 - if "portage_overlay" in self.settings:
10294 - for x in self.settings["portage_overlay"]:
10295 - if os.path.exists(x):
10296 - print "Copying overlay dir " +x
10297 - cmd("mkdir -p "+self.settings["chroot_path"]+\
10298 - self.settings["local_overlay"],\
10299 - "Could not make portage_overlay dir",env=self.env)
10300 - cmd("cp -R "+x+"/* "+self.settings["chroot_path"]+\
10301 - self.settings["local_overlay"],\
10302 - "Could not copy portage_overlay",env=self.env)
10303 -
10304 - def root_overlay(self):
10305 - """ Copy over the root_overlay """
10306 - if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
10307 - for x in self.settings[self.settings["spec_prefix"]+\
10308 - "/root_overlay"]:
10309 - if os.path.exists(x):
10310 - print "Copying root_overlay: "+x
10311 - cmd("rsync -a "+x+"/ "+\
10312 - self.settings["chroot_path"],\
10313 - self.settings["spec_prefix"]+"/root_overlay: "+x+\
10314 - " copy failed.",env=self.env)
10315 -
10316 - def base_dirs(self):
10317 - pass
10318 -
10319 - def bind(self):
10320 - for x in self.mounts:
10321 - if not os.path.exists(self.settings["chroot_path"] + self.mountmap[x]):
10322 - os.makedirs(self.settings["chroot_path"]+x,0755)
10323 -
10324 - if not os.path.exists(self.mountmap[x]):
10325 - if not self.mountmap[x] == "tmpfs":
10326 - os.makedirs(self.mountmap[x],0755)
10327 -
10328 - src=self.mountmap[x]
10329 - if "SNAPCACHE" in self.settings and x == "/usr/portage":
10330 - self.snapshot_lock_object.read_lock()
10331 - if os.uname()[0] == "FreeBSD":
10332 - if src == "/dev":
10333 - retval=os.system("mount -t devfs none " +
10334 - self.settings["chroot_path"] + src)
10335 - else:
10336 - retval=os.system("mount_nullfs " + src + " " +
10337 - self.settings["chroot_path"] + src)
10338 - else:
10339 - if src == "tmpfs":
10340 - if "var_tmpfs_portage" in self.settings:
10341 - retval=os.system("mount -t tmpfs -o size="+\
10342 - self.settings["var_tmpfs_portage"]+"G "+src+" "+\
10343 - self.settings["chroot_path"]+x)
10344 - else:
10345 - retval=os.system("mount --bind " + src + " " +
10346 - self.settings["chroot_path"] + src)
10347 - if retval!=0:
10348 - self.unbind()
10349 - raise CatalystError,"Couldn't bind mount " + src
10350 -
10351 - def unbind(self):
10352 - ouch=0
10353 - mypath=self.settings["chroot_path"]
10354 - myrevmounts=self.mounts[:]
10355 - myrevmounts.reverse()
10356 - """ Unmount in reverse order for nested bind-mounts """
10357 - for x in myrevmounts:
10358 - if not os.path.exists(mypath + self.mountmap[x]):
10359 - continue
10360 -
10361 - if not ismount(mypath + self.mountmap[x]):
10362 - continue
10363 -
10364 - retval=os.system("umount "+\
10365 - os.path.join(mypath, self.mountmap[x].lstrip(os.path.sep)))
10366 -
10367 - if retval!=0:
10368 - warn("First attempt to unmount: " + mypath +
10369 - self.mountmap[x] +" failed.")
10370 - warn("Killing any pids still running in the chroot")
10371 -
10372 - self.kill_chroot_pids()
10373 -
10374 - retval2=os.system("umount " + mypath + self.mountmap[x])
10375 - if retval2!=0:
10376 - ouch=1
10377 - warn("Couldn't umount bind mount: " + mypath + self.mountmap[x])
10378 -
10379 - if "SNAPCACHE" in self.settings and x == "/usr/portage":
10380 - try:
10381 - """
10382 - It's possible the snapshot lock object isn't created yet.
10383 - This is because mount safety check calls unbind before the
10384 - target is fully initialized
10385 - """
10386 - self.snapshot_lock_object.unlock()
10387 - except:
10388 - pass
10389 - if ouch:
10390 - """
10391 - if any bind mounts really failed, then we need to raise
10392 - this to potentially prevent an upcoming bash stage cleanup script
10393 - from wiping our bind mounts.
10394 - """
10395 - raise CatalystError,\
10396 - "Couldn't umount one or more bind-mounts; aborting for safety."
10397 -
10398 - def chroot_setup(self):
10399 - self.makeconf=read_makeconf(self.settings["chroot_path"]+\
10400 - "/etc/portage/make.conf")
10401 - self.override_cbuild()
10402 - self.override_chost()
10403 - self.override_cflags()
10404 - self.override_cxxflags()
10405 - self.override_ldflags()
10406 - if "AUTORESUME" in self.settings \
10407 - and os.path.exists(self.settings["autoresume_path"]+"chroot_setup"):
10408 - print "Resume point detected, skipping chroot_setup operation..."
10409 - else:
10410 - print "Setting up chroot..."
10411 -
10412 - #self.makeconf=read_makeconf(self.settings["chroot_path"]+"/etc/portage/make.conf")
10413 -
10414 - cmd("cp /etc/resolv.conf "+self.settings["chroot_path"]+"/etc",\
10415 - "Could not copy resolv.conf into place.",env=self.env)
10416 -
10417 - """ Copy over the envscript, if applicable """
10418 - if "ENVSCRIPT" in self.settings:
10419 - if not os.path.exists(self.settings["ENVSCRIPT"]):
10420 - raise CatalystError,\
10421 - "Can't find envscript "+self.settings["ENVSCRIPT"]
10422 -
10423 - print "\nWarning!!!!"
10424 - print "\tOverriding certain env variables may cause catastrophic failure."
10425 - print "\tIf your build fails look here first as the possible problem."
10426 - print "\tCatalyst assumes you know what you are doing when setting"
10427 - print "\t\tthese variables."
10428 - print "\tCatalyst Maintainers use VERY minimal envscripts if used at all"
10429 - print "\tYou have been warned\n"
10430 -
10431 - cmd("cp "+self.settings["ENVSCRIPT"]+" "+\
10432 - self.settings["chroot_path"]+"/tmp/envscript",\
10433 - "Could not copy envscript into place.",env=self.env)
10434 -
10435 - """
10436 - Copy over /etc/hosts from the host in case there are any
10437 - specialties in there
10438 - """
10439 - if os.path.exists(self.settings["chroot_path"]+"/etc/hosts"):
10440 - cmd("mv "+self.settings["chroot_path"]+"/etc/hosts "+\
10441 - self.settings["chroot_path"]+"/etc/hosts.catalyst",\
10442 - "Could not backup /etc/hosts",env=self.env)
10443 - cmd("cp /etc/hosts "+self.settings["chroot_path"]+"/etc/hosts",\
10444 - "Could not copy /etc/hosts",env=self.env)
10445 -
10446 - """ Modify and write out make.conf (for the chroot) """
10447 - cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.conf",\
10448 - "Could not remove "+self.settings["chroot_path"]+\
10449 - "/etc/portage/make.conf",env=self.env)
10450 - myf=open(self.settings["chroot_path"]+"/etc/portage/make.conf","w")
10451 - myf.write("# These settings were set by the catalyst build script that automatically\n# built this stage.\n")
10452 - myf.write("# Please consult /usr/share/portage/config/make.conf.example for a more\n# detailed example.\n")
10453 - if "CFLAGS" in self.settings:
10454 - myf.write('CFLAGS="'+self.settings["CFLAGS"]+'"\n')
10455 - if "CXXFLAGS" in self.settings:
10456 - if self.settings["CXXFLAGS"]!=self.settings["CFLAGS"]:
10457 - myf.write('CXXFLAGS="'+self.settings["CXXFLAGS"]+'"\n')
10458 - else:
10459 - myf.write('CXXFLAGS="${CFLAGS}"\n')
10460 - else:
10461 - myf.write('CXXFLAGS="${CFLAGS}"\n')
10462 -
10463 - if "LDFLAGS" in self.settings:
10464 - myf.write("# LDFLAGS is unsupported. USE AT YOUR OWN RISK!\n")
10465 - myf.write('LDFLAGS="'+self.settings["LDFLAGS"]+'"\n')
10466 - if "CBUILD" in self.settings:
10467 - myf.write("# This should not be changed unless you know exactly what you are doing. You\n# should probably be using a different stage, instead.\n")
10468 - myf.write('CBUILD="'+self.settings["CBUILD"]+'"\n')
10469 -
10470 - myf.write("# WARNING: Changing your CHOST is not something that should be done lightly.\n# Please consult http://www.gentoo.org/doc/en/change-chost.xml before changing.\n")
10471 - myf.write('CHOST="'+self.settings["CHOST"]+'"\n')
10472 -
10473 - """ Figure out what our USE vars are for building """
10474 - myusevars=[]
10475 - if "HOSTUSE" in self.settings:
10476 - myusevars.extend(self.settings["HOSTUSE"])
10477 -
10478 - if "use" in self.settings:
10479 - myusevars.extend(self.settings["use"])
10480 -
10481 - if myusevars:
10482 - myf.write("# These are the USE flags that were used in addition to what is provided by the\n# profile used for building.\n")
10483 - myusevars = sorted(set(myusevars))
10484 - myf.write('USE="'+string.join(myusevars)+'"\n')
10485 - if '-*' in myusevars:
10486 - print "\nWarning!!! "
10487 - print "\tThe use of -* in "+self.settings["spec_prefix"]+\
10488 - "/use will cause portage to ignore"
10489 - print "\tpackage.use in the profile and portage_confdir. You've been warned!"
10490 -
10491 - myf.write('PORTDIR="%s"\n' % self.settings['portdir'])
10492 - myf.write('DISTDIR="%s"\n' % self.settings['distdir'])
10493 - myf.write('PKGDIR="%s"\n' % self.settings['packagedir'])
10494 -
10495 - """ Setup the portage overlay """
10496 - if "portage_overlay" in self.settings:
10497 - myf.write('PORTDIR_OVERLAY="/usr/local/portage"\n')
10498 -
10499 - myf.close()
10500 - cmd("cp "+self.settings["chroot_path"]+"/etc/portage/make.conf "+\
10501 - self.settings["chroot_path"]+"/etc/portage/make.conf.catalyst",\
10502 - "Could not backup /etc/portage/make.conf",env=self.env)
10503 - touch(self.settings["autoresume_path"]+"chroot_setup")
10504 -
10505 - def fsscript(self):
10506 - if "AUTORESUME" in self.settings \
10507 - and os.path.exists(self.settings["autoresume_path"]+"fsscript"):
10508 - print "Resume point detected, skipping fsscript operation..."
10509 - else:
10510 - if "fsscript" in self.settings:
10511 - if os.path.exists(self.settings["controller_file"]):
10512 - cmd("/bin/bash "+self.settings["controller_file"]+\
10513 - " fsscript","fsscript script failed.",env=self.env)
10514 - touch(self.settings["autoresume_path"]+"fsscript")
10515 -
10516 - def rcupdate(self):
10517 - if "AUTORESUME" in self.settings \
10518 - and os.path.exists(self.settings["autoresume_path"]+"rcupdate"):
10519 - print "Resume point detected, skipping rcupdate operation..."
10520 - else:
10521 - if os.path.exists(self.settings["controller_file"]):
10522 - cmd("/bin/bash "+self.settings["controller_file"]+" rc-update",\
10523 - "rc-update script failed.",env=self.env)
10524 - touch(self.settings["autoresume_path"]+"rcupdate")
10525 -
10526 - def clean(self):
10527 - if "AUTORESUME" in self.settings \
10528 - and os.path.exists(self.settings["autoresume_path"]+"clean"):
10529 - print "Resume point detected, skipping clean operation..."
10530 - else:
10531 - for x in self.settings["cleanables"]:
10532 - print "Cleaning chroot: "+x+"... "
10533 - cmd("rm -rf "+self.settings["destpath"]+x,"Couldn't clean "+\
10534 - x,env=self.env)
10535 -
10536 - """ Put /etc/hosts back into place """
10537 - if os.path.exists(self.settings["chroot_path"]+"/etc/hosts.catalyst"):
10538 - cmd("mv -f "+self.settings["chroot_path"]+"/etc/hosts.catalyst "+\
10539 - self.settings["chroot_path"]+"/etc/hosts",\
10540 - "Could not replace /etc/hosts",env=self.env)
10541 -
10542 - """ Remove our overlay """
10543 - if os.path.exists(self.settings["chroot_path"] + self.settings["local_overlay"]):
10544 - cmd("rm -rf " + self.settings["chroot_path"] + self.settings["local_overlay"],
10545 - "Could not remove " + self.settings["local_overlay"], env=self.env)
10546 - cmd("sed -i '/^PORTDIR_OVERLAY/d' "+self.settings["chroot_path"]+\
10547 - "/etc/portage/make.conf",\
10548 - "Could not remove PORTDIR_OVERLAY from make.conf",env=self.env)
10549 -
10550 - """ Clean up old and obsoleted files in /etc """
10551 - if os.path.exists(self.settings["stage_path"]+"/etc"):
10552 - cmd("find "+self.settings["stage_path"]+\
10553 - "/etc -maxdepth 1 -name \"*-\" | xargs rm -f",\
10554 - "Could not remove stray files in /etc",env=self.env)
10555 -
10556 - if os.path.exists(self.settings["controller_file"]):
10557 - cmd("/bin/bash "+self.settings["controller_file"]+" clean",\
10558 - "clean script failed.",env=self.env)
10559 - touch(self.settings["autoresume_path"]+"clean")
10560 -
10561 - def empty(self):
10562 - if "AUTORESUME" in self.settings \
10563 - and os.path.exists(self.settings["autoresume_path"]+"empty"):
10564 - print "Resume point detected, skipping empty operation..."
10565 - else:
10566 - if self.settings["spec_prefix"]+"/empty" in self.settings:
10567 - if type(self.settings[self.settings["spec_prefix"]+\
10568 - "/empty"])==types.StringType:
10569 - self.settings[self.settings["spec_prefix"]+"/empty"]=\
10570 - self.settings[self.settings["spec_prefix"]+\
10571 - "/empty"].split()
10572 - for x in self.settings[self.settings["spec_prefix"]+"/empty"]:
10573 - myemp=self.settings["destpath"]+x
10574 - if not os.path.isdir(myemp) or os.path.islink(myemp):
10575 - print x,"not a directory or does not exist, skipping 'empty' operation."
10576 - continue
10577 - print "Emptying directory",x
10578 - """
10579 - stat the dir, delete the dir, recreate the dir and set
10580 - the proper perms and ownership
10581 - """
10582 - mystat=os.stat(myemp)
10583 - shutil.rmtree(myemp)
10584 - os.makedirs(myemp,0755)
10585 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
10586 - os.chmod(myemp,mystat[ST_MODE])
10587 - touch(self.settings["autoresume_path"]+"empty")
10588 -
10589 - def remove(self):
10590 - if "AUTORESUME" in self.settings \
10591 - and os.path.exists(self.settings["autoresume_path"]+"remove"):
10592 - print "Resume point detected, skipping remove operation..."
10593 - else:
10594 - if self.settings["spec_prefix"]+"/rm" in self.settings:
10595 - for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
10596 - """
10597 - We're going to shell out for all these cleaning
10598 - operations, so we get easy glob handling.
10599 - """
10600 - print "livecd: removing "+x
10601 - os.system("rm -rf "+self.settings["chroot_path"]+x)
10602 - try:
10603 - if os.path.exists(self.settings["controller_file"]):
10604 - cmd("/bin/bash "+self.settings["controller_file"]+\
10605 - " clean","Clean failed.",env=self.env)
10606 - touch(self.settings["autoresume_path"]+"remove")
10607 - except:
10608 - self.unbind()
10609 - raise
10610 -
10611 - def preclean(self):
10612 - if "AUTORESUME" in self.settings \
10613 - and os.path.exists(self.settings["autoresume_path"]+"preclean"):
10614 - print "Resume point detected, skipping preclean operation..."
10615 - else:
10616 - try:
10617 - if os.path.exists(self.settings["controller_file"]):
10618 - cmd("/bin/bash "+self.settings["controller_file"]+\
10619 - " preclean","preclean script failed.",env=self.env)
10620 - touch(self.settings["autoresume_path"]+"preclean")
10621 -
10622 - except:
10623 - self.unbind()
10624 - raise CatalystError, "Build failed, could not execute preclean"
10625 -
10626 - def capture(self):
10627 - if "AUTORESUME" in self.settings \
10628 - and os.path.exists(self.settings["autoresume_path"]+"capture"):
10629 - print "Resume point detected, skipping capture operation..."
10630 - else:
10631 - """ Capture target in a tarball """
10632 - mypath=self.settings["target_path"].split("/")
10633 - """ Remove filename from path """
10634 - mypath=string.join(mypath[:-1],"/")
10635 -
10636 - """ Now make sure path exists """
10637 - if not os.path.exists(mypath):
10638 - os.makedirs(mypath)
10639 -
10640 - print "Creating stage tarball..."
10641 -
10642 - cmd("tar -I lbzip2 -cpf "+self.settings["target_path"]+" -C "+\
10643 - self.settings["stage_path"]+" .",\
10644 - "Couldn't create stage tarball",env=self.env)
10645 -
10646 - self.gen_contents_file(self.settings["target_path"])
10647 - self.gen_digest_file(self.settings["target_path"])
10648 -
10649 - touch(self.settings["autoresume_path"]+"capture")
10650 -
10651 - def run_local(self):
10652 - if "AUTORESUME" in self.settings \
10653 - and os.path.exists(self.settings["autoresume_path"]+"run_local"):
10654 - print "Resume point detected, skipping run_local operation..."
10655 - else:
10656 - try:
10657 - if os.path.exists(self.settings["controller_file"]):
10658 - cmd("/bin/bash "+self.settings["controller_file"]+" run",\
10659 - "run script failed.",env=self.env)
10660 - touch(self.settings["autoresume_path"]+"run_local")
10661 -
10662 - except CatalystError:
10663 - self.unbind()
10664 - raise CatalystError,"Stage build aborting due to error."
10665 -
10666 - def setup_environment(self):
10667 - """
10668 - Modify the current environment. This is an ugly hack that should be
10669 - fixed. We need this to use the os.system() call since we can't
10670 - specify our own environ
10671 - """
10672 - for x in self.settings.keys():
10673 - """ Sanitize var names by doing "s|/-.|_|g" """
10674 - varname="clst_"+string.replace(x,"/","_")
10675 - varname=string.replace(varname,"-","_")
10676 - varname=string.replace(varname,".","_")
10677 - if type(self.settings[x])==types.StringType:
10678 - """ Prefix to prevent namespace clashes """
10679 - #os.environ[varname]=self.settings[x]
10680 - self.env[varname]=self.settings[x]
10681 - elif type(self.settings[x])==types.ListType:
10682 - #os.environ[varname]=string.join(self.settings[x])
10683 - self.env[varname]=string.join(self.settings[x])
10684 - elif type(self.settings[x])==types.BooleanType:
10685 - if self.settings[x]:
10686 - self.env[varname]="true"
10687 - else:
10688 - self.env[varname]="false"
10689 - if "makeopts" in self.settings:
10690 - self.env["MAKEOPTS"]=self.settings["makeopts"]
10691 -
10692 - def run(self):
10693 - self.chroot_lock.write_lock()
10694 -
10695 - """ Kill any pids in the chroot "" """
10696 - self.kill_chroot_pids()
10697 -
10698 - """ Check for mounts right away and abort if we cannot unmount them """
10699 - self.mount_safety_check()
10700 -
10701 - if "CLEAR_AUTORESUME" in self.settings:
10702 - self.clear_autoresume()
10703 -
10704 - if "PURGETMPONLY" in self.settings:
10705 - self.purge()
10706 - return
10707 -
10708 - if "PURGEONLY" in self.settings:
10709 - self.purge()
10710 - return
10711 -
10712 - if "PURGE" in self.settings:
10713 - self.purge()
10714 -
10715 - for x in self.settings["action_sequence"]:
10716 - print "--- Running action sequence: "+x
10717 - sys.stdout.flush()
10718 - try:
10719 - apply(getattr(self,x))
10720 - except:
10721 - self.mount_safety_check()
10722 - raise
10723 -
10724 - self.chroot_lock.unlock()
10725 -
10726 - def unmerge(self):
10727 - if "AUTORESUME" in self.settings \
10728 - and os.path.exists(self.settings["autoresume_path"]+"unmerge"):
10729 - print "Resume point detected, skipping unmerge operation..."
10730 - else:
10731 - if self.settings["spec_prefix"]+"/unmerge" in self.settings:
10732 - if type(self.settings[self.settings["spec_prefix"]+\
10733 - "/unmerge"])==types.StringType:
10734 - self.settings[self.settings["spec_prefix"]+"/unmerge"]=\
10735 - [self.settings[self.settings["spec_prefix"]+"/unmerge"]]
10736 - myunmerge=\
10737 - self.settings[self.settings["spec_prefix"]+"/unmerge"][:]
10738 -
10739 - for x in range(0,len(myunmerge)):
10740 - """
10741 - Surround args with quotes for passing to bash, allows
10742 - things like "<" to remain intact
10743 - """
10744 - myunmerge[x]="'"+myunmerge[x]+"'"
10745 - myunmerge=string.join(myunmerge)
10746 -
10747 - """ Before cleaning, unmerge stuff """
10748 - try:
10749 - cmd("/bin/bash "+self.settings["controller_file"]+\
10750 - " unmerge "+ myunmerge,"Unmerge script failed.",\
10751 - env=self.env)
10752 - print "unmerge shell script"
10753 - except CatalystError:
10754 - self.unbind()
10755 - raise
10756 - touch(self.settings["autoresume_path"]+"unmerge")
10757 -
10758 - def target_setup(self):
10759 - if "AUTORESUME" in self.settings \
10760 - and os.path.exists(self.settings["autoresume_path"]+"target_setup"):
10761 - print "Resume point detected, skipping target_setup operation..."
10762 - else:
10763 - print "Setting up filesystems per filesystem type"
10764 - cmd("/bin/bash "+self.settings["controller_file"]+\
10765 - " target_image_setup "+ self.settings["target_path"],\
10766 - "target_image_setup script failed.",env=self.env)
10767 - touch(self.settings["autoresume_path"]+"target_setup")
10768 -
10769 - def setup_overlay(self):
10770 - if "AUTORESUME" in self.settings \
10771 - and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
10772 - print "Resume point detected, skipping setup_overlay operation..."
10773 - else:
10774 - if self.settings["spec_prefix"]+"/overlay" in self.settings:
10775 - for x in self.settings[self.settings["spec_prefix"]+"/overlay"]:
10776 - if os.path.exists(x):
10777 - cmd("rsync -a "+x+"/ "+\
10778 - self.settings["target_path"],\
10779 - self.settings["spec_prefix"]+"overlay: "+x+\
10780 - " copy failed.",env=self.env)
10781 - touch(self.settings["autoresume_path"]+"setup_overlay")
10782 -
10783 - def create_iso(self):
10784 - if "AUTORESUME" in self.settings \
10785 - and os.path.exists(self.settings["autoresume_path"]+"create_iso"):
10786 - print "Resume point detected, skipping create_iso operation..."
10787 - else:
10788 - """ Create the ISO """
10789 - if "iso" in self.settings:
10790 - cmd("/bin/bash "+self.settings["controller_file"]+" iso "+\
10791 - self.settings["iso"],"ISO creation script failed.",\
10792 - env=self.env)
10793 - self.gen_contents_file(self.settings["iso"])
10794 - self.gen_digest_file(self.settings["iso"])
10795 - touch(self.settings["autoresume_path"]+"create_iso")
10796 - else:
10797 - print "WARNING: livecd/iso was not defined."
10798 - print "An ISO Image will not be created."
10799 -
10800 - def build_packages(self):
10801 - if "AUTORESUME" in self.settings \
10802 - and os.path.exists(self.settings["autoresume_path"]+\
10803 - "build_packages"):
10804 - print "Resume point detected, skipping build_packages operation..."
10805 - else:
10806 - if self.settings["spec_prefix"]+"/packages" in self.settings:
10807 - if "AUTORESUME" in self.settings \
10808 - and os.path.exists(self.settings["autoresume_path"]+\
10809 - "build_packages"):
10810 - print "Resume point detected, skipping build_packages operation..."
10811 - else:
10812 - mypack=\
10813 - list_bashify(self.settings[self.settings["spec_prefix"]\
10814 - +"/packages"])
10815 - try:
10816 - cmd("/bin/bash "+self.settings["controller_file"]+\
10817 - " build_packages "+mypack,\
10818 - "Error in attempt to build packages",env=self.env)
10819 - touch(self.settings["autoresume_path"]+"build_packages")
10820 - except CatalystError:
10821 - self.unbind()
10822 - raise CatalystError,self.settings["spec_prefix"]+\
10823 - "build aborting due to error."
10824 -
10825 - def build_kernel(self):
10826 - "Build all configured kernels"
10827 - if "AUTORESUME" in self.settings \
10828 - and os.path.exists(self.settings["autoresume_path"]+"build_kernel"):
10829 - print "Resume point detected, skipping build_kernel operation..."
10830 - else:
10831 - if "boot/kernel" in self.settings:
10832 - try:
10833 - mynames=self.settings["boot/kernel"]
10834 - if type(mynames)==types.StringType:
10835 - mynames=[mynames]
10836 - """
10837 - Execute the script that sets up the kernel build environment
10838 - """
10839 - cmd("/bin/bash "+self.settings["controller_file"]+\
10840 - " pre-kmerge ","Runscript pre-kmerge failed",\
10841 - env=self.env)
10842 - for kname in mynames:
10843 - self._build_kernel(kname=kname)
10844 - touch(self.settings["autoresume_path"]+"build_kernel")
10845 - except CatalystError:
10846 - self.unbind()
10847 - raise CatalystError,\
10848 - "build aborting due to kernel build error."
10849 -
10850 - def _build_kernel(self, kname):
10851 - "Build a single configured kernel by name"
10852 - if "AUTORESUME" in self.settings \
10853 - and os.path.exists(self.settings["autoresume_path"]\
10854 - +"build_kernel_"+kname):
10855 - print "Resume point detected, skipping build_kernel for "+kname+" operation..."
10856 - return
10857 - self._copy_kernel_config(kname=kname)
10858 -
10859 - """
10860 - If we need to pass special options to the bootloader
10861 - for this kernel put them into the environment
10862 - """
10863 - if "boot/kernel/"+kname+"/kernelopts" in self.settings:
10864 - myopts=self.settings["boot/kernel/"+kname+\
10865 - "/kernelopts"]
10866 -
10867 - if type(myopts) != types.StringType:
10868 - myopts = string.join(myopts)
10869 - self.env[kname+"_kernelopts"]=myopts
10870 -
10871 - else:
10872 - self.env[kname+"_kernelopts"]=""
10873 -
10874 - if "boot/kernel/"+kname+"/extraversion" not in self.settings:
10875 - self.settings["boot/kernel/"+kname+\
10876 - "/extraversion"]=""
10877 -
10878 - self.env["clst_kextraversion"]=\
10879 - self.settings["boot/kernel/"+kname+\
10880 - "/extraversion"]
10881 -
10882 - self._copy_initramfs_overlay(kname=kname)
10883 -
10884 - """ Execute the script that builds the kernel """
10885 - cmd("/bin/bash "+self.settings["controller_file"]+\
10886 - " kernel "+kname,\
10887 - "Runscript kernel build failed",env=self.env)
10888 -
10889 - if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
10890 - if os.path.exists(self.settings["chroot_path"]+\
10891 - "/tmp/initramfs_overlay/"):
10892 - print "Cleaning up temporary overlay dir"
10893 - cmd("rm -R "+self.settings["chroot_path"]+\
10894 - "/tmp/initramfs_overlay/",env=self.env)
10895 -
10896 - touch(self.settings["autoresume_path"]+\
10897 - "build_kernel_"+kname)
10898 -
10899 - """
10900 - Execute the script that cleans up the kernel build
10901 - environment
10902 - """
10903 - cmd("/bin/bash "+self.settings["controller_file"]+\
10904 - " post-kmerge ",
10905 - "Runscript post-kmerge failed",env=self.env)
10906 -
10907 - def _copy_kernel_config(self, kname):
10908 - if "boot/kernel/"+kname+"/config" in self.settings:
10909 - if not os.path.exists(self.settings["boot/kernel/"+kname+"/config"]):
10910 - self.unbind()
10911 - raise CatalystError,\
10912 - "Can't find kernel config: "+\
10913 - self.settings["boot/kernel/"+kname+\
10914 - "/config"]
10915 -
10916 - try:
10917 - cmd("cp "+self.settings["boot/kernel/"+kname+\
10918 - "/config"]+" "+\
10919 - self.settings["chroot_path"]+"/var/tmp/"+\
10920 - kname+".config",\
10921 - "Couldn't copy kernel config: "+\
10922 - self.settings["boot/kernel/"+kname+\
10923 - "/config"],env=self.env)
10924 -
10925 - except CatalystError:
10926 - self.unbind()
10927 -
10928 - def _copy_initramfs_overlay(self, kname):
10929 - if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
10930 - if os.path.exists(self.settings["boot/kernel/"+\
10931 - kname+"/initramfs_overlay"]):
10932 - print "Copying initramfs_overlay dir "+\
10933 - self.settings["boot/kernel/"+kname+\
10934 - "/initramfs_overlay"]
10935 -
10936 - cmd("mkdir -p "+\
10937 - self.settings["chroot_path"]+\
10938 - "/tmp/initramfs_overlay/"+\
10939 - self.settings["boot/kernel/"+kname+\
10940 - "/initramfs_overlay"],env=self.env)
10941 -
10942 - cmd("cp -R "+self.settings["boot/kernel/"+\
10943 - kname+"/initramfs_overlay"]+"/* "+\
10944 - self.settings["chroot_path"]+\
10945 - "/tmp/initramfs_overlay/"+\
10946 - self.settings["boot/kernel/"+kname+\
10947 - "/initramfs_overlay"],env=self.env)
10948 -
10949 - def bootloader(self):
10950 - if "AUTORESUME" in self.settings \
10951 - and os.path.exists(self.settings["autoresume_path"]+"bootloader"):
10952 - print "Resume point detected, skipping bootloader operation..."
10953 - else:
10954 - try:
10955 - cmd("/bin/bash "+self.settings["controller_file"]+\
10956 - " bootloader " + self.settings["target_path"],\
10957 - "Bootloader script failed.",env=self.env)
10958 - touch(self.settings["autoresume_path"]+"bootloader")
10959 - except CatalystError:
10960 - self.unbind()
10961 - raise CatalystError,"Script aborting due to error."
10962 -
10963 - def livecd_update(self):
10964 - if "AUTORESUME" in self.settings \
10965 - and os.path.exists(self.settings["autoresume_path"]+\
10966 - "livecd_update"):
10967 - print "Resume point detected, skipping build_packages operation..."
10968 - else:
10969 - try:
10970 - cmd("/bin/bash "+self.settings["controller_file"]+\
10971 - " livecd-update","livecd-update failed.",env=self.env)
10972 - touch(self.settings["autoresume_path"]+"livecd_update")
10973 -
10974 - except CatalystError:
10975 - self.unbind()
10976 - raise CatalystError,"build aborting due to livecd_update error."
10977 -
10978 - def clear_chroot(self):
10979 - myemp=self.settings["chroot_path"]
10980 - if os.path.isdir(myemp):
10981 - print "Emptying directory",myemp
10982 - """
10983 - stat the dir, delete the dir, recreate the dir and set
10984 - the proper perms and ownership
10985 - """
10986 - mystat=os.stat(myemp)
10987 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
10988 - """ There's no easy way to change flags recursively in python """
10989 - if os.uname()[0] == "FreeBSD":
10990 - os.system("chflags -R noschg "+myemp)
10991 - shutil.rmtree(myemp)
10992 - os.makedirs(myemp,0755)
10993 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
10994 - os.chmod(myemp,mystat[ST_MODE])
10995 -
10996 - def clear_packages(self):
10997 - if "PKGCACHE" in self.settings:
10998 - print "purging the pkgcache ..."
10999 -
11000 - myemp=self.settings["pkgcache_path"]
11001 - if os.path.isdir(myemp):
11002 - print "Emptying directory",myemp
11003 - """
11004 - stat the dir, delete the dir, recreate the dir and set
11005 - the proper perms and ownership
11006 - """
11007 - mystat=os.stat(myemp)
11008 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
11009 - shutil.rmtree(myemp)
11010 - os.makedirs(myemp,0755)
11011 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11012 - os.chmod(myemp,mystat[ST_MODE])
11013 -
11014 - def clear_kerncache(self):
11015 - if "KERNCACHE" in self.settings:
11016 - print "purging the kerncache ..."
11017 -
11018 - myemp=self.settings["kerncache_path"]
11019 - if os.path.isdir(myemp):
11020 - print "Emptying directory",myemp
11021 - """
11022 - stat the dir, delete the dir, recreate the dir and set
11023 - the proper perms and ownership
11024 - """
11025 - mystat=os.stat(myemp)
11026 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
11027 - shutil.rmtree(myemp)
11028 - os.makedirs(myemp,0755)
11029 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11030 - os.chmod(myemp,mystat[ST_MODE])
11031 -
11032 - def clear_autoresume(self):
11033 - """ Clean resume points since they are no longer needed """
11034 - if "AUTORESUME" in self.settings:
11035 - print "Removing AutoResume Points: ..."
11036 - myemp=self.settings["autoresume_path"]
11037 - if os.path.isdir(myemp):
11038 - if "AUTORESUME" in self.settings:
11039 - print "Emptying directory",myemp
11040 - """
11041 - stat the dir, delete the dir, recreate the dir and set
11042 - the proper perms and ownership
11043 - """
11044 - mystat=os.stat(myemp)
11045 - if os.uname()[0] == "FreeBSD":
11046 - cmd("chflags -R noschg "+myemp,\
11047 - "Could not remove immutable flag for file "\
11048 - +myemp)
11049 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env-self.env)
11050 - shutil.rmtree(myemp)
11051 - os.makedirs(myemp,0755)
11052 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11053 - os.chmod(myemp,mystat[ST_MODE])
11054 -
11055 - def gen_contents_file(self,file):
11056 - if os.path.exists(file+".CONTENTS"):
11057 - os.remove(file+".CONTENTS")
11058 - if "contents" in self.settings:
11059 - if os.path.exists(file):
11060 - myf=open(file+".CONTENTS","w")
11061 - keys={}
11062 - for i in self.settings["contents"].split():
11063 - keys[i]=1
11064 - array=keys.keys()
11065 - array.sort()
11066 - for j in array:
11067 - contents=generate_contents(file,contents_function=j,\
11068 - verbose="VERBOSE" in self.settings)
11069 - if contents:
11070 - myf.write(contents)
11071 - myf.close()
11072 -
11073 - def gen_digest_file(self,file):
11074 - if os.path.exists(file+".DIGESTS"):
11075 - os.remove(file+".DIGESTS")
11076 - if "digests" in self.settings:
11077 - if os.path.exists(file):
11078 - myf=open(file+".DIGESTS","w")
11079 - keys={}
11080 - for i in self.settings["digests"].split():
11081 - keys[i]=1
11082 - array=keys.keys()
11083 - array.sort()
11084 - for f in [file, file+'.CONTENTS']:
11085 - if os.path.exists(f):
11086 - if "all" in array:
11087 - for k in hash_map.keys():
11088 - hash=generate_hash(f,hash_function=k,verbose=\
11089 - "VERBOSE" in self.settings)
11090 - myf.write(hash)
11091 - else:
11092 - for j in array:
11093 - hash=generate_hash(f,hash_function=j,verbose=\
11094 - "VERBOSE" in self.settings)
11095 - myf.write(hash)
11096 - myf.close()
11097 -
11098 - def purge(self):
11099 - countdown(10,"Purging Caches ...")
11100 - if any(k in self.settings for k in ("PURGE","PURGEONLY","PURGETMPONLY")):
11101 - print "clearing autoresume ..."
11102 - self.clear_autoresume()
11103 -
11104 - print "clearing chroot ..."
11105 - self.clear_chroot()
11106 -
11107 - if "PURGETMPONLY" not in self.settings:
11108 - print "clearing package cache ..."
11109 - self.clear_packages()
11110 -
11111 - print "clearing kerncache ..."
11112 - self.clear_kerncache()
11113 -
11114 -# vim: ts=4 sw=4 sta et sts=4 ai
11115 diff --git a/modules/generic_target.py b/modules/generic_target.py
11116 deleted file mode 100644
11117 index fe96bd7..0000000
11118 --- a/modules/generic_target.py
11119 +++ /dev/null
11120 @@ -1,11 +0,0 @@
11121 -from catalyst_support import *
11122 -
11123 -class generic_target:
11124 - """
11125 - The toplevel class for generic_stage_target. This is about as generic as we get.
11126 - """
11127 - def __init__(self,myspec,addlargs):
11128 - addl_arg_parse(myspec,addlargs,self.required_values,self.valid_values)
11129 - self.settings=myspec
11130 - self.env={}
11131 - self.env["PATH"]="/bin:/sbin:/usr/bin:/usr/sbin"
11132 diff --git a/modules/grp_target.py b/modules/grp_target.py
11133 deleted file mode 100644
11134 index 6941522..0000000
11135 --- a/modules/grp_target.py
11136 +++ /dev/null
11137 @@ -1,118 +0,0 @@
11138 -"""
11139 -Gentoo Reference Platform (GRP) target
11140 -"""
11141 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11142 -
11143 -import os,types,glob
11144 -from catalyst_support import *
11145 -from generic_stage_target import *
11146 -
11147 -class grp_target(generic_stage_target):
11148 - """
11149 - The builder class for GRP (Gentoo Reference Platform) builds.
11150 - """
11151 - def __init__(self,spec,addlargs):
11152 - self.required_values=["version_stamp","target","subarch",\
11153 - "rel_type","profile","snapshot","source_subpath"]
11154 -
11155 - self.valid_values=self.required_values[:]
11156 - self.valid_values.extend(["grp/use"])
11157 - if "grp" not in addlargs:
11158 - raise CatalystError,"Required value \"grp\" not specified in spec."
11159 -
11160 - self.required_values.extend(["grp"])
11161 - if type(addlargs["grp"])==types.StringType:
11162 - addlargs["grp"]=[addlargs["grp"]]
11163 -
11164 - if "grp/use" in addlargs:
11165 - if type(addlargs["grp/use"])==types.StringType:
11166 - addlargs["grp/use"]=[addlargs["grp/use"]]
11167 -
11168 - for x in addlargs["grp"]:
11169 - self.required_values.append("grp/"+x+"/packages")
11170 - self.required_values.append("grp/"+x+"/type")
11171 -
11172 - generic_stage_target.__init__(self,spec,addlargs)
11173 -
11174 - def set_target_path(self):
11175 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
11176 - if "AUTORESUME" in self.settings \
11177 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11178 - print "Resume point detected, skipping target path setup operation..."
11179 - else:
11180 - # first clean up any existing target stuff
11181 - #if os.path.isdir(self.settings["target_path"]):
11182 - #cmd("rm -rf "+self.settings["target_path"],
11183 - #"Could not remove existing directory: "+self.settings["target_path"],env=self.env)
11184 - if not os.path.exists(self.settings["target_path"]):
11185 - os.makedirs(self.settings["target_path"])
11186 -
11187 - touch(self.settings["autoresume_path"]+"setup_target_path")
11188 -
11189 - def run_local(self):
11190 - for pkgset in self.settings["grp"]:
11191 - # example call: "grp.sh run pkgset cd1 xmms vim sys-apps/gleep"
11192 - mypackages=list_bashify(self.settings["grp/"+pkgset+"/packages"])
11193 - try:
11194 - cmd("/bin/bash "+self.settings["controller_file"]+" run "+self.settings["grp/"+pkgset+"/type"]\
11195 - +" "+pkgset+" "+mypackages,env=self.env)
11196 -
11197 - except CatalystError:
11198 - self.unbind()
11199 - raise CatalystError,"GRP build aborting due to error."
11200 -
11201 - def set_use(self):
11202 - generic_stage_target.set_use(self)
11203 - if "BINDIST" in self.settings:
11204 - if "use" in self.settings:
11205 - self.settings["use"].append("bindist")
11206 - else:
11207 - self.settings["use"]=["bindist"]
11208 -
11209 - def set_mounts(self):
11210 - self.mounts.append("/tmp/grp")
11211 - self.mountmap["/tmp/grp"]=self.settings["target_path"]
11212 -
11213 - def generate_digests(self):
11214 - for pkgset in self.settings["grp"]:
11215 - if self.settings["grp/"+pkgset+"/type"] == "pkgset":
11216 - destdir=normpath(self.settings["target_path"]+"/"+pkgset+"/All")
11217 - print "Digesting files in the pkgset....."
11218 - digests=glob.glob(destdir+'/*.DIGESTS')
11219 - for i in digests:
11220 - if os.path.exists(i):
11221 - os.remove(i)
11222 -
11223 - files=os.listdir(destdir)
11224 - #ignore files starting with '.' using list comprehension
11225 - files=[filename for filename in files if filename[0] != '.']
11226 - for i in files:
11227 - if os.path.isfile(normpath(destdir+"/"+i)):
11228 - self.gen_contents_file(normpath(destdir+"/"+i))
11229 - self.gen_digest_file(normpath(destdir+"/"+i))
11230 - else:
11231 - destdir=normpath(self.settings["target_path"]+"/"+pkgset)
11232 - print "Digesting files in the srcset....."
11233 -
11234 - digests=glob.glob(destdir+'/*.DIGESTS')
11235 - for i in digests:
11236 - if os.path.exists(i):
11237 - os.remove(i)
11238 -
11239 - files=os.listdir(destdir)
11240 - #ignore files starting with '.' using list comprehension
11241 - files=[filename for filename in files if filename[0] != '.']
11242 - for i in files:
11243 - if os.path.isfile(normpath(destdir+"/"+i)):
11244 - #self.gen_contents_file(normpath(destdir+"/"+i))
11245 - self.gen_digest_file(normpath(destdir+"/"+i))
11246 -
11247 - def set_action_sequence(self):
11248 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
11249 - "config_profile_link","setup_confdir","portage_overlay","bind","chroot_setup",\
11250 - "setup_environment","run_local","unbind",\
11251 - "generate_digests","clear_autoresume"]
11252 -
11253 -def register(foo):
11254 - foo.update({"grp":grp_target})
11255 - return foo
11256 diff --git a/modules/livecd_stage1_target.py b/modules/livecd_stage1_target.py
11257 deleted file mode 100644
11258 index 59de9bb..0000000
11259 --- a/modules/livecd_stage1_target.py
11260 +++ /dev/null
11261 @@ -1,75 +0,0 @@
11262 -"""
11263 -LiveCD stage1 target
11264 -"""
11265 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11266 -
11267 -from catalyst_support import *
11268 -from generic_stage_target import *
11269 -
11270 -class livecd_stage1_target(generic_stage_target):
11271 - """
11272 - Builder class for LiveCD stage1.
11273 - """
11274 - def __init__(self,spec,addlargs):
11275 - self.required_values=["livecd/packages"]
11276 - self.valid_values=self.required_values[:]
11277 -
11278 - self.valid_values.extend(["livecd/use"])
11279 - generic_stage_target.__init__(self,spec,addlargs)
11280 -
11281 - def set_action_sequence(self):
11282 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
11283 - "config_profile_link","setup_confdir","portage_overlay",\
11284 - "bind","chroot_setup","setup_environment","build_packages",\
11285 - "unbind", "clean","clear_autoresume"]
11286 -
11287 - def set_target_path(self):
11288 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"])
11289 - if "AUTORESUME" in self.settings \
11290 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11291 - print "Resume point detected, skipping target path setup operation..."
11292 - else:
11293 - # first clean up any existing target stuff
11294 - if os.path.exists(self.settings["target_path"]):
11295 - cmd("rm -rf "+self.settings["target_path"],\
11296 - "Could not remove existing directory: "+self.settings["target_path"],env=self.env)
11297 - touch(self.settings["autoresume_path"]+"setup_target_path")
11298 -
11299 - if not os.path.exists(self.settings["target_path"]):
11300 - os.makedirs(self.settings["target_path"])
11301 -
11302 - def set_target_path(self):
11303 - pass
11304 -
11305 - def set_spec_prefix(self):
11306 - self.settings["spec_prefix"]="livecd"
11307 -
11308 - def set_use(self):
11309 - generic_stage_target.set_use(self)
11310 - if "use" in self.settings:
11311 - self.settings["use"].append("livecd")
11312 - if "BINDIST" in self.settings:
11313 - self.settings["use"].append("bindist")
11314 - else:
11315 - self.settings["use"]=["livecd"]
11316 - if "BINDIST" in self.settings:
11317 - self.settings["use"].append("bindist")
11318 -
11319 - def set_packages(self):
11320 - generic_stage_target.set_packages(self)
11321 - if self.settings["spec_prefix"]+"/packages" in self.settings:
11322 - if type(self.settings[self.settings["spec_prefix"]+"/packages"]) == types.StringType:
11323 - self.settings[self.settings["spec_prefix"]+"/packages"] = \
11324 - self.settings[self.settings["spec_prefix"]+"/packages"].split()
11325 - self.settings[self.settings["spec_prefix"]+"/packages"].append("app-misc/livecd-tools")
11326 -
11327 - def set_pkgcache_path(self):
11328 - if "pkgcache_path" in self.settings:
11329 - if type(self.settings["pkgcache_path"]) != types.StringType:
11330 - self.settings["pkgcache_path"]=normpath(string.join(self.settings["pkgcache_path"]))
11331 - else:
11332 - generic_stage_target.set_pkgcache_path(self)
11333 -
11334 -def register(foo):
11335 - foo.update({"livecd-stage1":livecd_stage1_target})
11336 - return foo
11337 diff --git a/modules/livecd_stage2_target.py b/modules/livecd_stage2_target.py
11338 deleted file mode 100644
11339 index 5be8fd2..0000000
11340 --- a/modules/livecd_stage2_target.py
11341 +++ /dev/null
11342 @@ -1,146 +0,0 @@
11343 -"""
11344 -LiveCD stage2 target, builds upon previous LiveCD stage1 tarball
11345 -"""
11346 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11347 -
11348 -import os,string,types,stat,shutil
11349 -from catalyst_support import *
11350 -from generic_stage_target import *
11351 -
11352 -class livecd_stage2_target(generic_stage_target):
11353 - """
11354 - Builder class for a LiveCD stage2 build.
11355 - """
11356 - def __init__(self,spec,addlargs):
11357 - self.required_values=["boot/kernel"]
11358 -
11359 - self.valid_values=[]
11360 -
11361 - self.valid_values.extend(self.required_values)
11362 - self.valid_values.extend(["livecd/cdtar","livecd/empty","livecd/rm",\
11363 - "livecd/unmerge","livecd/iso","livecd/gk_mainargs","livecd/type",\
11364 - "livecd/readme","livecd/motd","livecd/overlay",\
11365 - "livecd/modblacklist","livecd/splash_theme","livecd/rcadd",\
11366 - "livecd/rcdel","livecd/fsscript","livecd/xinitrc",\
11367 - "livecd/root_overlay","livecd/users","portage_overlay",\
11368 - "livecd/fstype","livecd/fsops","livecd/linuxrc","livecd/bootargs",\
11369 - "gamecd/conf","livecd/xdm","livecd/xsession","livecd/volid"])
11370 -
11371 - generic_stage_target.__init__(self,spec,addlargs)
11372 - if "livecd/type" not in self.settings:
11373 - self.settings["livecd/type"] = "generic-livecd"
11374 -
11375 - file_locate(self.settings, ["cdtar","controller_file"])
11376 -
11377 - def set_source_path(self):
11378 - self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
11379 - if os.path.isfile(self.settings["source_path"]):
11380 - self.settings["source_path_hash"]=generate_hash(self.settings["source_path"])
11381 - else:
11382 - self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/")
11383 - if not os.path.exists(self.settings["source_path"]):
11384 - raise CatalystError,"Source Path: "+self.settings["source_path"]+" does not exist."
11385 -
11386 - def set_spec_prefix(self):
11387 - self.settings["spec_prefix"]="livecd"
11388 -
11389 - def set_target_path(self):
11390 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
11391 - if "AUTORESUME" in self.settings \
11392 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11393 - print "Resume point detected, skipping target path setup operation..."
11394 - else:
11395 - # first clean up any existing target stuff
11396 - if os.path.isdir(self.settings["target_path"]):
11397 - cmd("rm -rf "+self.settings["target_path"],
11398 - "Could not remove existing directory: "+self.settings["target_path"],env=self.env)
11399 - touch(self.settings["autoresume_path"]+"setup_target_path")
11400 - if not os.path.exists(self.settings["target_path"]):
11401 - os.makedirs(self.settings["target_path"])
11402 -
11403 - def run_local(self):
11404 - # what modules do we want to blacklist?
11405 - if "livecd/modblacklist" in self.settings:
11406 - try:
11407 - myf=open(self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf","a")
11408 - except:
11409 - self.unbind()
11410 - raise CatalystError,"Couldn't open "+self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf."
11411 -
11412 - myf.write("\n#Added by Catalyst:")
11413 - # workaround until config.py is using configparser
11414 - if isinstance(self.settings["livecd/modblacklist"], str):
11415 - self.settings["livecd/modblacklist"] = self.settings["livecd/modblacklist"].split()
11416 - for x in self.settings["livecd/modblacklist"]:
11417 - myf.write("\nblacklist "+x)
11418 - myf.close()
11419 -
11420 - def unpack(self):
11421 - unpack=True
11422 - display_msg=None
11423 -
11424 - clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+"unpack")
11425 -
11426 - if os.path.isdir(self.settings["source_path"]):
11427 - unpack_cmd="rsync -a --delete "+self.settings["source_path"]+" "+self.settings["chroot_path"]
11428 - display_msg="\nStarting rsync from "+self.settings["source_path"]+"\nto "+\
11429 - self.settings["chroot_path"]+" (This may take some time) ...\n"
11430 - error_msg="Rsync of "+self.settings["source_path"]+" to "+self.settings["chroot_path"]+" failed."
11431 - invalid_snapshot=False
11432 -
11433 - if "AUTORESUME" in self.settings:
11434 - if os.path.isdir(self.settings["source_path"]) and \
11435 - os.path.exists(self.settings["autoresume_path"]+"unpack"):
11436 - print "Resume point detected, skipping unpack operation..."
11437 - unpack=False
11438 - elif "source_path_hash" in self.settings:
11439 - if self.settings["source_path_hash"] != clst_unpack_hash:
11440 - invalid_snapshot=True
11441 -
11442 - if unpack:
11443 - self.mount_safety_check()
11444 - if invalid_snapshot:
11445 - print "No Valid Resume point detected, cleaning up ..."
11446 - #os.remove(self.settings["autoresume_path"]+"dir_setup")
11447 - self.clear_autoresume()
11448 - self.clear_chroot()
11449 - #self.dir_setup()
11450 -
11451 - if not os.path.exists(self.settings["chroot_path"]):
11452 - os.makedirs(self.settings["chroot_path"])
11453 -
11454 - if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
11455 - os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
11456 -
11457 - if "PKGCACHE" in self.settings:
11458 - if not os.path.exists(self.settings["pkgcache_path"]):
11459 - os.makedirs(self.settings["pkgcache_path"],0755)
11460 -
11461 - if not display_msg:
11462 - raise CatalystError,"Could not find appropriate source. Please check the 'source_subpath' setting in the spec file."
11463 -
11464 - print display_msg
11465 - cmd(unpack_cmd,error_msg,env=self.env)
11466 -
11467 - if "source_path_hash" in self.settings:
11468 - myf=open(self.settings["autoresume_path"]+"unpack","w")
11469 - myf.write(self.settings["source_path_hash"])
11470 - myf.close()
11471 - else:
11472 - touch(self.settings["autoresume_path"]+"unpack")
11473 -
11474 - def set_action_sequence(self):
11475 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
11476 - "config_profile_link","setup_confdir","portage_overlay",\
11477 - "bind","chroot_setup","setup_environment","run_local",\
11478 - "build_kernel"]
11479 - if "FETCH" not in self.settings:
11480 - self.settings["action_sequence"] += ["bootloader","preclean",\
11481 - "livecd_update","root_overlay","fsscript","rcupdate","unmerge",\
11482 - "unbind","remove","empty","target_setup",\
11483 - "setup_overlay","create_iso"]
11484 - self.settings["action_sequence"].append("clear_autoresume")
11485 -
11486 -def register(foo):
11487 - foo.update({"livecd-stage2":livecd_stage2_target})
11488 - return foo
11489 diff --git a/modules/netboot2_target.py b/modules/netboot2_target.py
11490 deleted file mode 100644
11491 index 1ab7e7d..0000000
11492 --- a/modules/netboot2_target.py
11493 +++ /dev/null
11494 @@ -1,166 +0,0 @@
11495 -"""
11496 -netboot target, version 2
11497 -"""
11498 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11499 -
11500 -import os,string,types
11501 -from catalyst_support import *
11502 -from generic_stage_target import *
11503 -
11504 -class netboot2_target(generic_stage_target):
11505 - """
11506 - Builder class for a netboot build, version 2
11507 - """
11508 - def __init__(self,spec,addlargs):
11509 - self.required_values=[
11510 - "boot/kernel"
11511 - ]
11512 - self.valid_values=self.required_values[:]
11513 - self.valid_values.extend([
11514 - "netboot2/packages",
11515 - "netboot2/use",
11516 - "netboot2/extra_files",
11517 - "netboot2/overlay",
11518 - "netboot2/busybox_config",
11519 - "netboot2/root_overlay",
11520 - "netboot2/linuxrc"
11521 - ])
11522 -
11523 - try:
11524 - if "netboot2/packages" in addlargs:
11525 - if type(addlargs["netboot2/packages"]) == types.StringType:
11526 - loopy=[addlargs["netboot2/packages"]]
11527 - else:
11528 - loopy=addlargs["netboot2/packages"]
11529 -
11530 - for x in loopy:
11531 - self.valid_values.append("netboot2/packages/"+x+"/files")
11532 - except:
11533 - raise CatalystError,"configuration error in netboot2/packages."
11534 -
11535 - generic_stage_target.__init__(self,spec,addlargs)
11536 - self.set_build_kernel_vars()
11537 - self.settings["merge_path"]=normpath("/tmp/image/")
11538 -
11539 - def set_target_path(self):
11540 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+\
11541 - self.settings["target_subpath"]+"/")
11542 - if "AUTORESUME" in self.settings \
11543 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11544 - print "Resume point detected, skipping target path setup operation..."
11545 - else:
11546 - # first clean up any existing target stuff
11547 - if os.path.isfile(self.settings["target_path"]):
11548 - cmd("rm -f "+self.settings["target_path"], \
11549 - "Could not remove existing file: "+self.settings["target_path"],env=self.env)
11550 - touch(self.settings["autoresume_path"]+"setup_target_path")
11551 -
11552 - if not os.path.exists(self.settings["storedir"]+"/builds/"):
11553 - os.makedirs(self.settings["storedir"]+"/builds/")
11554 -
11555 - def copy_files_to_image(self):
11556 - # copies specific files from the buildroot to merge_path
11557 - myfiles=[]
11558 -
11559 - # check for autoresume point
11560 - if "AUTORESUME" in self.settings \
11561 - and os.path.exists(self.settings["autoresume_path"]+"copy_files_to_image"):
11562 - print "Resume point detected, skipping target path setup operation..."
11563 - else:
11564 - if "netboot2/packages" in self.settings:
11565 - if type(self.settings["netboot2/packages"]) == types.StringType:
11566 - loopy=[self.settings["netboot2/packages"]]
11567 - else:
11568 - loopy=self.settings["netboot2/packages"]
11569 -
11570 - for x in loopy:
11571 - if "netboot2/packages/"+x+"/files" in self.settings:
11572 - if type(self.settings["netboot2/packages/"+x+"/files"]) == types.ListType:
11573 - myfiles.extend(self.settings["netboot2/packages/"+x+"/files"])
11574 - else:
11575 - myfiles.append(self.settings["netboot2/packages/"+x+"/files"])
11576 -
11577 - if "netboot2/extra_files" in self.settings:
11578 - if type(self.settings["netboot2/extra_files"]) == types.ListType:
11579 - myfiles.extend(self.settings["netboot2/extra_files"])
11580 - else:
11581 - myfiles.append(self.settings["netboot2/extra_files"])
11582 -
11583 - try:
11584 - cmd("/bin/bash "+self.settings["controller_file"]+\
11585 - " image " + list_bashify(myfiles),env=self.env)
11586 - except CatalystError:
11587 - self.unbind()
11588 - raise CatalystError,"Failed to copy files to image!"
11589 -
11590 - touch(self.settings["autoresume_path"]+"copy_files_to_image")
11591 -
11592 - def setup_overlay(self):
11593 - if "AUTORESUME" in self.settings \
11594 - and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
11595 - print "Resume point detected, skipping setup_overlay operation..."
11596 - else:
11597 - if "netboot2/overlay" in self.settings:
11598 - for x in self.settings["netboot2/overlay"]:
11599 - if os.path.exists(x):
11600 - cmd("rsync -a "+x+"/ "+\
11601 - self.settings["chroot_path"] + self.settings["merge_path"], "netboot2/overlay: "+x+" copy failed.",env=self.env)
11602 - touch(self.settings["autoresume_path"]+"setup_overlay")
11603 -
11604 - def move_kernels(self):
11605 - # we're done, move the kernels to builds/*
11606 - # no auto resume here as we always want the
11607 - # freshest images moved
11608 - try:
11609 - cmd("/bin/bash "+self.settings["controller_file"]+\
11610 - " final",env=self.env)
11611 - print ">>> Netboot Build Finished!"
11612 - except CatalystError:
11613 - self.unbind()
11614 - raise CatalystError,"Failed to move kernel images!"
11615 -
11616 - def remove(self):
11617 - if "AUTORESUME" in self.settings \
11618 - and os.path.exists(self.settings["autoresume_path"]+"remove"):
11619 - print "Resume point detected, skipping remove operation..."
11620 - else:
11621 - if self.settings["spec_prefix"]+"/rm" in self.settings:
11622 - for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
11623 - # we're going to shell out for all these cleaning operations,
11624 - # so we get easy glob handling
11625 - print "netboot2: removing " + x
11626 - os.system("rm -rf " + self.settings["chroot_path"] + self.settings["merge_path"] + x)
11627 -
11628 - def empty(self):
11629 - if "AUTORESUME" in self.settings \
11630 - and os.path.exists(self.settings["autoresume_path"]+"empty"):
11631 - print "Resume point detected, skipping empty operation..."
11632 - else:
11633 - if "netboot2/empty" in self.settings:
11634 - if type(self.settings["netboot2/empty"])==types.StringType:
11635 - self.settings["netboot2/empty"]=self.settings["netboot2/empty"].split()
11636 - for x in self.settings["netboot2/empty"]:
11637 - myemp=self.settings["chroot_path"] + self.settings["merge_path"] + x
11638 - if not os.path.isdir(myemp):
11639 - print x,"not a directory or does not exist, skipping 'empty' operation."
11640 - continue
11641 - print "Emptying directory", x
11642 - # stat the dir, delete the dir, recreate the dir and set
11643 - # the proper perms and ownership
11644 - mystat=os.stat(myemp)
11645 - shutil.rmtree(myemp)
11646 - os.makedirs(myemp,0755)
11647 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11648 - os.chmod(myemp,mystat[ST_MODE])
11649 - touch(self.settings["autoresume_path"]+"empty")
11650 -
11651 - def set_action_sequence(self):
11652 - self.settings["action_sequence"]=["unpack","unpack_snapshot","config_profile_link",
11653 - "setup_confdir","portage_overlay","bind","chroot_setup",\
11654 - "setup_environment","build_packages","root_overlay",\
11655 - "copy_files_to_image","setup_overlay","build_kernel","move_kernels",\
11656 - "remove","empty","unbind","clean","clear_autoresume"]
11657 -
11658 -def register(foo):
11659 - foo.update({"netboot2":netboot2_target})
11660 - return foo
11661 diff --git a/modules/netboot_target.py b/modules/netboot_target.py
11662 deleted file mode 100644
11663 index ff2c81f..0000000
11664 --- a/modules/netboot_target.py
11665 +++ /dev/null
11666 @@ -1,128 +0,0 @@
11667 -"""
11668 -netboot target, version 1
11669 -"""
11670 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11671 -
11672 -import os,string,types
11673 -from catalyst_support import *
11674 -from generic_stage_target import *
11675 -
11676 -class netboot_target(generic_stage_target):
11677 - """
11678 - Builder class for a netboot build.
11679 - """
11680 - def __init__(self,spec,addlargs):
11681 - self.valid_values = [
11682 - "netboot/kernel/sources",
11683 - "netboot/kernel/config",
11684 - "netboot/kernel/prebuilt",
11685 -
11686 - "netboot/busybox_config",
11687 -
11688 - "netboot/extra_files",
11689 - "netboot/packages"
11690 - ]
11691 - self.required_values=[]
11692 -
11693 - try:
11694 - if "netboot/packages" in addlargs:
11695 - if type(addlargs["netboot/packages"]) == types.StringType:
11696 - loopy=[addlargs["netboot/packages"]]
11697 - else:
11698 - loopy=addlargs["netboot/packages"]
11699 -
11700 - # for x in loopy:
11701 - # self.required_values.append("netboot/packages/"+x+"/files")
11702 - except:
11703 - raise CatalystError,"configuration error in netboot/packages."
11704 -
11705 - generic_stage_target.__init__(self,spec,addlargs)
11706 - self.set_build_kernel_vars(addlargs)
11707 - if "netboot/busybox_config" in addlargs:
11708 - file_locate(self.settings, ["netboot/busybox_config"])
11709 -
11710 - # Custom Kernel Tarball --- use that instead ...
11711 -
11712 - # unless the user wants specific CFLAGS/CXXFLAGS, let's use -Os
11713 -
11714 - for envvar in "CFLAGS", "CXXFLAGS":
11715 - if envvar not in os.environ and envvar not in addlargs:
11716 - self.settings[envvar] = "-Os -pipe"
11717 -
11718 - def set_root_path(self):
11719 - # ROOT= variable for emerges
11720 - self.settings["root_path"]=normpath("/tmp/image")
11721 - print "netboot root path is "+self.settings["root_path"]
11722 -
11723 -# def build_packages(self):
11724 -# # build packages
11725 -# if "netboot/packages" in self.settings:
11726 -# mypack=list_bashify(self.settings["netboot/packages"])
11727 -# try:
11728 -# cmd("/bin/bash "+self.settings["controller_file"]+" packages "+mypack,env=self.env)
11729 -# except CatalystError:
11730 -# self.unbind()
11731 -# raise CatalystError,"netboot build aborting due to error."
11732 -
11733 - def build_busybox(self):
11734 - # build busybox
11735 - if "netboot/busybox_config" in self.settings:
11736 - mycmd = self.settings["netboot/busybox_config"]
11737 - else:
11738 - mycmd = ""
11739 - try:
11740 - cmd("/bin/bash "+self.settings["controller_file"]+" busybox "+ mycmd,env=self.env)
11741 - except CatalystError:
11742 - self.unbind()
11743 - raise CatalystError,"netboot build aborting due to error."
11744 -
11745 - def copy_files_to_image(self):
11746 - # create image
11747 - myfiles=[]
11748 - if "netboot/packages" in self.settings:
11749 - if type(self.settings["netboot/packages"]) == types.StringType:
11750 - loopy=[self.settings["netboot/packages"]]
11751 - else:
11752 - loopy=self.settings["netboot/packages"]
11753 -
11754 - for x in loopy:
11755 - if "netboot/packages/"+x+"/files" in self.settings:
11756 - if type(self.settings["netboot/packages/"+x+"/files"]) == types.ListType:
11757 - myfiles.extend(self.settings["netboot/packages/"+x+"/files"])
11758 - else:
11759 - myfiles.append(self.settings["netboot/packages/"+x+"/files"])
11760 -
11761 - if "netboot/extra_files" in self.settings:
11762 - if type(self.settings["netboot/extra_files"]) == types.ListType:
11763 - myfiles.extend(self.settings["netboot/extra_files"])
11764 - else:
11765 - myfiles.append(self.settings["netboot/extra_files"])
11766 -
11767 - try:
11768 - cmd("/bin/bash "+self.settings["controller_file"]+\
11769 - " image " + list_bashify(myfiles),env=self.env)
11770 - except CatalystError:
11771 - self.unbind()
11772 - raise CatalystError,"netboot build aborting due to error."
11773 -
11774 - def create_netboot_files(self):
11775 - # finish it all up
11776 - try:
11777 - cmd("/bin/bash "+self.settings["controller_file"]+" finish",env=self.env)
11778 - except CatalystError:
11779 - self.unbind()
11780 - raise CatalystError,"netboot build aborting due to error."
11781 -
11782 - # end
11783 - print "netboot: build finished !"
11784 -
11785 - def set_action_sequence(self):
11786 - self.settings["action_sequence"]=["unpack","unpack_snapshot",
11787 - "config_profile_link","setup_confdir","bind","chroot_setup",\
11788 - "setup_environment","build_packages","build_busybox",\
11789 - "build_kernel","copy_files_to_image",\
11790 - "clean","create_netboot_files","unbind","clear_autoresume"]
11791 -
11792 -def register(foo):
11793 - foo.update({"netboot":netboot_target})
11794 - return foo
11795 diff --git a/modules/snapshot_target.py b/modules/snapshot_target.py
11796 deleted file mode 100644
11797 index 29d6e87..0000000
11798 --- a/modules/snapshot_target.py
11799 +++ /dev/null
11800 @@ -1,91 +0,0 @@
11801 -"""
11802 -Snapshot target
11803 -"""
11804 -
11805 -import os
11806 -from catalyst_support import *
11807 -from generic_stage_target import *
11808 -
11809 -class snapshot_target(generic_stage_target):
11810 - """
11811 - Builder class for snapshots.
11812 - """
11813 - def __init__(self,myspec,addlargs):
11814 - self.required_values=["version_stamp","target"]
11815 - self.valid_values=["version_stamp","target"]
11816 -
11817 - generic_target.__init__(self,myspec,addlargs)
11818 - self.settings=myspec
11819 - self.settings["target_subpath"]="portage"
11820 - st=self.settings["storedir"]
11821 - self.settings["snapshot_path"]=normpath(st + "/snapshots/"
11822 - + self.settings["snapshot_name"]
11823 - + self.settings["version_stamp"] + ".tar.bz2")
11824 - self.settings["tmp_path"]=normpath(st+"/tmp/"+self.settings["target_subpath"])
11825 -
11826 - def setup(self):
11827 - x=normpath(self.settings["storedir"]+"/snapshots")
11828 - if not os.path.exists(x):
11829 - os.makedirs(x)
11830 -
11831 - def mount_safety_check(self):
11832 - pass
11833 -
11834 - def run(self):
11835 - if "PURGEONLY" in self.settings:
11836 - self.purge()
11837 - return
11838 -
11839 - if "PURGE" in self.settings:
11840 - self.purge()
11841 -
11842 - self.setup()
11843 - print "Creating Portage tree snapshot "+self.settings["version_stamp"]+\
11844 - " from "+self.settings["portdir"]+"..."
11845 -
11846 - mytmp=self.settings["tmp_path"]
11847 - if not os.path.exists(mytmp):
11848 - os.makedirs(mytmp)
11849 -
11850 - cmd("rsync -a --delete --exclude /packages/ --exclude /distfiles/ " +
11851 - "--exclude /local/ --exclude CVS/ --exclude .svn --filter=H_**/files/digest-* " +
11852 - self.settings["portdir"] + "/ " + mytmp + "/%s/" % self.settings["repo_name"],
11853 - "Snapshot failure",env=self.env)
11854 -
11855 - print "Compressing Portage snapshot tarball..."
11856 - cmd("tar -I lbzip2 -cf " + self.settings["snapshot_path"] + " -C " +
11857 - mytmp + " %s" % self.settings["repo_name"],
11858 - "Snapshot creation failure",env=self.env)
11859 -
11860 - self.gen_contents_file(self.settings["snapshot_path"])
11861 - self.gen_digest_file(self.settings["snapshot_path"])
11862 -
11863 - self.cleanup()
11864 - print "snapshot: complete!"
11865 -
11866 - def kill_chroot_pids(self):
11867 - pass
11868 -
11869 - def cleanup(self):
11870 - print "Cleaning up..."
11871 -
11872 - def purge(self):
11873 - myemp=self.settings["tmp_path"]
11874 - if os.path.isdir(myemp):
11875 - print "Emptying directory",myemp
11876 - """
11877 - stat the dir, delete the dir, recreate the dir and set
11878 - the proper perms and ownership
11879 - """
11880 - mystat=os.stat(myemp)
11881 - """ There's no easy way to change flags recursively in python """
11882 - if os.uname()[0] == "FreeBSD":
11883 - os.system("chflags -R noschg "+myemp)
11884 - shutil.rmtree(myemp)
11885 - os.makedirs(myemp,0755)
11886 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11887 - os.chmod(myemp,mystat[ST_MODE])
11888 -
11889 -def register(foo):
11890 - foo.update({"snapshot":snapshot_target})
11891 - return foo
11892 diff --git a/modules/stage1_target.py b/modules/stage1_target.py
11893 deleted file mode 100644
11894 index aa43926..0000000
11895 --- a/modules/stage1_target.py
11896 +++ /dev/null
11897 @@ -1,96 +0,0 @@
11898 -"""
11899 -stage1 target
11900 -"""
11901 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11902 -
11903 -from catalyst_support import *
11904 -from generic_stage_target import *
11905 -
11906 -class stage1_target(generic_stage_target):
11907 - """
11908 - Builder class for a stage1 installation tarball build.
11909 - """
11910 - def __init__(self,spec,addlargs):
11911 - self.required_values=[]
11912 - self.valid_values=["chost"]
11913 - self.valid_values.extend(["update_seed","update_seed_command"])
11914 - generic_stage_target.__init__(self,spec,addlargs)
11915 -
11916 - def set_stage_path(self):
11917 - self.settings["stage_path"]=normpath(self.settings["chroot_path"]+self.settings["root_path"])
11918 - print "stage1 stage path is "+self.settings["stage_path"]
11919 -
11920 - def set_root_path(self):
11921 - # sets the root path, relative to 'chroot_path', of the stage1 root
11922 - self.settings["root_path"]=normpath("/tmp/stage1root")
11923 - print "stage1 root path is "+self.settings["root_path"]
11924 -
11925 - def set_cleanables(self):
11926 - generic_stage_target.set_cleanables(self)
11927 - self.settings["cleanables"].extend([\
11928 - "/usr/share/zoneinfo", "/etc/portage/package*"])
11929 -
11930 - # XXX: How do these override_foo() functions differ from the ones in generic_stage_target and why aren't they in stage3_target?
11931 -
11932 - def override_chost(self):
11933 - if "chost" in self.settings:
11934 - self.settings["CHOST"]=list_to_string(self.settings["chost"])
11935 -
11936 - def override_cflags(self):
11937 - if "cflags" in self.settings:
11938 - self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
11939 -
11940 - def override_cxxflags(self):
11941 - if "cxxflags" in self.settings:
11942 - self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
11943 -
11944 - def override_ldflags(self):
11945 - if "ldflags" in self.settings:
11946 - self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
11947 -
11948 - def set_portage_overlay(self):
11949 - generic_stage_target.set_portage_overlay(self)
11950 - if "portage_overlay" in self.settings:
11951 - print "\nWARNING !!!!!"
11952 - print "\tUsing an portage overlay for earlier stages could cause build issues."
11953 - print "\tIf you break it, you buy it. Don't complain to us about it."
11954 - print "\tDont say we did not warn you\n"
11955 -
11956 - def base_dirs(self):
11957 - if os.uname()[0] == "FreeBSD":
11958 - # baselayout no longer creates the .keep files in proc and dev for FreeBSD as it
11959 - # would create them too late...we need them earlier before bind mounting filesystems
11960 - # since proc and dev are not writeable, so...create them here
11961 - if not os.path.exists(self.settings["stage_path"]+"/proc"):
11962 - os.makedirs(self.settings["stage_path"]+"/proc")
11963 - if not os.path.exists(self.settings["stage_path"]+"/dev"):
11964 - os.makedirs(self.settings["stage_path"]+"/dev")
11965 - if not os.path.isfile(self.settings["stage_path"]+"/proc/.keep"):
11966 - try:
11967 - proc_keepfile = open(self.settings["stage_path"]+"/proc/.keep","w")
11968 - proc_keepfile.write('')
11969 - proc_keepfile.close()
11970 - except IOError:
11971 - print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
11972 - if not os.path.isfile(self.settings["stage_path"]+"/dev/.keep"):
11973 - try:
11974 - dev_keepfile = open(self.settings["stage_path"]+"/dev/.keep","w")
11975 - dev_keepfile.write('')
11976 - dev_keepfile.close()
11977 - except IOError:
11978 - print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
11979 - else:
11980 - pass
11981 -
11982 - def set_mounts(self):
11983 - # stage_path/proc probably doesn't exist yet, so create it
11984 - if not os.path.exists(self.settings["stage_path"]+"/proc"):
11985 - os.makedirs(self.settings["stage_path"]+"/proc")
11986 -
11987 - # alter the mount mappings to bind mount proc onto it
11988 - self.mounts.append("/tmp/stage1root/proc")
11989 - self.mountmap["/tmp/stage1root/proc"]="/proc"
11990 -
11991 -def register(foo):
11992 - foo.update({"stage1":stage1_target})
11993 - return foo
11994 diff --git a/modules/stage2_target.py b/modules/stage2_target.py
11995 deleted file mode 100644
11996 index 6083e2b..0000000
11997 --- a/modules/stage2_target.py
11998 +++ /dev/null
11999 @@ -1,62 +0,0 @@
12000 -"""
12001 -stage2 target, builds upon previous stage1 tarball
12002 -"""
12003 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12004 -
12005 -from catalyst_support import *
12006 -from generic_stage_target import *
12007 -
12008 -class stage2_target(generic_stage_target):
12009 - """
12010 - Builder class for a stage2 installation tarball build.
12011 - """
12012 - def __init__(self,spec,addlargs):
12013 - self.required_values=[]
12014 - self.valid_values=["chost"]
12015 - generic_stage_target.__init__(self,spec,addlargs)
12016 -
12017 - def set_source_path(self):
12018 - if "SEEDCACHE" in self.settings and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
12019 - self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")
12020 - else:
12021 - self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
12022 - if os.path.isfile(self.settings["source_path"]):
12023 - if os.path.exists(self.settings["source_path"]):
12024 - # XXX: Is this even necessary if the previous check passes?
12025 - self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
12026 - hash_function=self.settings["hash_function"],verbose=False)
12027 - print "Source path set to "+self.settings["source_path"]
12028 - if os.path.isdir(self.settings["source_path"]):
12029 - print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"
12030 - print "\tthe source path will then be "+normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2\n")
12031 -
12032 - # XXX: How do these override_foo() functions differ from the ones in
12033 - # generic_stage_target and why aren't they in stage3_target?
12034 -
12035 - def override_chost(self):
12036 - if "chost" in self.settings:
12037 - self.settings["CHOST"]=list_to_string(self.settings["chost"])
12038 -
12039 - def override_cflags(self):
12040 - if "cflags" in self.settings:
12041 - self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
12042 -
12043 - def override_cxxflags(self):
12044 - if "cxxflags" in self.settings:
12045 - self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
12046 -
12047 - def override_ldflags(self):
12048 - if "ldflags" in self.settings:
12049 - self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
12050 -
12051 - def set_portage_overlay(self):
12052 - generic_stage_target.set_portage_overlay(self)
12053 - if "portage_overlay" in self.settings:
12054 - print "\nWARNING !!!!!"
12055 - print "\tUsing an portage overlay for earlier stages could cause build issues."
12056 - print "\tIf you break it, you buy it. Don't complain to us about it."
12057 - print "\tDont say we did not warn you\n"
12058 -
12059 -def register(foo):
12060 - foo.update({"stage2":stage2_target})
12061 - return foo
12062 diff --git a/modules/stage3_target.py b/modules/stage3_target.py
12063 deleted file mode 100644
12064 index 4d3a008..0000000
12065 --- a/modules/stage3_target.py
12066 +++ /dev/null
12067 @@ -1,31 +0,0 @@
12068 -"""
12069 -stage3 target, builds upon previous stage2/stage3 tarball
12070 -"""
12071 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12072 -
12073 -from catalyst_support import *
12074 -from generic_stage_target import *
12075 -
12076 -class stage3_target(generic_stage_target):
12077 - """
12078 - Builder class for a stage3 installation tarball build.
12079 - """
12080 - def __init__(self,spec,addlargs):
12081 - self.required_values=[]
12082 - self.valid_values=[]
12083 - generic_stage_target.__init__(self,spec,addlargs)
12084 -
12085 - def set_portage_overlay(self):
12086 - generic_stage_target.set_portage_overlay(self)
12087 - if "portage_overlay" in self.settings:
12088 - print "\nWARNING !!!!!"
12089 - print "\tUsing an overlay for earlier stages could cause build issues."
12090 - print "\tIf you break it, you buy it. Don't complain to us about it."
12091 - print "\tDont say we did not warn you\n"
12092 -
12093 - def set_cleanables(self):
12094 - generic_stage_target.set_cleanables(self)
12095 -
12096 -def register(foo):
12097 - foo.update({"stage3":stage3_target})
12098 - return foo
12099 diff --git a/modules/stage4_target.py b/modules/stage4_target.py
12100 deleted file mode 100644
12101 index ce41b2d..0000000
12102 --- a/modules/stage4_target.py
12103 +++ /dev/null
12104 @@ -1,43 +0,0 @@
12105 -"""
12106 -stage4 target, builds upon previous stage3/stage4 tarball
12107 -"""
12108 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12109 -
12110 -from catalyst_support import *
12111 -from generic_stage_target import *
12112 -
12113 -class stage4_target(generic_stage_target):
12114 - """
12115 - Builder class for stage4.
12116 - """
12117 - def __init__(self,spec,addlargs):
12118 - self.required_values=["stage4/packages"]
12119 - self.valid_values=self.required_values[:]
12120 - self.valid_values.extend(["stage4/use","boot/kernel",\
12121 - "stage4/root_overlay","stage4/fsscript",\
12122 - "stage4/gk_mainargs","splash_theme",\
12123 - "portage_overlay","stage4/rcadd","stage4/rcdel",\
12124 - "stage4/linuxrc","stage4/unmerge","stage4/rm","stage4/empty"])
12125 - generic_stage_target.__init__(self,spec,addlargs)
12126 -
12127 - def set_cleanables(self):
12128 - self.settings["cleanables"]=["/var/tmp/*","/tmp/*"]
12129 -
12130 - def set_action_sequence(self):
12131 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
12132 - "config_profile_link","setup_confdir","portage_overlay",\
12133 - "bind","chroot_setup","setup_environment","build_packages",\
12134 - "build_kernel","bootloader","root_overlay","fsscript",\
12135 - "preclean","rcupdate","unmerge","unbind","remove","empty",\
12136 - "clean"]
12137 -
12138 -# if "TARBALL" in self.settings or \
12139 -# "FETCH" not in self.settings:
12140 - if "FETCH" not in self.settings:
12141 - self.settings["action_sequence"].append("capture")
12142 - self.settings["action_sequence"].append("clear_autoresume")
12143 -
12144 -def register(foo):
12145 - foo.update({"stage4":stage4_target})
12146 - return foo
12147 -
12148 diff --git a/modules/tinderbox_target.py b/modules/tinderbox_target.py
12149 deleted file mode 100644
12150 index d6d3ea3..0000000
12151 --- a/modules/tinderbox_target.py
12152 +++ /dev/null
12153 @@ -1,44 +0,0 @@
12154 -"""
12155 -Tinderbox target
12156 -"""
12157 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12158 -
12159 -from catalyst_support import *
12160 -from generic_stage_target import *
12161 -
12162 -class tinderbox_target(generic_stage_target):
12163 - """
12164 - Builder class for the tinderbox target
12165 - """
12166 - def __init__(self,spec,addlargs):
12167 - self.required_values=["tinderbox/packages"]
12168 - self.valid_values=self.required_values[:]
12169 - self.valid_values.extend(["tinderbox/use"])
12170 - generic_stage_target.__init__(self,spec,addlargs)
12171 -
12172 - def run_local(self):
12173 - # tinderbox
12174 - # example call: "grp.sh run xmms vim sys-apps/gleep"
12175 - try:
12176 - if os.path.exists(self.settings["controller_file"]):
12177 - cmd("/bin/bash "+self.settings["controller_file"]+" run "+\
12178 - list_bashify(self.settings["tinderbox/packages"]),"run script failed.",env=self.env)
12179 -
12180 - except CatalystError:
12181 - self.unbind()
12182 - raise CatalystError,"Tinderbox aborting due to error."
12183 -
12184 - def set_cleanables(self):
12185 - self.settings["cleanables"]=["/etc/resolv.conf","/var/tmp/*","/root/*",
12186 - self.settings['portdir']]
12187 -
12188 - def set_action_sequence(self):
12189 - #Default action sequence for run method
12190 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
12191 - "config_profile_link","setup_confdir","bind","chroot_setup",\
12192 - "setup_environment","run_local","preclean","unbind","clean",\
12193 - "clear_autoresume"]
12194 -
12195 -def register(foo):
12196 - foo.update({"tinderbox":tinderbox_target})
12197 - return foo
12198 --
12199 1.8.3.2

Replies