Gentoo Archives: gentoo-catalyst

From: Brian Dolbec <dolsen@g.o>
To: gentoo-catalyst@l.g.o
Cc: Brian Dolbec <dolsen@g.o>
Subject: [gentoo-catalyst] [PATCH 1/5] Initial rearrangement of the python directories
Date: Sun, 12 Jan 2014 01:51:06
Message-Id: 1389491218-1488-2-git-send-email-dolsen@gentoo.org
In Reply to: [gentoo-catalyst] Re-organize the python structure by Brian Dolbec
1 New minimal start script, moving the original catalyst script to catalyst/main.py.
2 Add __init__.py's to modules and arch sub-pkgs.
3 skip __init__.py when loading the modules.
4 Update the module loading paths for the new locations.
5 Fix catalyst_support import to new location and specify imported modules.
6 ---
7 arch/alpha.py | 75 --
8 arch/amd64.py | 83 --
9 arch/arm.py | 133 ---
10 arch/hppa.py | 40 -
11 arch/ia64.py | 16 -
12 arch/mips.py | 464 --------
13 arch/powerpc.py | 124 ---
14 arch/s390.py | 33 -
15 arch/sh.py | 116 --
16 arch/sparc.py | 42 -
17 arch/x86.py | 153 ---
18 bin/catalyst | 46 +
19 catalyst | 419 -------
20 catalyst/__init__.py | 0
21 catalyst/arch/__init__.py | 1 +
22 catalyst/arch/alpha.py | 75 ++
23 catalyst/arch/amd64.py | 83 ++
24 catalyst/arch/arm.py | 133 +++
25 catalyst/arch/hppa.py | 40 +
26 catalyst/arch/ia64.py | 16 +
27 catalyst/arch/mips.py | 464 ++++++++
28 catalyst/arch/powerpc.py | 124 +++
29 catalyst/arch/s390.py | 33 +
30 catalyst/arch/sh.py | 116 ++
31 catalyst/arch/sparc.py | 42 +
32 catalyst/arch/x86.py | 153 +++
33 catalyst/config.py | 122 +++
34 catalyst/main.py | 428 ++++++++
35 catalyst/modules/__init__.py | 1 +
36 catalyst/modules/builder.py | 20 +
37 catalyst/modules/catalyst_lock.py | 468 ++++++++
38 catalyst/modules/catalyst_support.py | 718 ++++++++++++
39 catalyst/modules/embedded_target.py | 51 +
40 catalyst/modules/generic_stage_target.py | 1741 ++++++++++++++++++++++++++++++
41 catalyst/modules/generic_target.py | 11 +
42 catalyst/modules/grp_target.py | 118 ++
43 catalyst/modules/livecd_stage1_target.py | 75 ++
44 catalyst/modules/livecd_stage2_target.py | 148 +++
45 catalyst/modules/netboot2_target.py | 166 +++
46 catalyst/modules/netboot_target.py | 128 +++
47 catalyst/modules/snapshot_target.py | 91 ++
48 catalyst/modules/stage1_target.py | 97 ++
49 catalyst/modules/stage2_target.py | 66 ++
50 catalyst/modules/stage3_target.py | 31 +
51 catalyst/modules/stage4_target.py | 43 +
52 catalyst/modules/tinderbox_target.py | 48 +
53 catalyst/util.py | 14 +
54 modules/__init__.py | 0
55 modules/builder.py | 20 -
56 modules/catalyst/__init__.py | 0
57 modules/catalyst/config.py | 122 ---
58 modules/catalyst/util.py | 14 -
59 modules/catalyst_lock.py | 468 --------
60 modules/catalyst_support.py | 718 ------------
61 modules/embedded_target.py | 51 -
62 modules/generic_stage_target.py | 1740 -----------------------------
63 modules/generic_target.py | 11 -
64 modules/grp_target.py | 118 --
65 modules/livecd_stage1_target.py | 75 --
66 modules/livecd_stage2_target.py | 148 ---
67 modules/netboot2_target.py | 166 ---
68 modules/netboot_target.py | 128 ---
69 modules/snapshot_target.py | 91 --
70 modules/stage1_target.py | 97 --
71 modules/stage2_target.py | 66 --
72 modules/stage3_target.py | 31 -
73 modules/stage4_target.py | 43 -
74 modules/tinderbox_target.py | 48 -
75 68 files changed, 5911 insertions(+), 5853 deletions(-)
76 delete mode 100644 arch/alpha.py
77 delete mode 100644 arch/amd64.py
78 delete mode 100644 arch/arm.py
79 delete mode 100644 arch/hppa.py
80 delete mode 100644 arch/ia64.py
81 delete mode 100644 arch/mips.py
82 delete mode 100644 arch/powerpc.py
83 delete mode 100644 arch/s390.py
84 delete mode 100644 arch/sh.py
85 delete mode 100644 arch/sparc.py
86 delete mode 100644 arch/x86.py
87 create mode 100755 bin/catalyst
88 delete mode 100755 catalyst
89 create mode 100644 catalyst/__init__.py
90 create mode 100644 catalyst/arch/__init__.py
91 create mode 100644 catalyst/arch/alpha.py
92 create mode 100644 catalyst/arch/amd64.py
93 create mode 100644 catalyst/arch/arm.py
94 create mode 100644 catalyst/arch/hppa.py
95 create mode 100644 catalyst/arch/ia64.py
96 create mode 100644 catalyst/arch/mips.py
97 create mode 100644 catalyst/arch/powerpc.py
98 create mode 100644 catalyst/arch/s390.py
99 create mode 100644 catalyst/arch/sh.py
100 create mode 100644 catalyst/arch/sparc.py
101 create mode 100644 catalyst/arch/x86.py
102 create mode 100644 catalyst/config.py
103 create mode 100644 catalyst/main.py
104 create mode 100644 catalyst/modules/__init__.py
105 create mode 100644 catalyst/modules/builder.py
106 create mode 100644 catalyst/modules/catalyst_lock.py
107 create mode 100644 catalyst/modules/catalyst_support.py
108 create mode 100644 catalyst/modules/embedded_target.py
109 create mode 100644 catalyst/modules/generic_stage_target.py
110 create mode 100644 catalyst/modules/generic_target.py
111 create mode 100644 catalyst/modules/grp_target.py
112 create mode 100644 catalyst/modules/livecd_stage1_target.py
113 create mode 100644 catalyst/modules/livecd_stage2_target.py
114 create mode 100644 catalyst/modules/netboot2_target.py
115 create mode 100644 catalyst/modules/netboot_target.py
116 create mode 100644 catalyst/modules/snapshot_target.py
117 create mode 100644 catalyst/modules/stage1_target.py
118 create mode 100644 catalyst/modules/stage2_target.py
119 create mode 100644 catalyst/modules/stage3_target.py
120 create mode 100644 catalyst/modules/stage4_target.py
121 create mode 100644 catalyst/modules/tinderbox_target.py
122 create mode 100644 catalyst/util.py
123 delete mode 100644 modules/__init__.py
124 delete mode 100644 modules/builder.py
125 delete mode 100644 modules/catalyst/__init__.py
126 delete mode 100644 modules/catalyst/config.py
127 delete mode 100644 modules/catalyst/util.py
128 delete mode 100644 modules/catalyst_lock.py
129 delete mode 100644 modules/catalyst_support.py
130 delete mode 100644 modules/embedded_target.py
131 delete mode 100644 modules/generic_stage_target.py
132 delete mode 100644 modules/generic_target.py
133 delete mode 100644 modules/grp_target.py
134 delete mode 100644 modules/livecd_stage1_target.py
135 delete mode 100644 modules/livecd_stage2_target.py
136 delete mode 100644 modules/netboot2_target.py
137 delete mode 100644 modules/netboot_target.py
138 delete mode 100644 modules/snapshot_target.py
139 delete mode 100644 modules/stage1_target.py
140 delete mode 100644 modules/stage2_target.py
141 delete mode 100644 modules/stage3_target.py
142 delete mode 100644 modules/stage4_target.py
143 delete mode 100644 modules/tinderbox_target.py
144
145 diff --git a/arch/alpha.py b/arch/alpha.py
146 deleted file mode 100644
147 index f0fc95a..0000000
148 --- a/arch/alpha.py
149 +++ /dev/null
150 @@ -1,75 +0,0 @@
151 -
152 -import builder,os
153 -from catalyst_support import *
154 -
155 -class generic_alpha(builder.generic):
156 - "abstract base class for all alpha builders"
157 - def __init__(self,myspec):
158 - builder.generic.__init__(self,myspec)
159 - self.settings["CHROOT"]="chroot"
160 - self.settings["CFLAGS"]="-mieee -pipe"
161 -
162 -class arch_alpha(generic_alpha):
163 - "builder class for generic alpha (ev4+)"
164 - def __init__(self,myspec):
165 - generic_alpha.__init__(self,myspec)
166 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
167 - self.settings["CHOST"]="alpha-unknown-linux-gnu"
168 -
169 -class arch_ev4(generic_alpha):
170 - "builder class for alpha ev4"
171 - def __init__(self,myspec):
172 - generic_alpha.__init__(self,myspec)
173 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
174 - self.settings["CHOST"]="alphaev4-unknown-linux-gnu"
175 -
176 -class arch_ev45(generic_alpha):
177 - "builder class for alpha ev45"
178 - def __init__(self,myspec):
179 - generic_alpha.__init__(self,myspec)
180 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev45"
181 - self.settings["CHOST"]="alphaev45-unknown-linux-gnu"
182 -
183 -class arch_ev5(generic_alpha):
184 - "builder class for alpha ev5"
185 - def __init__(self,myspec):
186 - generic_alpha.__init__(self,myspec)
187 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev5"
188 - self.settings["CHOST"]="alphaev5-unknown-linux-gnu"
189 -
190 -class arch_ev56(generic_alpha):
191 - "builder class for alpha ev56 (ev5 plus BWX)"
192 - def __init__(self,myspec):
193 - generic_alpha.__init__(self,myspec)
194 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev56"
195 - self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
196 -
197 -class arch_pca56(generic_alpha):
198 - "builder class for alpha pca56 (ev5 plus BWX & MAX)"
199 - def __init__(self,myspec):
200 - generic_alpha.__init__(self,myspec)
201 - self.settings["CFLAGS"]+=" -O2 -mcpu=pca56"
202 - self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
203 -
204 -class arch_ev6(generic_alpha):
205 - "builder class for alpha ev6"
206 - def __init__(self,myspec):
207 - generic_alpha.__init__(self,myspec)
208 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev6"
209 - self.settings["CHOST"]="alphaev6-unknown-linux-gnu"
210 - self.settings["HOSTUSE"]=["ev6"]
211 -
212 -class arch_ev67(generic_alpha):
213 - "builder class for alpha ev67 (ev6 plus CIX)"
214 - def __init__(self,myspec):
215 - generic_alpha.__init__(self,myspec)
216 - self.settings["CFLAGS"]+=" -O2 -mcpu=ev67"
217 - self.settings["CHOST"]="alphaev67-unknown-linux-gnu"
218 - self.settings["HOSTUSE"]=["ev6"]
219 -
220 -def register():
221 - "Inform main catalyst program of the contents of this plugin."
222 - return ({ "alpha":arch_alpha, "ev4":arch_ev4, "ev45":arch_ev45,
223 - "ev5":arch_ev5, "ev56":arch_ev56, "pca56":arch_pca56,
224 - "ev6":arch_ev6, "ev67":arch_ev67 },
225 - ("alpha", ))
226 diff --git a/arch/amd64.py b/arch/amd64.py
227 deleted file mode 100644
228 index 262b55a..0000000
229 --- a/arch/amd64.py
230 +++ /dev/null
231 @@ -1,83 +0,0 @@
232 -
233 -import builder
234 -
235 -class generic_amd64(builder.generic):
236 - "abstract base class for all amd64 builders"
237 - def __init__(self,myspec):
238 - builder.generic.__init__(self,myspec)
239 - self.settings["CHROOT"]="chroot"
240 -
241 -class arch_amd64(generic_amd64):
242 - "builder class for generic amd64 (Intel and AMD)"
243 - def __init__(self,myspec):
244 - generic_amd64.__init__(self,myspec)
245 - self.settings["CFLAGS"]="-O2 -pipe"
246 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
247 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
248 -
249 -class arch_nocona(generic_amd64):
250 - "improved version of Intel Pentium 4 CPU with 64-bit extensions, MMX, SSE, SSE2 and SSE3 support"
251 - def __init__(self,myspec):
252 - generic_amd64.__init__(self,myspec)
253 - self.settings["CFLAGS"]="-O2 -march=nocona -pipe"
254 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
255 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
256 -
257 -# Requires gcc 4.3 to use this class
258 -class arch_core2(generic_amd64):
259 - "Intel Core 2 CPU with 64-bit extensions, MMX, SSE, SSE2, SSE3 and SSSE3 support"
260 - def __init__(self,myspec):
261 - generic_amd64.__init__(self,myspec)
262 - self.settings["CFLAGS"]="-O2 -march=core2 -pipe"
263 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
264 - self.settings["HOSTUSE"]=["mmx","sse","sse2","ssse3"]
265 -
266 -class arch_k8(generic_amd64):
267 - "generic k8, opteron and athlon64 support"
268 - def __init__(self,myspec):
269 - generic_amd64.__init__(self,myspec)
270 - self.settings["CFLAGS"]="-O2 -march=k8 -pipe"
271 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
272 - self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
273 -
274 -class arch_k8_sse3(generic_amd64):
275 - "improved versions of k8, opteron and athlon64 with SSE3 support"
276 - def __init__(self,myspec):
277 - generic_amd64.__init__(self,myspec)
278 - self.settings["CFLAGS"]="-O2 -march=k8-sse3 -pipe"
279 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
280 - self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
281 -
282 -class arch_amdfam10(generic_amd64):
283 - "AMD Family 10h core based CPUs with x86-64 instruction set support"
284 - def __init__(self,myspec):
285 - generic_amd64.__init__(self,myspec)
286 - self.settings["CFLAGS"]="-O2 -march=amdfam10 -pipe"
287 - self.settings["CHOST"]="x86_64-pc-linux-gnu"
288 - self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
289 -
290 -class arch_x32(generic_amd64):
291 - "builder class for generic x32 (Intel and AMD)"
292 - def __init__(self,myspec):
293 - generic_amd64.__init__(self,myspec)
294 - self.settings["CFLAGS"]="-O2 -pipe"
295 - self.settings["CHOST"]="x86_64-pc-linux-gnux32"
296 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
297 -
298 -def register():
299 - "inform main catalyst program of the contents of this plugin"
300 - return ({
301 - "amd64" : arch_amd64,
302 - "k8" : arch_k8,
303 - "opteron" : arch_k8,
304 - "athlon64" : arch_k8,
305 - "athlonfx" : arch_k8,
306 - "nocona" : arch_nocona,
307 - "core2" : arch_core2,
308 - "k8-sse3" : arch_k8_sse3,
309 - "opteron-sse3" : arch_k8_sse3,
310 - "athlon64-sse3" : arch_k8_sse3,
311 - "amdfam10" : arch_amdfam10,
312 - "barcelona" : arch_amdfam10,
313 - "x32" : arch_x32,
314 - }, ("x86_64","amd64","nocona"))
315 diff --git a/arch/arm.py b/arch/arm.py
316 deleted file mode 100644
317 index 2de3942..0000000
318 --- a/arch/arm.py
319 +++ /dev/null
320 @@ -1,133 +0,0 @@
321 -
322 -import builder,os
323 -from catalyst_support import *
324 -
325 -class generic_arm(builder.generic):
326 - "Abstract base class for all arm (little endian) builders"
327 - def __init__(self,myspec):
328 - builder.generic.__init__(self,myspec)
329 - self.settings["CHROOT"]="chroot"
330 - self.settings["CFLAGS"]="-O2 -pipe"
331 -
332 -class generic_armeb(builder.generic):
333 - "Abstract base class for all arm (big endian) builders"
334 - def __init__(self,myspec):
335 - builder.generic.__init__(self,myspec)
336 - self.settings["CHROOT"]="chroot"
337 - self.settings["CFLAGS"]="-O2 -pipe"
338 -
339 -class arch_arm(generic_arm):
340 - "Builder class for arm (little endian) target"
341 - def __init__(self,myspec):
342 - generic_arm.__init__(self,myspec)
343 - self.settings["CHOST"]="arm-unknown-linux-gnu"
344 -
345 -class arch_armeb(generic_armeb):
346 - "Builder class for arm (big endian) target"
347 - def __init__(self,myspec):
348 - generic_armeb.__init__(self,myspec)
349 - self.settings["CHOST"]="armeb-unknown-linux-gnu"
350 -
351 -class arch_armv4l(generic_arm):
352 - "Builder class for armv4l target"
353 - def __init__(self,myspec):
354 - generic_arm.__init__(self,myspec)
355 - self.settings["CHOST"]="armv4l-unknown-linux-gnu"
356 - self.settings["CFLAGS"]+=" -march=armv4"
357 -
358 -class arch_armv4tl(generic_arm):
359 - "Builder class for armv4tl target"
360 - def __init__(self,myspec):
361 - generic_arm.__init__(self,myspec)
362 - self.settings["CHOST"]="armv4tl-softfloat-linux-gnueabi"
363 - self.settings["CFLAGS"]+=" -march=armv4t"
364 -
365 -class arch_armv5tl(generic_arm):
366 - "Builder class for armv5tl target"
367 - def __init__(self,myspec):
368 - generic_arm.__init__(self,myspec)
369 - self.settings["CHOST"]="armv5tl-softfloat-linux-gnueabi"
370 - self.settings["CFLAGS"]+=" -march=armv5t"
371 -
372 -class arch_armv5tel(generic_arm):
373 - "Builder class for armv5tel target"
374 - def __init__(self,myspec):
375 - generic_arm.__init__(self,myspec)
376 - self.settings["CHOST"]="armv5tel-softfloat-linux-gnueabi"
377 - self.settings["CFLAGS"]+=" -march=armv5te"
378 -
379 -class arch_armv5tejl(generic_arm):
380 - "Builder class for armv5tejl target"
381 - def __init__(self,myspec):
382 - generic_arm.__init__(self,myspec)
383 - self.settings["CHOST"]="armv5tejl-softfloat-linux-gnueabi"
384 - self.settings["CFLAGS"]+=" -march=armv5te"
385 -
386 -class arch_armv6j(generic_arm):
387 - "Builder class for armv6j target"
388 - def __init__(self,myspec):
389 - generic_arm.__init__(self,myspec)
390 - self.settings["CHOST"]="armv6j-softfp-linux-gnueabi"
391 - self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=softfp"
392 -
393 -class arch_armv6z(generic_arm):
394 - "Builder class for armv6z target"
395 - def __init__(self,myspec):
396 - generic_arm.__init__(self,myspec)
397 - self.settings["CHOST"]="armv6z-softfp-linux-gnueabi"
398 - self.settings["CFLAGS"]+=" -march=armv6z -mfpu=vfp -mfloat-abi=softfp"
399 -
400 -class arch_armv6zk(generic_arm):
401 - "Builder class for armv6zk target"
402 - def __init__(self,myspec):
403 - generic_arm.__init__(self,myspec)
404 - self.settings["CHOST"]="armv6zk-softfp-linux-gnueabi"
405 - self.settings["CFLAGS"]+=" -march=armv6zk -mfpu=vfp -mfloat-abi=softfp"
406 -
407 -class arch_armv7a(generic_arm):
408 - "Builder class for armv7a target"
409 - def __init__(self,myspec):
410 - generic_arm.__init__(self,myspec)
411 - self.settings["CHOST"]="armv7a-softfp-linux-gnueabi"
412 - self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp"
413 -
414 -class arch_armv6j_hardfp(generic_arm):
415 - "Builder class for armv6j hardfloat target, needs >=gcc-4.5"
416 - def __init__(self,myspec):
417 - generic_arm.__init__(self,myspec)
418 - self.settings["CHOST"]="armv6j-hardfloat-linux-gnueabi"
419 - self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=hard"
420 -
421 -class arch_armv7a_hardfp(generic_arm):
422 - "Builder class for armv7a hardfloat target, needs >=gcc-4.5"
423 - def __init__(self,myspec):
424 - generic_arm.__init__(self,myspec)
425 - self.settings["CHOST"]="armv7a-hardfloat-linux-gnueabi"
426 - self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=hard"
427 -
428 -class arch_armv5teb(generic_armeb):
429 - "Builder class for armv5teb (XScale) target"
430 - def __init__(self,myspec):
431 - generic_armeb.__init__(self,myspec)
432 - self.settings["CFLAGS"]+=" -mcpu=xscale"
433 - self.settings["CHOST"]="armv5teb-softfloat-linux-gnueabi"
434 -
435 -def register():
436 - "Inform main catalyst program of the contents of this plugin."
437 - return ({
438 - "arm" : arch_arm,
439 - "armv4l" : arch_armv4l,
440 - "armv4tl": arch_armv4tl,
441 - "armv5tl": arch_armv5tl,
442 - "armv5tel": arch_armv5tel,
443 - "armv5tejl": arch_armv5tejl,
444 - "armv6j" : arch_armv6j,
445 - "armv6z" : arch_armv6z,
446 - "armv6zk" : arch_armv6zk,
447 - "armv7a" : arch_armv7a,
448 - "armv6j_hardfp" : arch_armv6j_hardfp,
449 - "armv7a_hardfp" : arch_armv7a_hardfp,
450 - "armeb" : arch_armeb,
451 - "armv5teb" : arch_armv5teb
452 - }, ("arm", "armv4l", "armv4tl", "armv5tl", "armv5tel", "armv5tejl", "armv6l",
453 -"armv7l", "armeb", "armv5teb") )
454 diff --git a/arch/hppa.py b/arch/hppa.py
455 deleted file mode 100644
456 index f804398..0000000
457 --- a/arch/hppa.py
458 +++ /dev/null
459 @@ -1,40 +0,0 @@
460 -
461 -import builder,os
462 -from catalyst_support import *
463 -
464 -class generic_hppa(builder.generic):
465 - "Abstract base class for all hppa builders"
466 - def __init__(self,myspec):
467 - builder.generic.__init__(self,myspec)
468 - self.settings["CHROOT"]="chroot"
469 - self.settings["CFLAGS"]="-O2 -pipe"
470 - self.settings["CXXFLAGS"]="-O2 -pipe"
471 -
472 -class arch_hppa(generic_hppa):
473 - "Builder class for hppa systems"
474 - def __init__(self,myspec):
475 - generic_hppa.__init__(self,myspec)
476 - self.settings["CFLAGS"]+=" -march=1.0"
477 - self.settings["CHOST"]="hppa-unknown-linux-gnu"
478 -
479 -class arch_hppa1_1(generic_hppa):
480 - "Builder class for hppa 1.1 systems"
481 - def __init__(self,myspec):
482 - generic_hppa.__init__(self,myspec)
483 - self.settings["CFLAGS"]+=" -march=1.1"
484 - self.settings["CHOST"]="hppa1.1-unknown-linux-gnu"
485 -
486 -class arch_hppa2_0(generic_hppa):
487 - "Builder class for hppa 2.0 systems"
488 - def __init__(self,myspec):
489 - generic_hppa.__init__(self,myspec)
490 - self.settings["CFLAGS"]+=" -march=2.0"
491 - self.settings["CHOST"]="hppa2.0-unknown-linux-gnu"
492 -
493 -def register():
494 - "Inform main catalyst program of the contents of this plugin."
495 - return ({
496 - "hppa": arch_hppa,
497 - "hppa1.1": arch_hppa1_1,
498 - "hppa2.0": arch_hppa2_0
499 - }, ("parisc","parisc64","hppa","hppa64") )
500 diff --git a/arch/ia64.py b/arch/ia64.py
501 deleted file mode 100644
502 index 825af70..0000000
503 --- a/arch/ia64.py
504 +++ /dev/null
505 @@ -1,16 +0,0 @@
506 -
507 -import builder,os
508 -from catalyst_support import *
509 -
510 -class arch_ia64(builder.generic):
511 - "builder class for ia64"
512 - def __init__(self,myspec):
513 - builder.generic.__init__(self,myspec)
514 - self.settings["CHROOT"]="chroot"
515 - self.settings["CFLAGS"]="-O2 -pipe"
516 - self.settings["CFLAGS"]="-O2 -pipe"
517 - self.settings["CHOST"]="ia64-unknown-linux-gnu"
518 -
519 -def register():
520 - "Inform main catalyst program of the contents of this plugin."
521 - return ({ "ia64":arch_ia64 }, ("ia64", ))
522 diff --git a/arch/mips.py b/arch/mips.py
523 deleted file mode 100644
524 index b3730fa..0000000
525 --- a/arch/mips.py
526 +++ /dev/null
527 @@ -1,464 +0,0 @@
528 -
529 -import builder,os
530 -from catalyst_support import *
531 -
532 -class generic_mips(builder.generic):
533 - "Abstract base class for all mips builders [Big-endian]"
534 - def __init__(self,myspec):
535 - builder.generic.__init__(self,myspec)
536 - self.settings["CHROOT"]="chroot"
537 - self.settings["CHOST"]="mips-unknown-linux-gnu"
538 -
539 -class generic_mipsel(builder.generic):
540 - "Abstract base class for all mipsel builders [Little-endian]"
541 - def __init__(self,myspec):
542 - builder.generic.__init__(self,myspec)
543 - self.settings["CHROOT"]="chroot"
544 - self.settings["CHOST"]="mipsel-unknown-linux-gnu"
545 -
546 -class generic_mips64(builder.generic):
547 - "Abstract base class for all mips64 builders [Big-endian]"
548 - def __init__(self,myspec):
549 - builder.generic.__init__(self,myspec)
550 - self.settings["CHROOT"]="chroot"
551 - self.settings["CHOST"]="mips64-unknown-linux-gnu"
552 -
553 -class generic_mips64el(builder.generic):
554 - "Abstract base class for all mips64el builders [Little-endian]"
555 - def __init__(self,myspec):
556 - builder.generic.__init__(self,myspec)
557 - self.settings["CHROOT"]="chroot"
558 - self.settings["CHOST"]="mips64el-unknown-linux-gnu"
559 -
560 -class arch_mips1(generic_mips):
561 - "Builder class for MIPS I [Big-endian]"
562 - def __init__(self,myspec):
563 - generic_mips.__init__(self,myspec)
564 - self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
565 -
566 -class arch_mips32(generic_mips):
567 - "Builder class for MIPS 32 [Big-endian]"
568 - def __init__(self,myspec):
569 - generic_mips.__init__(self,myspec)
570 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
571 -
572 -class arch_mips32_softfloat(generic_mips):
573 - "Builder class for MIPS 32 [Big-endian softfloat]"
574 - def __init__(self,myspec):
575 - generic_mips.__init__(self,myspec)
576 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
577 - self.settings["CHOST"]="mips-softfloat-linux-gnu"
578 -
579 -class arch_mips32r2(generic_mips):
580 - "Builder class for MIPS 32r2 [Big-endian]"
581 - def __init__(self,myspec):
582 - generic_mips.__init__(self,myspec)
583 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
584 -
585 -class arch_mips32r2_softfloat(generic_mips):
586 - "Builder class for MIPS 32r2 [Big-endian softfloat]"
587 - def __init__(self,myspec):
588 - generic_mips.__init__(self,myspec)
589 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
590 - self.settings["CHOST"]="mips-softfloat-linux-gnu"
591 -
592 -class arch_mips3(generic_mips):
593 - "Builder class for MIPS III [Big-endian]"
594 - def __init__(self,myspec):
595 - generic_mips.__init__(self,myspec)
596 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
597 -
598 -class arch_mips3_n32(generic_mips64):
599 - "Builder class for MIPS III [Big-endian N32]"
600 - def __init__(self,myspec):
601 - generic_mips64.__init__(self,myspec)
602 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
603 -
604 -class arch_mips3_n64(generic_mips64):
605 - "Builder class for MIPS III [Big-endian N64]"
606 - def __init__(self,myspec):
607 - generic_mips64.__init__(self,myspec)
608 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -mfix-r4000 -mfix-r4400 -pipe"
609 -
610 -class arch_mips3_multilib(generic_mips64):
611 - "Builder class for MIPS III [Big-endian multilib]"
612 - def __init__(self,myspec):
613 - generic_mips64.__init__(self,myspec)
614 - self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -mfix-r4000 -mfix-r4400 -pipe"
615 -
616 -class arch_mips4(generic_mips):
617 - "Builder class for MIPS IV [Big-endian]"
618 - def __init__(self,myspec):
619 - generic_mips.__init__(self,myspec)
620 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
621 -
622 -class arch_mips4_n32(generic_mips64):
623 - "Builder class for MIPS IV [Big-endian N32]"
624 - def __init__(self,myspec):
625 - generic_mips64.__init__(self,myspec)
626 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
627 -
628 -class arch_mips4_n64(generic_mips64):
629 - "Builder class for MIPS IV [Big-endian N64]"
630 - def __init__(self,myspec):
631 - generic_mips64.__init__(self,myspec)
632 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
633 -
634 -class arch_mips4_multilib(generic_mips64):
635 - "Builder class for MIPS IV [Big-endian multilib]"
636 - def __init__(self,myspec):
637 - generic_mips64.__init__(self,myspec)
638 - self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
639 -
640 -class arch_mips4_r10k(generic_mips):
641 - "Builder class for MIPS IV R10k [Big-endian]"
642 - def __init__(self,myspec):
643 - generic_mips.__init__(self,myspec)
644 - self.settings["CFLAGS"]="-O2 -march=r10k -mabi=32 -mplt -pipe"
645 -
646 -class arch_mips4_r10k_n32(generic_mips64):
647 - "Builder class for MIPS IV R10k [Big-endian N32]"
648 - def __init__(self,myspec):
649 - generic_mips64.__init__(self,myspec)
650 - self.settings["CFLAGS"]="-O2 -march=r10k -mabi=n32 -mplt -pipe"
651 -
652 -class arch_mips4_r10k_n64(generic_mips64):
653 - "Builder class for MIPS IV R10k [Big-endian N64]"
654 - def __init__(self,myspec):
655 - generic_mips64.__init__(self,myspec)
656 - self.settings["CFLAGS"]="-O2 -march=r10k -mabi=64 -pipe"
657 -
658 -class arch_mips4_r10k_multilib(generic_mips64):
659 - "Builder class for MIPS IV R10k [Big-endian multilib]"
660 - def __init__(self,myspec):
661 - generic_mips64.__init__(self,myspec)
662 - self.settings["CFLAGS"]="-O2 -march=r10k -mplt -pipe"
663 -
664 -class arch_mips64(generic_mips):
665 - "Builder class for MIPS 64 [Big-endian]"
666 - def __init__(self,myspec):
667 - generic_mips.__init__(self,myspec)
668 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
669 -
670 -class arch_mips64_n32(generic_mips64):
671 - "Builder class for MIPS 64 [Big-endian N32]"
672 - def __init__(self,myspec):
673 - generic_mips64.__init__(self,myspec)
674 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
675 -
676 -class arch_mips64_n64(generic_mips64):
677 - "Builder class for MIPS 64 [Big-endian N64]"
678 - def __init__(self,myspec):
679 - generic_mips64.__init__(self,myspec)
680 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
681 -
682 -class arch_mips64_multilib(generic_mips64):
683 - "Builder class for MIPS 64 [Big-endian multilib]"
684 - def __init__(self,myspec):
685 - generic_mips64.__init__(self,myspec)
686 - self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
687 -
688 -class arch_mips64r2(generic_mips):
689 - "Builder class for MIPS 64r2 [Big-endian]"
690 - def __init__(self,myspec):
691 - generic_mips.__init__(self,myspec)
692 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
693 -
694 -class arch_mips64r2_n32(generic_mips64):
695 - "Builder class for MIPS 64r2 [Big-endian N32]"
696 - def __init__(self,myspec):
697 - generic_mips64.__init__(self,myspec)
698 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
699 -
700 -class arch_mips64r2_n64(generic_mips64):
701 - "Builder class for MIPS 64r2 [Big-endian N64]"
702 - def __init__(self,myspec):
703 - generic_mips64.__init__(self,myspec)
704 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
705 -
706 -class arch_mips64r2_multilib(generic_mips64):
707 - "Builder class for MIPS 64r2 [Big-endian multilib]"
708 - def __init__(self,myspec):
709 - generic_mips64.__init__(self,myspec)
710 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
711 -
712 -class arch_mipsel1(generic_mipsel):
713 - "Builder class for MIPS I [Little-endian]"
714 - def __init__(self,myspec):
715 - generic_mipsel.__init__(self,myspec)
716 - self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
717 -
718 -class arch_mips32el(generic_mipsel):
719 - "Builder class for MIPS 32 [Little-endian]"
720 - def __init__(self,myspec):
721 - generic_mipsel.__init__(self,myspec)
722 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
723 -
724 -class arch_mips32el_softfloat(generic_mipsel):
725 - "Builder class for MIPS 32 [Little-endian softfloat]"
726 - def __init__(self,myspec):
727 - generic_mipsel.__init__(self,myspec)
728 - self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
729 - self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
730 -
731 -class arch_mips32r2el(generic_mipsel):
732 - "Builder class for MIPS 32r2 [Little-endian]"
733 - def __init__(self,myspec):
734 - generic_mipsel.__init__(self,myspec)
735 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
736 -
737 -class arch_mips32r2el_softfloat(generic_mipsel):
738 - "Builder class for MIPS 32r2 [Little-endian softfloat]"
739 - def __init__(self,myspec):
740 - generic_mipsel.__init__(self,myspec)
741 - self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
742 - self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
743 -
744 -class arch_mipsel3(generic_mipsel):
745 - "Builder class for MIPS III [Little-endian]"
746 - def __init__(self,myspec):
747 - generic_mipsel.__init__(self,myspec)
748 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
749 -
750 -class arch_mipsel3_n32(generic_mips64el):
751 - "Builder class for MIPS III [Little-endian N32]"
752 - def __init__(self,myspec):
753 - generic_mips64el.__init__(self,myspec)
754 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
755 -
756 -class arch_mipsel3_n64(generic_mips64el):
757 - "Builder class for MIPS III [Little-endian N64]"
758 - def __init__(self,myspec):
759 - generic_mips64el.__init__(self,myspec)
760 - self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
761 -
762 -class arch_mipsel3_multilib(generic_mips64el):
763 - "Builder class for MIPS III [Little-endian multilib]"
764 - def __init__(self,myspec):
765 - generic_mips64el.__init__(self,myspec)
766 - self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -Wa,-mfix-loongson2f-nop -pipe"
767 -
768 -class arch_loongson2e(generic_mipsel):
769 - "Builder class for Loongson 2E [Little-endian]"
770 - def __init__(self,myspec):
771 - generic_mipsel.__init__(self,myspec)
772 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=32 -mplt -pipe"
773 -
774 -class arch_loongson2e_n32(generic_mips64el):
775 - "Builder class for Loongson 2E [Little-endian N32]"
776 - def __init__(self,myspec):
777 - generic_mips64el.__init__(self,myspec)
778 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=n32 -mplt -pipe"
779 -
780 -class arch_loongson2e_n64(generic_mips64el):
781 - "Builder class for Loongson 2E [Little-endian N64]"
782 - def __init__(self,myspec):
783 - generic_mips64el.__init__(self,myspec)
784 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=64 -pipe"
785 -
786 -class arch_loongson2e_multilib(generic_mips64el):
787 - "Builder class for Loongson 2E [Little-endian multilib]"
788 - def __init__(self,myspec):
789 - generic_mips64el.__init__(self,myspec)
790 - self.settings["CFLAGS"]="-O2 -march=loongson2e -mplt -pipe"
791 -
792 -class arch_loongson2f(generic_mipsel):
793 - "Builder class for Loongson 2F [Little-endian]"
794 - def __init__(self,myspec):
795 - generic_mipsel.__init__(self,myspec)
796 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
797 -
798 -class arch_loongson2f_n32(generic_mips64el):
799 - "Builder class for Loongson 2F [Little-endian N32]"
800 - def __init__(self,myspec):
801 - generic_mips64el.__init__(self,myspec)
802 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
803 -
804 -class arch_loongson2f_n64(generic_mips64el):
805 - "Builder class for Loongson 2F [Little-endian N64]"
806 - def __init__(self,myspec):
807 - generic_mips64el.__init__(self,myspec)
808 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
809 -
810 -class arch_loongson2f_multilib(generic_mips64el):
811 - "Builder class for Loongson 2F [Little-endian multilib]"
812 - def __init__(self,myspec):
813 - generic_mips64el.__init__(self,myspec)
814 - self.settings["CFLAGS"]="-O2 -march=loongson2f -mplt -Wa,-mfix-loongson2f-nop -pipe"
815 -
816 -class arch_mipsel4(generic_mipsel):
817 - "Builder class for MIPS IV [Little-endian]"
818 - def __init__(self,myspec):
819 - generic_mipsel.__init__(self,myspec)
820 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
821 -
822 -class arch_mipsel4_n32(generic_mips64el):
823 - "Builder class for MIPS IV [Little-endian N32]"
824 - def __init__(self,myspec):
825 - generic_mips64el.__init__(self,myspec)
826 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
827 -
828 -class arch_mipsel4_n64(generic_mips64el):
829 - "Builder class for MIPS IV [Little-endian N64]"
830 - def __init__(self,myspec):
831 - generic_mips64el.__init__(self,myspec)
832 - self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
833 -
834 -class arch_mipsel4_multilib(generic_mips64el):
835 - "Builder class for MIPS IV [Little-endian multilib]"
836 - def __init__(self,myspec):
837 - generic_mips64el.__init__(self,myspec)
838 - self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
839 -
840 -class arch_mips64el(generic_mipsel):
841 - "Builder class for MIPS 64 [Little-endian]"
842 - def __init__(self,myspec):
843 - generic_mipsel.__init__(self,myspec)
844 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
845 -
846 -class arch_mips64el_n32(generic_mips64el):
847 - "Builder class for MIPS 64 [Little-endian N32]"
848 - def __init__(self,myspec):
849 - generic_mips64el.__init__(self,myspec)
850 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
851 -
852 -class arch_mips64el_n64(generic_mips64el):
853 - "Builder class for MIPS 64 [Little-endian N64]"
854 - def __init__(self,myspec):
855 - generic_mips64el.__init__(self,myspec)
856 - self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
857 -
858 -class arch_mips64el_multilib(generic_mips64el):
859 - "Builder class for MIPS 64 [Little-endian multilib]"
860 - def __init__(self,myspec):
861 - generic_mips64el.__init__(self,myspec)
862 - self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
863 -
864 -class arch_mips64r2el(generic_mipsel):
865 - "Builder class for MIPS 64r2 [Little-endian]"
866 - def __init__(self,myspec):
867 - generic_mipsel.__init__(self,myspec)
868 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
869 -
870 -class arch_mips64r2el_n32(generic_mips64el):
871 - "Builder class for MIPS 64r2 [Little-endian N32]"
872 - def __init__(self,myspec):
873 - generic_mips64el.__init__(self,myspec)
874 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
875 -
876 -class arch_mips64r2el_n64(generic_mips64el):
877 - "Builder class for MIPS 64r2 [Little-endian N64]"
878 - def __init__(self,myspec):
879 - generic_mips64el.__init__(self,myspec)
880 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
881 -
882 -class arch_mips64r2el_multilib(generic_mips64el):
883 - "Builder class for MIPS 64r2 [Little-endian multilib]"
884 - def __init__(self,myspec):
885 - generic_mips64el.__init__(self,myspec)
886 - self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
887 -
888 -class arch_loongson3a(generic_mipsel):
889 - "Builder class for Loongson 3A [Little-endian]"
890 - def __init__(self,myspec):
891 - generic_mipsel.__init__(self,myspec)
892 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=32 -mplt -pipe"
893 -
894 -class arch_loongson3a_n32(generic_mips64el):
895 - "Builder class for Loongson 3A [Little-endian N32]"
896 - def __init__(self,myspec):
897 - generic_mips64el.__init__(self,myspec)
898 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=n32 -mplt -pipe"
899 -
900 -class arch_loongson3a_n64(generic_mips64el):
901 - "Builder class for Loongson 3A [Little-endian N64]"
902 - def __init__(self,myspec):
903 - generic_mips64el.__init__(self,myspec)
904 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=64 -pipe"
905 -
906 -class arch_loongson3a_multilib(generic_mips64el):
907 - "Builder class for Loongson 3A [Little-endian multilib]"
908 - def __init__(self,myspec):
909 - generic_mips64el.__init__(self,myspec)
910 - self.settings["CFLAGS"]="-O2 -march=loongson3a -mplt -pipe"
911 -
912 -class arch_cobalt(generic_mipsel):
913 - "Builder class for cobalt [Little-endian]"
914 - def __init__(self,myspec):
915 - generic_mipsel.__init__(self,myspec)
916 - self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=32 -mplt -pipe"
917 - self.settings["HOSTUSE"]=["cobalt"]
918 -
919 -class arch_cobalt_n32(generic_mips64el):
920 - "Builder class for cobalt [Little-endian N32]"
921 - def __init__(self,myspec):
922 - generic_mips64el.__init__(self,myspec)
923 - self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=n32 -mplt -pipe"
924 - self.settings["HOSTUSE"]=["cobalt"]
925 -
926 -def register():
927 - "Inform main catalyst program of the contents of this plugin."
928 - return ({
929 - "cobalt" : arch_cobalt,
930 - "cobalt_n32" : arch_cobalt_n32,
931 - "mips" : arch_mips1,
932 - "mips1" : arch_mips1,
933 - "mips32" : arch_mips32,
934 - "mips32_softfloat" : arch_mips32_softfloat,
935 - "mips32r2" : arch_mips32r2,
936 - "mips32r2_softfloat" : arch_mips32r2_softfloat,
937 - "mips3" : arch_mips3,
938 - "mips3_n32" : arch_mips3_n32,
939 - "mips3_n64" : arch_mips3_n64,
940 - "mips3_multilib" : arch_mips3_multilib,
941 - "mips4" : arch_mips4,
942 - "mips4_n32" : arch_mips4_n32,
943 - "mips4_n64" : arch_mips4_n64,
944 - "mips4_multilib" : arch_mips4_multilib,
945 - "mips4_r10k" : arch_mips4_r10k,
946 - "mips4_r10k_n32" : arch_mips4_r10k_n32,
947 - "mips4_r10k_n64" : arch_mips4_r10k_n64,
948 - "mips4_r10k_multilib" : arch_mips4_r10k_multilib,
949 - "mips64" : arch_mips64,
950 - "mips64_n32" : arch_mips64_n32,
951 - "mips64_n64" : arch_mips64_n64,
952 - "mips64_multilib" : arch_mips64_multilib,
953 - "mips64r2" : arch_mips64r2,
954 - "mips64r2_n32" : arch_mips64r2_n32,
955 - "mips64r2_n64" : arch_mips64r2_n64,
956 - "mips64r2_multilib" : arch_mips64r2_multilib,
957 - "mipsel" : arch_mipsel1,
958 - "mipsel1" : arch_mipsel1,
959 - "mips32el" : arch_mips32el,
960 - "mips32el_softfloat" : arch_mips32el_softfloat,
961 - "mips32r2el" : arch_mips32r2el,
962 - "mips32r2el_softfloat" : arch_mips32r2el_softfloat,
963 - "mipsel3" : arch_mipsel3,
964 - "mipsel3_n32" : arch_mipsel3_n32,
965 - "mipsel3_n64" : arch_mipsel3_n64,
966 - "mipsel3_multilib" : arch_mipsel3_multilib,
967 - "mipsel4" : arch_mipsel4,
968 - "mipsel4_n32" : arch_mipsel4_n32,
969 - "mipsel4_n64" : arch_mipsel4_n64,
970 - "mipsel4_multilib" : arch_mipsel4_multilib,
971 - "mips64el" : arch_mips64el,
972 - "mips64el_n32" : arch_mips64el_n32,
973 - "mips64el_n64" : arch_mips64el_n64,
974 - "mips64el_multilib" : arch_mips64el_multilib,
975 - "mips64r2el" : arch_mips64r2el,
976 - "mips64r2el_n32" : arch_mips64r2el_n32,
977 - "mips64r2el_n64" : arch_mips64r2el_n64,
978 - "mips64r2el_multilib" : arch_mips64r2el_multilib,
979 - "loongson2e" : arch_loongson2e,
980 - "loongson2e_n32" : arch_loongson2e_n32,
981 - "loongson2e_n64" : arch_loongson2e_n64,
982 - "loongson2e_multilib" : arch_loongson2e_multilib,
983 - "loongson2f" : arch_loongson2f,
984 - "loongson2f_n32" : arch_loongson2f_n32,
985 - "loongson2f_n64" : arch_loongson2f_n64,
986 - "loongson2f_multilib" : arch_loongson2f_multilib,
987 - "loongson3a" : arch_loongson3a,
988 - "loongson3a_n32" : arch_loongson3a_n32,
989 - "loongson3a_n64" : arch_loongson3a_n64,
990 - "loongson3a_multilib" : arch_loongson3a_multilib,
991 - }, ("mips","mips64"))
992 diff --git a/arch/powerpc.py b/arch/powerpc.py
993 deleted file mode 100644
994 index e9f611b..0000000
995 --- a/arch/powerpc.py
996 +++ /dev/null
997 @@ -1,124 +0,0 @@
998 -
999 -import os,builder
1000 -from catalyst_support import *
1001 -
1002 -class generic_ppc(builder.generic):
1003 - "abstract base class for all 32-bit powerpc builders"
1004 - def __init__(self,myspec):
1005 - builder.generic.__init__(self,myspec)
1006 - self.settings["CHOST"]="powerpc-unknown-linux-gnu"
1007 - if self.settings["buildarch"]=="ppc64":
1008 - if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
1009 - raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
1010 - self.settings["CHROOT"]="linux32 chroot"
1011 - self.settings["crosscompile"] = False;
1012 - else:
1013 - self.settings["CHROOT"]="chroot"
1014 -
1015 -class generic_ppc64(builder.generic):
1016 - "abstract base class for all 64-bit powerpc builders"
1017 - def __init__(self,myspec):
1018 - builder.generic.__init__(self,myspec)
1019 - self.settings["CHROOT"]="chroot"
1020 -
1021 -class arch_ppc(generic_ppc):
1022 - "builder class for generic powerpc"
1023 - def __init__(self,myspec):
1024 - generic_ppc.__init__(self,myspec)
1025 - self.settings["CFLAGS"]="-O2 -mcpu=powerpc -mtune=powerpc -pipe"
1026 -
1027 -class arch_ppc64(generic_ppc64):
1028 - "builder class for generic ppc64"
1029 - def __init__(self,myspec):
1030 - generic_ppc64.__init__(self,myspec)
1031 - self.settings["CFLAGS"]="-O2 -pipe"
1032 - self.settings["CHOST"]="powerpc64-unknown-linux-gnu"
1033 -
1034 -class arch_970(arch_ppc64):
1035 - "builder class for 970 aka G5 under ppc64"
1036 - def __init__(self,myspec):
1037 - arch_ppc64.__init__(self,myspec)
1038 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=970 -mtune=970"
1039 - self.settings["HOSTUSE"]=["altivec"]
1040 -
1041 -class arch_cell(arch_ppc64):
1042 - "builder class for cell under ppc64"
1043 - def __init__(self,myspec):
1044 - arch_ppc64.__init__(self,myspec)
1045 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=cell -mtune=cell"
1046 - self.settings["HOSTUSE"]=["altivec","ibm"]
1047 -
1048 -class arch_g3(generic_ppc):
1049 - def __init__(self,myspec):
1050 - generic_ppc.__init__(self,myspec)
1051 - self.settings["CFLAGS"]="-O2 -mcpu=G3 -mtune=G3 -pipe"
1052 -
1053 -class arch_g4(generic_ppc):
1054 - def __init__(self,myspec):
1055 - generic_ppc.__init__(self,myspec)
1056 - self.settings["CFLAGS"]="-O2 -mcpu=G4 -mtune=G4 -maltivec -mabi=altivec -pipe"
1057 - self.settings["HOSTUSE"]=["altivec"]
1058 -
1059 -class arch_g5(generic_ppc):
1060 - def __init__(self,myspec):
1061 - generic_ppc.__init__(self,myspec)
1062 - self.settings["CFLAGS"]="-O2 -mcpu=G5 -mtune=G5 -maltivec -mabi=altivec -pipe"
1063 - self.settings["HOSTUSE"]=["altivec"]
1064 -
1065 -class arch_power(generic_ppc):
1066 - "builder class for generic power"
1067 - def __init__(self,myspec):
1068 - generic_ppc.__init__(self,myspec)
1069 - self.settings["CFLAGS"]="-O2 -mcpu=power -mtune=power -pipe"
1070 -
1071 -class arch_power_ppc(generic_ppc):
1072 - "builder class for generic powerpc/power"
1073 - def __init__(self,myspec):
1074 - generic_ppc.__init__(self,myspec)
1075 - self.settings["CFLAGS"]="-O2 -mcpu=common -mtune=common -pipe"
1076 -
1077 -class arch_power3(arch_ppc64):
1078 - "builder class for power3 under ppc64"
1079 - def __init__(self,myspec):
1080 - arch_ppc64.__init__(self,myspec)
1081 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power3 -mtune=power3"
1082 - self.settings["HOSTUSE"]=["ibm"]
1083 -
1084 -class arch_power4(arch_ppc64):
1085 - "builder class for power4 under ppc64"
1086 - def __init__(self,myspec):
1087 - arch_ppc64.__init__(self,myspec)
1088 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power4 -mtune=power4"
1089 - self.settings["HOSTUSE"]=["ibm"]
1090 -
1091 -class arch_power5(arch_ppc64):
1092 - "builder class for power5 under ppc64"
1093 - def __init__(self,myspec):
1094 - arch_ppc64.__init__(self,myspec)
1095 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power5 -mtune=power5"
1096 - self.settings["HOSTUSE"]=["ibm"]
1097 -
1098 -class arch_power6(arch_ppc64):
1099 - "builder class for power6 under ppc64"
1100 - def __init__(self,myspec):
1101 - arch_ppc64.__init__(self,myspec)
1102 - self.settings["CFLAGS"]="-O2 -pipe -mcpu=power6 -mtune=power6"
1103 - self.settings["HOSTUSE"]=["altivec","ibm"]
1104 -
1105 -def register():
1106 - "Inform main catalyst program of the contents of this plugin."
1107 - return ({
1108 - "970" : arch_970,
1109 - "cell" : arch_cell,
1110 - "g3" : arch_g3,
1111 - "g4" : arch_g4,
1112 - "g5" : arch_g5,
1113 - "power" : arch_power,
1114 - "power-ppc" : arch_power_ppc,
1115 - "power3" : arch_power3,
1116 - "power4" : arch_power4,
1117 - "power5" : arch_power5,
1118 - "power6" : arch_power6,
1119 - "ppc" : arch_ppc,
1120 - "ppc64" : arch_ppc64
1121 - }, ("ppc","ppc64","powerpc","powerpc64"))
1122 diff --git a/arch/s390.py b/arch/s390.py
1123 deleted file mode 100644
1124 index bf22f66..0000000
1125 --- a/arch/s390.py
1126 +++ /dev/null
1127 @@ -1,33 +0,0 @@
1128 -
1129 -import builder,os
1130 -from catalyst_support import *
1131 -
1132 -class generic_s390(builder.generic):
1133 - "abstract base class for all s390 builders"
1134 - def __init__(self,myspec):
1135 - builder.generic.__init__(self,myspec)
1136 - self.settings["CHROOT"]="chroot"
1137 -
1138 -class generic_s390x(builder.generic):
1139 - "abstract base class for all s390x builders"
1140 - def __init__(self,myspec):
1141 - builder.generic.__init__(self,myspec)
1142 - self.settings["CHROOT"]="chroot"
1143 -
1144 -class arch_s390(generic_s390):
1145 - "builder class for generic s390"
1146 - def __init__(self,myspec):
1147 - generic_s390.__init__(self,myspec)
1148 - self.settings["CFLAGS"]="-O2 -pipe"
1149 - self.settings["CHOST"]="s390-ibm-linux-gnu"
1150 -
1151 -class arch_s390x(generic_s390x):
1152 - "builder class for generic s390x"
1153 - def __init__(self,myspec):
1154 - generic_s390x.__init__(self,myspec)
1155 - self.settings["CFLAGS"]="-O2 -pipe"
1156 - self.settings["CHOST"]="s390x-ibm-linux-gnu"
1157 -
1158 -def register():
1159 - "Inform main catalyst program of the contents of this plugin."
1160 - return ({"s390":arch_s390,"s390x":arch_s390x}, ("s390", "s390x"))
1161 diff --git a/arch/sh.py b/arch/sh.py
1162 deleted file mode 100644
1163 index 2fc9531..0000000
1164 --- a/arch/sh.py
1165 +++ /dev/null
1166 @@ -1,116 +0,0 @@
1167 -
1168 -import builder,os
1169 -from catalyst_support import *
1170 -
1171 -class generic_sh(builder.generic):
1172 - "Abstract base class for all sh builders [Little-endian]"
1173 - def __init__(self,myspec):
1174 - builder.generic.__init__(self,myspec)
1175 - self.settings["CHROOT"]="chroot"
1176 -
1177 -class generic_sheb(builder.generic):
1178 - "Abstract base class for all sheb builders [Big-endian]"
1179 - def __init__(self,myspec):
1180 - builder.generic.__init__(self,myspec)
1181 - self.settings["CHROOT"]="chroot"
1182 -
1183 -class arch_sh(generic_sh):
1184 - "Builder class for SH [Little-endian]"
1185 - def __init__(self,myspec):
1186 - generic_sh.__init__(self,myspec)
1187 - self.settings["CFLAGS"]="-O2 -pipe"
1188 - self.settings["CHOST"]="sh-unknown-linux-gnu"
1189 -
1190 -class arch_sh2(generic_sh):
1191 - "Builder class for SH-2 [Little-endian]"
1192 - def __init__(self,myspec):
1193 - generic_sh.__init__(self,myspec)
1194 - self.settings["CFLAGS"]="-O2 -m2 -pipe"
1195 - self.settings["CHOST"]="sh2-unknown-linux-gnu"
1196 -
1197 -class arch_sh2a(generic_sh):
1198 - "Builder class for SH-2A [Little-endian]"
1199 - def __init__(self,myspec):
1200 - generic_sh.__init__(self,myspec)
1201 - self.settings["CFLAGS"]="-O2 -m2a -pipe"
1202 - self.settings["CHOST"]="sh2a-unknown-linux-gnu"
1203 -
1204 -class arch_sh3(generic_sh):
1205 - "Builder class for SH-3 [Little-endian]"
1206 - def __init__(self,myspec):
1207 - generic_sh.__init__(self,myspec)
1208 - self.settings["CFLAGS"]="-O2 -m3 -pipe"
1209 - self.settings["CHOST"]="sh3-unknown-linux-gnu"
1210 -
1211 -class arch_sh4(generic_sh):
1212 - "Builder class for SH-4 [Little-endian]"
1213 - def __init__(self,myspec):
1214 - generic_sh.__init__(self,myspec)
1215 - self.settings["CFLAGS"]="-O2 -m4 -pipe"
1216 - self.settings["CHOST"]="sh4-unknown-linux-gnu"
1217 -
1218 -class arch_sh4a(generic_sh):
1219 - "Builder class for SH-4A [Little-endian]"
1220 - def __init__(self,myspec):
1221 - generic_sh.__init__(self,myspec)
1222 - self.settings["CFLAGS"]="-O2 -m4a -pipe"
1223 - self.settings["CHOST"]="sh4a-unknown-linux-gnu"
1224 -
1225 -class arch_sheb(generic_sheb):
1226 - "Builder class for SH [Big-endian]"
1227 - def __init__(self,myspec):
1228 - generic_sheb.__init__(self,myspec)
1229 - self.settings["CFLAGS"]="-O2 -pipe"
1230 - self.settings["CHOST"]="sheb-unknown-linux-gnu"
1231 -
1232 -class arch_sh2eb(generic_sheb):
1233 - "Builder class for SH-2 [Big-endian]"
1234 - def __init__(self,myspec):
1235 - generic_sheb.__init__(self,myspec)
1236 - self.settings["CFLAGS"]="-O2 -m2 -pipe"
1237 - self.settings["CHOST"]="sh2eb-unknown-linux-gnu"
1238 -
1239 -class arch_sh2aeb(generic_sheb):
1240 - "Builder class for SH-2A [Big-endian]"
1241 - def __init__(self,myspec):
1242 - generic_sheb.__init__(self,myspec)
1243 - self.settings["CFLAGS"]="-O2 -m2a -pipe"
1244 - self.settings["CHOST"]="sh2aeb-unknown-linux-gnu"
1245 -
1246 -class arch_sh3eb(generic_sheb):
1247 - "Builder class for SH-3 [Big-endian]"
1248 - def __init__(self,myspec):
1249 - generic_sheb.__init__(self,myspec)
1250 - self.settings["CFLAGS"]="-O2 -m3 -pipe"
1251 - self.settings["CHOST"]="sh3eb-unknown-linux-gnu"
1252 -
1253 -class arch_sh4eb(generic_sheb):
1254 - "Builder class for SH-4 [Big-endian]"
1255 - def __init__(self,myspec):
1256 - generic_sheb.__init__(self,myspec)
1257 - self.settings["CFLAGS"]="-O2 -m4 -pipe"
1258 - self.settings["CHOST"]="sh4eb-unknown-linux-gnu"
1259 -
1260 -class arch_sh4aeb(generic_sheb):
1261 - "Builder class for SH-4A [Big-endian]"
1262 - def __init__(self,myspec):
1263 - generic_sheb.__init__(self,myspec)
1264 - self.settings["CFLAGS"]="-O2 -m4a -pipe"
1265 - self.settings["CHOST"]="sh4aeb-unknown-linux-gnu"
1266 -
1267 -def register():
1268 - "Inform main catalyst program of the contents of this plugin."
1269 - return ({
1270 - "sh" :arch_sh,
1271 - "sh2" :arch_sh2,
1272 - "sh2a" :arch_sh2a,
1273 - "sh3" :arch_sh3,
1274 - "sh4" :arch_sh4,
1275 - "sh4a" :arch_sh4a,
1276 - "sheb" :arch_sheb,
1277 - "sh2eb" :arch_sh2eb,
1278 - "sh2aeb" :arch_sh2aeb,
1279 - "sh3eb" :arch_sh3eb,
1280 - "sh4eb" :arch_sh4eb,
1281 - "sh4aeb" :arch_sh4aeb
1282 - }, ("sh2","sh2a","sh3","sh4","sh4a","sh2eb","sh2aeb","sh3eb","sh4eb","sh4aeb"))
1283 diff --git a/arch/sparc.py b/arch/sparc.py
1284 deleted file mode 100644
1285 index 5eb5344..0000000
1286 --- a/arch/sparc.py
1287 +++ /dev/null
1288 @@ -1,42 +0,0 @@
1289 -
1290 -import builder,os
1291 -from catalyst_support import *
1292 -
1293 -class generic_sparc(builder.generic):
1294 - "abstract base class for all sparc builders"
1295 - def __init__(self,myspec):
1296 - builder.generic.__init__(self,myspec)
1297 - if self.settings["buildarch"]=="sparc64":
1298 - if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
1299 - raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
1300 - self.settings["CHROOT"]="linux32 chroot"
1301 - self.settings["crosscompile"] = False;
1302 - else:
1303 - self.settings["CHROOT"]="chroot"
1304 -
1305 -class generic_sparc64(builder.generic):
1306 - "abstract base class for all sparc64 builders"
1307 - def __init__(self,myspec):
1308 - builder.generic.__init__(self,myspec)
1309 - self.settings["CHROOT"]="chroot"
1310 -
1311 -class arch_sparc(generic_sparc):
1312 - "builder class for generic sparc (sun4cdm)"
1313 - def __init__(self,myspec):
1314 - generic_sparc.__init__(self,myspec)
1315 - self.settings["CFLAGS"]="-O2 -pipe"
1316 - self.settings["CHOST"]="sparc-unknown-linux-gnu"
1317 -
1318 -class arch_sparc64(generic_sparc64):
1319 - "builder class for generic sparc64 (sun4u)"
1320 - def __init__(self,myspec):
1321 - generic_sparc64.__init__(self,myspec)
1322 - self.settings["CFLAGS"]="-O2 -mcpu=ultrasparc -pipe"
1323 - self.settings["CHOST"]="sparc-unknown-linux-gnu"
1324 -
1325 -def register():
1326 - "Inform main catalyst program of the contents of this plugin."
1327 - return ({
1328 - "sparc" : arch_sparc,
1329 - "sparc64" : arch_sparc64
1330 - }, ("sparc","sparc64", ))
1331 diff --git a/arch/x86.py b/arch/x86.py
1332 deleted file mode 100644
1333 index 0391b79..0000000
1334 --- a/arch/x86.py
1335 +++ /dev/null
1336 @@ -1,153 +0,0 @@
1337 -
1338 -import builder,os
1339 -from catalyst_support import *
1340 -
1341 -class generic_x86(builder.generic):
1342 - "abstract base class for all x86 builders"
1343 - def __init__(self,myspec):
1344 - builder.generic.__init__(self,myspec)
1345 - if self.settings["buildarch"]=="amd64":
1346 - if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
1347 - raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
1348 - self.settings["CHROOT"]="linux32 chroot"
1349 - self.settings["crosscompile"] = False;
1350 - else:
1351 - self.settings["CHROOT"]="chroot"
1352 -
1353 -class arch_x86(generic_x86):
1354 - "builder class for generic x86 (386+)"
1355 - def __init__(self,myspec):
1356 - generic_x86.__init__(self,myspec)
1357 - self.settings["CFLAGS"]="-O2 -mtune=i686 -pipe"
1358 - self.settings["CHOST"]="i386-pc-linux-gnu"
1359 -
1360 -class arch_i386(generic_x86):
1361 - "Intel i386 CPU"
1362 - def __init__(self,myspec):
1363 - generic_x86.__init__(self,myspec)
1364 - self.settings["CFLAGS"]="-O2 -march=i386 -pipe"
1365 - self.settings["CHOST"]="i386-pc-linux-gnu"
1366 -
1367 -class arch_i486(generic_x86):
1368 - "Intel i486 CPU"
1369 - def __init__(self,myspec):
1370 - generic_x86.__init__(self,myspec)
1371 - self.settings["CFLAGS"]="-O2 -march=i486 -pipe"
1372 - self.settings["CHOST"]="i486-pc-linux-gnu"
1373 -
1374 -class arch_i586(generic_x86):
1375 - "Intel Pentium CPU"
1376 - def __init__(self,myspec):
1377 - generic_x86.__init__(self,myspec)
1378 - self.settings["CFLAGS"]="-O2 -march=i586 -pipe"
1379 - self.settings["CHOST"]="i586-pc-linux-gnu"
1380 -
1381 -class arch_i686(generic_x86):
1382 - "Intel Pentium Pro CPU"
1383 - def __init__(self,myspec):
1384 - generic_x86.__init__(self,myspec)
1385 - self.settings["CFLAGS"]="-O2 -march=i686 -pipe"
1386 - self.settings["CHOST"]="i686-pc-linux-gnu"
1387 -
1388 -class arch_pentium_mmx(generic_x86):
1389 - "Intel Pentium MMX CPU with MMX support"
1390 - def __init__(self,myspec):
1391 - generic_x86.__init__(self,myspec)
1392 - self.settings["CFLAGS"]="-O2 -march=pentium-mmx -pipe"
1393 - self.settings["HOSTUSE"]=["mmx"]
1394 -
1395 -class arch_pentium2(generic_x86):
1396 - "Intel Pentium 2 CPU with MMX support"
1397 - def __init__(self,myspec):
1398 - generic_x86.__init__(self,myspec)
1399 - self.settings["CFLAGS"]="-O2 -march=pentium2 -pipe"
1400 - self.settings["HOSTUSE"]=["mmx"]
1401 -
1402 -class arch_pentium3(generic_x86):
1403 - "Intel Pentium 3 CPU with MMX and SSE support"
1404 - def __init__(self,myspec):
1405 - generic_x86.__init__(self,myspec)
1406 - self.settings["CFLAGS"]="-O2 -march=pentium3 -pipe"
1407 - self.settings["HOSTUSE"]=["mmx","sse"]
1408 -
1409 -class arch_pentium4(generic_x86):
1410 - "Intel Pentium 4 CPU with MMX, SSE and SSE2 support"
1411 - def __init__(self,myspec):
1412 - generic_x86.__init__(self,myspec)
1413 - self.settings["CFLAGS"]="-O2 -march=pentium4 -pipe"
1414 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
1415 -
1416 -class arch_pentium_m(generic_x86):
1417 - "Intel Pentium M CPU with MMX, SSE and SSE2 support"
1418 - def __init__(self,myspec):
1419 - generic_x86.__init__(self,myspec)
1420 - self.settings["CFLAGS"]="-O2 -march=pentium-m -pipe"
1421 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
1422 -
1423 -class arch_prescott(generic_x86):
1424 - "improved version of Intel Pentium 4 CPU with MMX, SSE, SSE2 and SSE3 support"
1425 - def __init__(self,myspec):
1426 - generic_x86.__init__(self,myspec)
1427 - self.settings["CFLAGS"]="-O2 -march=prescott -pipe"
1428 - self.settings["HOSTUSE"]=["mmx","sse","sse2"]
1429 - self.settings["CHOST"]="i686-pc-linux-gnu"
1430 -
1431 -class arch_k6(generic_x86):
1432 - "AMD K6 CPU with MMX support"
1433 - def __init__(self,myspec):
1434 - generic_x86.__init__(self,myspec)
1435 - self.settings["CFLAGS"]="-O2 -march=k6 -pipe"
1436 - self.settings["CHOST"]="i686-pc-linux-gnu"
1437 - self.settings["HOSTUSE"]=["mmx"]
1438 -
1439 -class arch_k6_2(generic_x86):
1440 - "AMD K6-2 CPU with MMX and 3dNOW! support"
1441 - def __init__(self,myspec):
1442 - generic_x86.__init__(self,myspec)
1443 - self.settings["CFLAGS"]="-O2 -march=k6-2 -pipe"
1444 - self.settings["CHOST"]="i686-pc-linux-gnu"
1445 - self.settings["HOSTUSE"]=["mmx","3dnow"]
1446 -
1447 -class arch_athlon(generic_x86):
1448 - "AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and SSE prefetch support"
1449 - def __init__(self,myspec):
1450 - generic_x86.__init__(self,myspec)
1451 - self.settings["CFLAGS"]="-O2 -march=athlon -pipe"
1452 - self.settings["CHOST"]="i686-pc-linux-gnu"
1453 - self.settings["HOSTUSE"]=["mmx","3dnow"]
1454 -
1455 -class arch_athlon_xp(generic_x86):
1456 - "improved AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and full SSE support"
1457 - def __init__(self,myspec):
1458 - generic_x86.__init__(self,myspec)
1459 - self.settings["CFLAGS"]="-O2 -march=athlon-xp -pipe"
1460 - self.settings["CHOST"]="i686-pc-linux-gnu"
1461 - self.settings["HOSTUSE"]=["mmx","3dnow","sse"]
1462 -
1463 -def register():
1464 - "Inform main catalyst program of the contents of this plugin."
1465 - return ({
1466 - "x86" : arch_x86,
1467 - "i386" : arch_i386,
1468 - "i486" : arch_i486,
1469 - "i586" : arch_i586,
1470 - "i686" : arch_i686,
1471 - "pentium" : arch_i586,
1472 - "pentium2" : arch_pentium2,
1473 - "pentium3" : arch_pentium3,
1474 - "pentium3m" : arch_pentium3,
1475 - "pentium-m" : arch_pentium_m,
1476 - "pentium4" : arch_pentium4,
1477 - "pentium4m" : arch_pentium4,
1478 - "pentiumpro" : arch_i686,
1479 - "pentium-mmx" : arch_pentium_mmx,
1480 - "prescott" : arch_prescott,
1481 - "k6" : arch_k6,
1482 - "k6-2" : arch_k6_2,
1483 - "k6-3" : arch_k6_2,
1484 - "athlon" : arch_athlon,
1485 - "athlon-tbird" : arch_athlon,
1486 - "athlon-4" : arch_athlon_xp,
1487 - "athlon-xp" : arch_athlon_xp,
1488 - "athlon-mp" : arch_athlon_xp
1489 - }, ('i386', 'i486', 'i586', 'i686'))
1490 diff --git a/bin/catalyst b/bin/catalyst
1491 new file mode 100755
1492 index 0000000..ace43fc
1493 --- /dev/null
1494 +++ b/bin/catalyst
1495 @@ -0,0 +1,46 @@
1496 +#!/usr/bin/python2 -OO
1497 +
1498 +# Maintained in full by:
1499 +# Catalyst Team <catalyst@g.o>
1500 +# Release Engineering Team <releng@g.o>
1501 +# Andrew Gaffney <agaffney@g.o>
1502 +# Chris Gianelloni <wolf31o2@××××××××.org>
1503 +# $Id$
1504 +
1505 +
1506 +from __future__ import print_function
1507 +
1508 +import sys
1509 +
1510 +__maintainer__="Catalyst <catalyst@g.o>"
1511 +__version__="2.0.12.2"
1512 +
1513 +
1514 +# This block ensures that ^C interrupts are handled quietly.
1515 +try:
1516 + import signal
1517 +
1518 + def exithandler(signum,frame):
1519 + signal.signal(signal.SIGINT, signal.SIG_IGN)
1520 + signal.signal(signal.SIGTERM, signal.SIG_IGN)
1521 + print()
1522 + sys.exit(1)
1523 +
1524 + signal.signal(signal.SIGINT, exithandler)
1525 + signal.signal(signal.SIGTERM, exithandler)
1526 + signal.signal(signal.SIGPIPE, signal.SIG_DFL)
1527 +
1528 +except KeyboardInterrupt:
1529 + print()
1530 + sys.exit(1)
1531 +
1532 +
1533 +from catalyst.main import main
1534 +
1535 +try:
1536 + main()
1537 +except KeyboardInterrupt:
1538 + print("Aborted.")
1539 + sys.exit(130)
1540 +sys.exit(0)
1541 +
1542 diff --git a/catalyst b/catalyst
1543 deleted file mode 100755
1544 index cb6c022..0000000
1545 --- a/catalyst
1546 +++ /dev/null
1547 @@ -1,419 +0,0 @@
1548 -#!/usr/bin/python2 -OO
1549 -
1550 -# Maintained in full by:
1551 -# Catalyst Team <catalyst@g.o>
1552 -# Release Engineering Team <releng@g.o>
1553 -# Andrew Gaffney <agaffney@g.o>
1554 -# Chris Gianelloni <wolf31o2@××××××××.org>
1555 -# $Id$
1556 -
1557 -import os
1558 -import sys
1559 -import imp
1560 -import string
1561 -import getopt
1562 -import pdb
1563 -import os.path
1564 -
1565 -import modules.catalyst.config
1566 -import modules.catalyst.util
1567 -
1568 -__maintainer__="Catalyst <catalyst@g.o>"
1569 -__version__="2.0.15"
1570 -
1571 -conf_values={}
1572 -
1573 -def usage():
1574 - print """Usage catalyst [options] [-C variable=value...] [ -s identifier]
1575 - -a --clear-autoresume clear autoresume flags
1576 - -c --config use specified configuration file
1577 - -C --cli catalyst commandline (MUST BE LAST OPTION)
1578 - -d --debug enable debugging
1579 - -f --file read specfile
1580 - -F --fetchonly fetch files only
1581 - -h --help print this help message
1582 - -p --purge clear tmp dirs,package cache, autoresume flags
1583 - -P --purgeonly clear tmp dirs,package cache, autoresume flags and exit
1584 - -T --purgetmponly clear tmp dirs and autoresume flags and exit
1585 - -s --snapshot generate a release snapshot
1586 - -V --version display version information
1587 - -v --verbose verbose output
1588 -
1589 -Usage examples:
1590 -
1591 -Using the commandline option (-C, --cli) to build a Portage snapshot:
1592 -catalyst -C target=snapshot version_stamp=my_date
1593 -
1594 -Using the snapshot option (-s, --snapshot) to build a release snapshot:
1595 -catalyst -s 20071121"
1596 -
1597 -Using the specfile option (-f, --file) to build a stage target:
1598 -catalyst -f stage1-specfile.spec
1599 -"""
1600 -
1601 -
1602 -def version():
1603 - print "Catalyst, version "+__version__
1604 - print "Copyright 2003-2008 Gentoo Foundation"
1605 - print "Copyright 2008-2012 various authors"
1606 - print "Distributed under the GNU General Public License version 2.1\n"
1607 -
1608 -def parse_config(myconfig):
1609 - # search a couple of different areas for the main config file
1610 - myconf={}
1611 - config_file=""
1612 -
1613 - confdefaults = {
1614 - "distdir": "/usr/portage/distfiles",
1615 - "hash_function": "crc32",
1616 - "icecream": "/var/cache/icecream",
1617 - "local_overlay": "/usr/local/portage",
1618 - "options": "",
1619 - "packagedir": "/usr/portage/packages",
1620 - "portdir": "/usr/portage",
1621 - "repo_name": "portage",
1622 - "sharedir": "/usr/share/catalyst",
1623 - "snapshot_name": "portage-",
1624 - "snapshot_cache": "/var/tmp/catalyst/snapshot_cache",
1625 - "storedir": "/var/tmp/catalyst",
1626 - }
1627 -
1628 - # first, try the one passed (presumably from the cmdline)
1629 - if myconfig:
1630 - if os.path.exists(myconfig):
1631 - print "Using command line specified Catalyst configuration file, "+myconfig
1632 - config_file=myconfig
1633 -
1634 - else:
1635 - print "!!! catalyst: Could not use specified configuration file "+\
1636 - myconfig
1637 - sys.exit(1)
1638 -
1639 - # next, try the default location
1640 - elif os.path.exists("/etc/catalyst/catalyst.conf"):
1641 - print "Using default Catalyst configuration file, /etc/catalyst/catalyst.conf"
1642 - config_file="/etc/catalyst/catalyst.conf"
1643 -
1644 - # can't find a config file (we are screwed), so bail out
1645 - else:
1646 - print "!!! catalyst: Could not find a suitable configuration file"
1647 - sys.exit(1)
1648 -
1649 - # now, try and parse the config file "config_file"
1650 - try:
1651 -# execfile(config_file, myconf, myconf)
1652 - myconfig = modules.catalyst.config.ConfigParser(config_file)
1653 - myconf.update(myconfig.get_values())
1654 -
1655 - except:
1656 - print "!!! catalyst: Unable to parse configuration file, "+myconfig
1657 - sys.exit(1)
1658 -
1659 - # now, load up the values into conf_values so that we can use them
1660 - for x in confdefaults.keys():
1661 - if x in myconf:
1662 - print "Setting",x,"to config file value \""+myconf[x]+"\""
1663 - conf_values[x]=myconf[x]
1664 - else:
1665 - print "Setting",x,"to default value \""+confdefaults[x]+"\""
1666 - conf_values[x]=confdefaults[x]
1667 -
1668 - # parse out the rest of the options from the config file
1669 - if "autoresume" in string.split(conf_values["options"]):
1670 - print "Autoresuming support enabled."
1671 - conf_values["AUTORESUME"]="1"
1672 -
1673 - if "bindist" in string.split(conf_values["options"]):
1674 - print "Binary redistribution enabled"
1675 - conf_values["BINDIST"]="1"
1676 - else:
1677 - print "Bindist is not enabled in catalyst.conf"
1678 - print "Binary redistribution of generated stages/isos may be prohibited by law."
1679 - print "Please see the use description for bindist on any package you are including."
1680 -
1681 - if "ccache" in string.split(conf_values["options"]):
1682 - print "Compiler cache support enabled."
1683 - conf_values["CCACHE"]="1"
1684 -
1685 - if "clear-autoresume" in string.split(conf_values["options"]):
1686 - print "Cleaning autoresume flags support enabled."
1687 - conf_values["CLEAR_AUTORESUME"]="1"
1688 -
1689 - if "distcc" in string.split(conf_values["options"]):
1690 - print "Distcc support enabled."
1691 - conf_values["DISTCC"]="1"
1692 -
1693 - if "icecream" in string.split(conf_values["options"]):
1694 - print "Icecream compiler cluster support enabled."
1695 - conf_values["ICECREAM"]="1"
1696 -
1697 - if "kerncache" in string.split(conf_values["options"]):
1698 - print "Kernel cache support enabled."
1699 - conf_values["KERNCACHE"]="1"
1700 -
1701 - if "pkgcache" in string.split(conf_values["options"]):
1702 - print "Package cache support enabled."
1703 - conf_values["PKGCACHE"]="1"
1704 -
1705 - if "preserve_libs" in string.split(conf_values["options"]):
1706 - print "Preserving libs during unmerge."
1707 - conf_values["PRESERVE_LIBS"]="1"
1708 -
1709 - if "purge" in string.split(conf_values["options"]):
1710 - print "Purge support enabled."
1711 - conf_values["PURGE"]="1"
1712 -
1713 - if "seedcache" in string.split(conf_values["options"]):
1714 - print "Seed cache support enabled."
1715 - conf_values["SEEDCACHE"]="1"
1716 -
1717 - if "snapcache" in string.split(conf_values["options"]):
1718 - print "Snapshot cache support enabled."
1719 - conf_values["SNAPCACHE"]="1"
1720 -
1721 - if "digests" in myconf:
1722 - conf_values["digests"]=myconf["digests"]
1723 - if "contents" in myconf:
1724 - conf_values["contents"]=myconf["contents"]
1725 -
1726 - if "envscript" in myconf:
1727 - print "Envscript support enabled."
1728 - conf_values["ENVSCRIPT"]=myconf["envscript"]
1729 -
1730 - if "var_tmpfs_portage" in myconf:
1731 - conf_values["var_tmpfs_portage"]=myconf["var_tmpfs_portage"];
1732 -
1733 - if "port_logdir" in myconf:
1734 - conf_values["port_logdir"]=myconf["port_logdir"];
1735 -
1736 -def import_modules():
1737 - # import catalyst's own modules (i.e. catalyst_support and the arch modules)
1738 - targetmap={}
1739 -
1740 - try:
1741 - for x in required_build_targets:
1742 - try:
1743 - fh=open(conf_values["sharedir"]+"/modules/"+x+".py")
1744 - module=imp.load_module(x,fh,"modules/"+x+".py",(".py","r",imp.PY_SOURCE))
1745 - fh.close()
1746 -
1747 - except IOError:
1748 - raise CatalystError,"Can't find "+x+".py plugin in "+\
1749 - conf_values["sharedir"]+"/modules/"
1750 -
1751 - for x in valid_build_targets:
1752 - try:
1753 - fh=open(conf_values["sharedir"]+"/modules/"+x+".py")
1754 - module=imp.load_module(x,fh,"modules/"+x+".py",(".py","r",imp.PY_SOURCE))
1755 - module.register(targetmap)
1756 - fh.close()
1757 -
1758 - except IOError:
1759 - raise CatalystError,"Can't find "+x+".py plugin in "+\
1760 - conf_values["sharedir"]+"/modules/"
1761 -
1762 - except ImportError:
1763 - print "!!! catalyst: Python modules not found in "+\
1764 - conf_values["sharedir"]+"/modules; exiting."
1765 - sys.exit(1)
1766 -
1767 - return targetmap
1768 -
1769 -def build_target(addlargs, targetmap):
1770 - try:
1771 - if addlargs["target"] not in targetmap:
1772 - raise CatalystError,"Target \""+addlargs["target"]+"\" not available."
1773 -
1774 - mytarget=targetmap[addlargs["target"]](conf_values, addlargs)
1775 -
1776 - mytarget.run()
1777 -
1778 - except:
1779 - modules.catalyst.util.print_traceback()
1780 - print "!!! catalyst: Error encountered during run of target " + addlargs["target"]
1781 - sys.exit(1)
1782 -
1783 -if __name__ == "__main__":
1784 - targetmap={}
1785 -
1786 - version()
1787 - if os.getuid() != 0:
1788 - # catalyst cannot be run as a normal user due to chroots, mounts, etc
1789 - print "!!! catalyst: This script requires root privileges to operate"
1790 - sys.exit(2)
1791 -
1792 - # we need some options in order to work correctly
1793 - if len(sys.argv) < 2:
1794 - usage()
1795 - sys.exit(2)
1796 -
1797 - # parse out the command line arguments
1798 - try:
1799 - opts,args = getopt.getopt(sys.argv[1:], "apPThvdc:C:f:FVs:", ["purge", "purgeonly", "purgetmponly", "help", "version", "debug",\
1800 - "clear-autoresume", "config=", "cli=", "file=", "fetch", "verbose","snapshot="])
1801 -
1802 - except getopt.GetoptError:
1803 - usage()
1804 - sys.exit(2)
1805 -
1806 - # defaults for commandline opts
1807 - debug=False
1808 - verbose=False
1809 - fetch=False
1810 - myconfig=""
1811 - myspecfile=""
1812 - mycmdline=[]
1813 - myopts=[]
1814 -
1815 - # check preconditions
1816 - if len(opts) == 0:
1817 - print "!!! catalyst: please specify one of either -f or -C\n"
1818 - usage()
1819 - sys.exit(2)
1820 -
1821 - run = False
1822 - for o, a in opts:
1823 - if o in ("-h", "--help"):
1824 - usage()
1825 - sys.exit(1)
1826 -
1827 - if o in ("-V", "--version"):
1828 - print "Catalyst version "+__version__
1829 - sys.exit(1)
1830 -
1831 - if o in ("-d", "--debug"):
1832 - conf_values["DEBUG"]="1"
1833 - conf_values["VERBOSE"]="1"
1834 -
1835 - if o in ("-c", "--config"):
1836 - myconfig=a
1837 -
1838 - if o in ("-C", "--cli"):
1839 - run = True
1840 - x=sys.argv.index(o)+1
1841 - while x < len(sys.argv):
1842 - mycmdline.append(sys.argv[x])
1843 - x=x+1
1844 -
1845 - if o in ("-f", "--file"):
1846 - run = True
1847 - myspecfile=a
1848 -
1849 - if o in ("-F", "--fetchonly"):
1850 - conf_values["FETCH"]="1"
1851 -
1852 - if o in ("-v", "--verbose"):
1853 - conf_values["VERBOSE"]="1"
1854 -
1855 - if o in ("-s", "--snapshot"):
1856 - if len(sys.argv) < 3:
1857 - print "!!! catalyst: missing snapshot identifier\n"
1858 - usage()
1859 - sys.exit(2)
1860 - else:
1861 - run = True
1862 - mycmdline.append("target=snapshot")
1863 - mycmdline.append("version_stamp="+a)
1864 -
1865 - if o in ("-p", "--purge"):
1866 - conf_values["PURGE"] = "1"
1867 -
1868 - if o in ("-P", "--purgeonly"):
1869 - conf_values["PURGEONLY"] = "1"
1870 -
1871 - if o in ("-T", "--purgetmponly"):
1872 - conf_values["PURGETMPONLY"] = "1"
1873 -
1874 - if o in ("-a", "--clear-autoresume"):
1875 - conf_values["CLEAR_AUTORESUME"] = "1"
1876 -
1877 - if not run:
1878 - print "!!! catalyst: please specify one of either -f or -C\n"
1879 - usage()
1880 - sys.exit(2)
1881 -
1882 - # import configuration file and import our main module using those settings
1883 - parse_config(myconfig)
1884 - sys.path.append(conf_values["sharedir"]+"/modules")
1885 - from catalyst_support import *
1886 -
1887 - # Start checking that digests are valid now that the hash_map was imported
1888 - # from catalyst_support
1889 - if "digests" in conf_values:
1890 - for i in conf_values["digests"].split():
1891 - if i not in hash_map:
1892 - print
1893 - print i+" is not a valid digest entry"
1894 - print "Valid digest entries:"
1895 - print hash_map.keys()
1896 - print
1897 - print "Catalyst aborting...."
1898 - sys.exit(2)
1899 - if find_binary(hash_map[i][1]) == None:
1900 - print
1901 - print "digest="+i
1902 - print "\tThe "+hash_map[i][1]+\
1903 - " binary was not found. It needs to be in your system path"
1904 - print
1905 - print "Catalyst aborting...."
1906 - sys.exit(2)
1907 - if "hash_function" in conf_values:
1908 - if conf_values["hash_function"] not in hash_map:
1909 - print
1910 - print conf_values["hash_function"]+\
1911 - " is not a valid hash_function entry"
1912 - print "Valid hash_function entries:"
1913 - print hash_map.keys()
1914 - print
1915 - print "Catalyst aborting...."
1916 - sys.exit(2)
1917 - if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
1918 - print
1919 - print "hash_function="+conf_values["hash_function"]
1920 - print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
1921 - " binary was not found. It needs to be in your system path"
1922 - print
1923 - print "Catalyst aborting...."
1924 - sys.exit(2)
1925 -
1926 - # import the rest of the catalyst modules
1927 - targetmap=import_modules()
1928 -
1929 - addlargs={}
1930 -
1931 - if myspecfile:
1932 - spec = modules.catalyst.config.SpecParser(myspecfile)
1933 - addlargs.update(spec.get_values())
1934 -
1935 - if mycmdline:
1936 - try:
1937 - cmdline = modules.catalyst.config.ConfigParser()
1938 - cmdline.parse_lines(mycmdline)
1939 - addlargs.update(cmdline.get_values())
1940 - except CatalystError:
1941 - print "!!! catalyst: Could not parse commandline, exiting."
1942 - sys.exit(1)
1943 -
1944 - if "target" not in addlargs:
1945 - raise CatalystError, "Required value \"target\" not specified."
1946 -
1947 - # everything is setup, so the build is a go
1948 - try:
1949 - build_target(addlargs, targetmap)
1950 -
1951 - except CatalystError:
1952 - print
1953 - print "Catalyst aborting...."
1954 - sys.exit(2)
1955 - except KeyboardInterrupt:
1956 - print "\nCatalyst build aborted due to user interrupt ( Ctrl-C )"
1957 - print
1958 - print "Catalyst aborting...."
1959 - sys.exit(2)
1960 - except LockInUse:
1961 - print "Catalyst aborting...."
1962 - sys.exit(2)
1963 - except:
1964 - print "Catalyst aborting...."
1965 - raise
1966 - sys.exit(2)
1967 diff --git a/catalyst/__init__.py b/catalyst/__init__.py
1968 new file mode 100644
1969 index 0000000..e69de29
1970 diff --git a/catalyst/arch/__init__.py b/catalyst/arch/__init__.py
1971 new file mode 100644
1972 index 0000000..8b13789
1973 --- /dev/null
1974 +++ b/catalyst/arch/__init__.py
1975 @@ -0,0 +1 @@
1976 +
1977 diff --git a/catalyst/arch/alpha.py b/catalyst/arch/alpha.py
1978 new file mode 100644
1979 index 0000000..f0fc95a
1980 --- /dev/null
1981 +++ b/catalyst/arch/alpha.py
1982 @@ -0,0 +1,75 @@
1983 +
1984 +import builder,os
1985 +from catalyst_support import *
1986 +
1987 +class generic_alpha(builder.generic):
1988 + "abstract base class for all alpha builders"
1989 + def __init__(self,myspec):
1990 + builder.generic.__init__(self,myspec)
1991 + self.settings["CHROOT"]="chroot"
1992 + self.settings["CFLAGS"]="-mieee -pipe"
1993 +
1994 +class arch_alpha(generic_alpha):
1995 + "builder class for generic alpha (ev4+)"
1996 + def __init__(self,myspec):
1997 + generic_alpha.__init__(self,myspec)
1998 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
1999 + self.settings["CHOST"]="alpha-unknown-linux-gnu"
2000 +
2001 +class arch_ev4(generic_alpha):
2002 + "builder class for alpha ev4"
2003 + def __init__(self,myspec):
2004 + generic_alpha.__init__(self,myspec)
2005 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev4"
2006 + self.settings["CHOST"]="alphaev4-unknown-linux-gnu"
2007 +
2008 +class arch_ev45(generic_alpha):
2009 + "builder class for alpha ev45"
2010 + def __init__(self,myspec):
2011 + generic_alpha.__init__(self,myspec)
2012 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev45"
2013 + self.settings["CHOST"]="alphaev45-unknown-linux-gnu"
2014 +
2015 +class arch_ev5(generic_alpha):
2016 + "builder class for alpha ev5"
2017 + def __init__(self,myspec):
2018 + generic_alpha.__init__(self,myspec)
2019 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev5"
2020 + self.settings["CHOST"]="alphaev5-unknown-linux-gnu"
2021 +
2022 +class arch_ev56(generic_alpha):
2023 + "builder class for alpha ev56 (ev5 plus BWX)"
2024 + def __init__(self,myspec):
2025 + generic_alpha.__init__(self,myspec)
2026 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev56"
2027 + self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
2028 +
2029 +class arch_pca56(generic_alpha):
2030 + "builder class for alpha pca56 (ev5 plus BWX & MAX)"
2031 + def __init__(self,myspec):
2032 + generic_alpha.__init__(self,myspec)
2033 + self.settings["CFLAGS"]+=" -O2 -mcpu=pca56"
2034 + self.settings["CHOST"]="alphaev56-unknown-linux-gnu"
2035 +
2036 +class arch_ev6(generic_alpha):
2037 + "builder class for alpha ev6"
2038 + def __init__(self,myspec):
2039 + generic_alpha.__init__(self,myspec)
2040 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev6"
2041 + self.settings["CHOST"]="alphaev6-unknown-linux-gnu"
2042 + self.settings["HOSTUSE"]=["ev6"]
2043 +
2044 +class arch_ev67(generic_alpha):
2045 + "builder class for alpha ev67 (ev6 plus CIX)"
2046 + def __init__(self,myspec):
2047 + generic_alpha.__init__(self,myspec)
2048 + self.settings["CFLAGS"]+=" -O2 -mcpu=ev67"
2049 + self.settings["CHOST"]="alphaev67-unknown-linux-gnu"
2050 + self.settings["HOSTUSE"]=["ev6"]
2051 +
2052 +def register():
2053 + "Inform main catalyst program of the contents of this plugin."
2054 + return ({ "alpha":arch_alpha, "ev4":arch_ev4, "ev45":arch_ev45,
2055 + "ev5":arch_ev5, "ev56":arch_ev56, "pca56":arch_pca56,
2056 + "ev6":arch_ev6, "ev67":arch_ev67 },
2057 + ("alpha", ))
2058 diff --git a/catalyst/arch/amd64.py b/catalyst/arch/amd64.py
2059 new file mode 100644
2060 index 0000000..262b55a
2061 --- /dev/null
2062 +++ b/catalyst/arch/amd64.py
2063 @@ -0,0 +1,83 @@
2064 +
2065 +import builder
2066 +
2067 +class generic_amd64(builder.generic):
2068 + "abstract base class for all amd64 builders"
2069 + def __init__(self,myspec):
2070 + builder.generic.__init__(self,myspec)
2071 + self.settings["CHROOT"]="chroot"
2072 +
2073 +class arch_amd64(generic_amd64):
2074 + "builder class for generic amd64 (Intel and AMD)"
2075 + def __init__(self,myspec):
2076 + generic_amd64.__init__(self,myspec)
2077 + self.settings["CFLAGS"]="-O2 -pipe"
2078 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2079 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
2080 +
2081 +class arch_nocona(generic_amd64):
2082 + "improved version of Intel Pentium 4 CPU with 64-bit extensions, MMX, SSE, SSE2 and SSE3 support"
2083 + def __init__(self,myspec):
2084 + generic_amd64.__init__(self,myspec)
2085 + self.settings["CFLAGS"]="-O2 -march=nocona -pipe"
2086 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2087 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
2088 +
2089 +# Requires gcc 4.3 to use this class
2090 +class arch_core2(generic_amd64):
2091 + "Intel Core 2 CPU with 64-bit extensions, MMX, SSE, SSE2, SSE3 and SSSE3 support"
2092 + def __init__(self,myspec):
2093 + generic_amd64.__init__(self,myspec)
2094 + self.settings["CFLAGS"]="-O2 -march=core2 -pipe"
2095 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2096 + self.settings["HOSTUSE"]=["mmx","sse","sse2","ssse3"]
2097 +
2098 +class arch_k8(generic_amd64):
2099 + "generic k8, opteron and athlon64 support"
2100 + def __init__(self,myspec):
2101 + generic_amd64.__init__(self,myspec)
2102 + self.settings["CFLAGS"]="-O2 -march=k8 -pipe"
2103 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2104 + self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
2105 +
2106 +class arch_k8_sse3(generic_amd64):
2107 + "improved versions of k8, opteron and athlon64 with SSE3 support"
2108 + def __init__(self,myspec):
2109 + generic_amd64.__init__(self,myspec)
2110 + self.settings["CFLAGS"]="-O2 -march=k8-sse3 -pipe"
2111 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2112 + self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
2113 +
2114 +class arch_amdfam10(generic_amd64):
2115 + "AMD Family 10h core based CPUs with x86-64 instruction set support"
2116 + def __init__(self,myspec):
2117 + generic_amd64.__init__(self,myspec)
2118 + self.settings["CFLAGS"]="-O2 -march=amdfam10 -pipe"
2119 + self.settings["CHOST"]="x86_64-pc-linux-gnu"
2120 + self.settings["HOSTUSE"]=["mmx","sse","sse2","3dnow"]
2121 +
2122 +class arch_x32(generic_amd64):
2123 + "builder class for generic x32 (Intel and AMD)"
2124 + def __init__(self,myspec):
2125 + generic_amd64.__init__(self,myspec)
2126 + self.settings["CFLAGS"]="-O2 -pipe"
2127 + self.settings["CHOST"]="x86_64-pc-linux-gnux32"
2128 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
2129 +
2130 +def register():
2131 + "inform main catalyst program of the contents of this plugin"
2132 + return ({
2133 + "amd64" : arch_amd64,
2134 + "k8" : arch_k8,
2135 + "opteron" : arch_k8,
2136 + "athlon64" : arch_k8,
2137 + "athlonfx" : arch_k8,
2138 + "nocona" : arch_nocona,
2139 + "core2" : arch_core2,
2140 + "k8-sse3" : arch_k8_sse3,
2141 + "opteron-sse3" : arch_k8_sse3,
2142 + "athlon64-sse3" : arch_k8_sse3,
2143 + "amdfam10" : arch_amdfam10,
2144 + "barcelona" : arch_amdfam10,
2145 + "x32" : arch_x32,
2146 + }, ("x86_64","amd64","nocona"))
2147 diff --git a/catalyst/arch/arm.py b/catalyst/arch/arm.py
2148 new file mode 100644
2149 index 0000000..2de3942
2150 --- /dev/null
2151 +++ b/catalyst/arch/arm.py
2152 @@ -0,0 +1,133 @@
2153 +
2154 +import builder,os
2155 +from catalyst_support import *
2156 +
2157 +class generic_arm(builder.generic):
2158 + "Abstract base class for all arm (little endian) builders"
2159 + def __init__(self,myspec):
2160 + builder.generic.__init__(self,myspec)
2161 + self.settings["CHROOT"]="chroot"
2162 + self.settings["CFLAGS"]="-O2 -pipe"
2163 +
2164 +class generic_armeb(builder.generic):
2165 + "Abstract base class for all arm (big endian) builders"
2166 + def __init__(self,myspec):
2167 + builder.generic.__init__(self,myspec)
2168 + self.settings["CHROOT"]="chroot"
2169 + self.settings["CFLAGS"]="-O2 -pipe"
2170 +
2171 +class arch_arm(generic_arm):
2172 + "Builder class for arm (little endian) target"
2173 + def __init__(self,myspec):
2174 + generic_arm.__init__(self,myspec)
2175 + self.settings["CHOST"]="arm-unknown-linux-gnu"
2176 +
2177 +class arch_armeb(generic_armeb):
2178 + "Builder class for arm (big endian) target"
2179 + def __init__(self,myspec):
2180 + generic_armeb.__init__(self,myspec)
2181 + self.settings["CHOST"]="armeb-unknown-linux-gnu"
2182 +
2183 +class arch_armv4l(generic_arm):
2184 + "Builder class for armv4l target"
2185 + def __init__(self,myspec):
2186 + generic_arm.__init__(self,myspec)
2187 + self.settings["CHOST"]="armv4l-unknown-linux-gnu"
2188 + self.settings["CFLAGS"]+=" -march=armv4"
2189 +
2190 +class arch_armv4tl(generic_arm):
2191 + "Builder class for armv4tl target"
2192 + def __init__(self,myspec):
2193 + generic_arm.__init__(self,myspec)
2194 + self.settings["CHOST"]="armv4tl-softfloat-linux-gnueabi"
2195 + self.settings["CFLAGS"]+=" -march=armv4t"
2196 +
2197 +class arch_armv5tl(generic_arm):
2198 + "Builder class for armv5tl target"
2199 + def __init__(self,myspec):
2200 + generic_arm.__init__(self,myspec)
2201 + self.settings["CHOST"]="armv5tl-softfloat-linux-gnueabi"
2202 + self.settings["CFLAGS"]+=" -march=armv5t"
2203 +
2204 +class arch_armv5tel(generic_arm):
2205 + "Builder class for armv5tel target"
2206 + def __init__(self,myspec):
2207 + generic_arm.__init__(self,myspec)
2208 + self.settings["CHOST"]="armv5tel-softfloat-linux-gnueabi"
2209 + self.settings["CFLAGS"]+=" -march=armv5te"
2210 +
2211 +class arch_armv5tejl(generic_arm):
2212 + "Builder class for armv5tejl target"
2213 + def __init__(self,myspec):
2214 + generic_arm.__init__(self,myspec)
2215 + self.settings["CHOST"]="armv5tejl-softfloat-linux-gnueabi"
2216 + self.settings["CFLAGS"]+=" -march=armv5te"
2217 +
2218 +class arch_armv6j(generic_arm):
2219 + "Builder class for armv6j target"
2220 + def __init__(self,myspec):
2221 + generic_arm.__init__(self,myspec)
2222 + self.settings["CHOST"]="armv6j-softfp-linux-gnueabi"
2223 + self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=softfp"
2224 +
2225 +class arch_armv6z(generic_arm):
2226 + "Builder class for armv6z target"
2227 + def __init__(self,myspec):
2228 + generic_arm.__init__(self,myspec)
2229 + self.settings["CHOST"]="armv6z-softfp-linux-gnueabi"
2230 + self.settings["CFLAGS"]+=" -march=armv6z -mfpu=vfp -mfloat-abi=softfp"
2231 +
2232 +class arch_armv6zk(generic_arm):
2233 + "Builder class for armv6zk target"
2234 + def __init__(self,myspec):
2235 + generic_arm.__init__(self,myspec)
2236 + self.settings["CHOST"]="armv6zk-softfp-linux-gnueabi"
2237 + self.settings["CFLAGS"]+=" -march=armv6zk -mfpu=vfp -mfloat-abi=softfp"
2238 +
2239 +class arch_armv7a(generic_arm):
2240 + "Builder class for armv7a target"
2241 + def __init__(self,myspec):
2242 + generic_arm.__init__(self,myspec)
2243 + self.settings["CHOST"]="armv7a-softfp-linux-gnueabi"
2244 + self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp"
2245 +
2246 +class arch_armv6j_hardfp(generic_arm):
2247 + "Builder class for armv6j hardfloat target, needs >=gcc-4.5"
2248 + def __init__(self,myspec):
2249 + generic_arm.__init__(self,myspec)
2250 + self.settings["CHOST"]="armv6j-hardfloat-linux-gnueabi"
2251 + self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=hard"
2252 +
2253 +class arch_armv7a_hardfp(generic_arm):
2254 + "Builder class for armv7a hardfloat target, needs >=gcc-4.5"
2255 + def __init__(self,myspec):
2256 + generic_arm.__init__(self,myspec)
2257 + self.settings["CHOST"]="armv7a-hardfloat-linux-gnueabi"
2258 + self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=hard"
2259 +
2260 +class arch_armv5teb(generic_armeb):
2261 + "Builder class for armv5teb (XScale) target"
2262 + def __init__(self,myspec):
2263 + generic_armeb.__init__(self,myspec)
2264 + self.settings["CFLAGS"]+=" -mcpu=xscale"
2265 + self.settings["CHOST"]="armv5teb-softfloat-linux-gnueabi"
2266 +
2267 +def register():
2268 + "Inform main catalyst program of the contents of this plugin."
2269 + return ({
2270 + "arm" : arch_arm,
2271 + "armv4l" : arch_armv4l,
2272 + "armv4tl": arch_armv4tl,
2273 + "armv5tl": arch_armv5tl,
2274 + "armv5tel": arch_armv5tel,
2275 + "armv5tejl": arch_armv5tejl,
2276 + "armv6j" : arch_armv6j,
2277 + "armv6z" : arch_armv6z,
2278 + "armv6zk" : arch_armv6zk,
2279 + "armv7a" : arch_armv7a,
2280 + "armv6j_hardfp" : arch_armv6j_hardfp,
2281 + "armv7a_hardfp" : arch_armv7a_hardfp,
2282 + "armeb" : arch_armeb,
2283 + "armv5teb" : arch_armv5teb
2284 + }, ("arm", "armv4l", "armv4tl", "armv5tl", "armv5tel", "armv5tejl", "armv6l",
2285 +"armv7l", "armeb", "armv5teb") )
2286 diff --git a/catalyst/arch/hppa.py b/catalyst/arch/hppa.py
2287 new file mode 100644
2288 index 0000000..f804398
2289 --- /dev/null
2290 +++ b/catalyst/arch/hppa.py
2291 @@ -0,0 +1,40 @@
2292 +
2293 +import builder,os
2294 +from catalyst_support import *
2295 +
2296 +class generic_hppa(builder.generic):
2297 + "Abstract base class for all hppa builders"
2298 + def __init__(self,myspec):
2299 + builder.generic.__init__(self,myspec)
2300 + self.settings["CHROOT"]="chroot"
2301 + self.settings["CFLAGS"]="-O2 -pipe"
2302 + self.settings["CXXFLAGS"]="-O2 -pipe"
2303 +
2304 +class arch_hppa(generic_hppa):
2305 + "Builder class for hppa systems"
2306 + def __init__(self,myspec):
2307 + generic_hppa.__init__(self,myspec)
2308 + self.settings["CFLAGS"]+=" -march=1.0"
2309 + self.settings["CHOST"]="hppa-unknown-linux-gnu"
2310 +
2311 +class arch_hppa1_1(generic_hppa):
2312 + "Builder class for hppa 1.1 systems"
2313 + def __init__(self,myspec):
2314 + generic_hppa.__init__(self,myspec)
2315 + self.settings["CFLAGS"]+=" -march=1.1"
2316 + self.settings["CHOST"]="hppa1.1-unknown-linux-gnu"
2317 +
2318 +class arch_hppa2_0(generic_hppa):
2319 + "Builder class for hppa 2.0 systems"
2320 + def __init__(self,myspec):
2321 + generic_hppa.__init__(self,myspec)
2322 + self.settings["CFLAGS"]+=" -march=2.0"
2323 + self.settings["CHOST"]="hppa2.0-unknown-linux-gnu"
2324 +
2325 +def register():
2326 + "Inform main catalyst program of the contents of this plugin."
2327 + return ({
2328 + "hppa": arch_hppa,
2329 + "hppa1.1": arch_hppa1_1,
2330 + "hppa2.0": arch_hppa2_0
2331 + }, ("parisc","parisc64","hppa","hppa64") )
2332 diff --git a/catalyst/arch/ia64.py b/catalyst/arch/ia64.py
2333 new file mode 100644
2334 index 0000000..825af70
2335 --- /dev/null
2336 +++ b/catalyst/arch/ia64.py
2337 @@ -0,0 +1,16 @@
2338 +
2339 +import builder,os
2340 +from catalyst_support import *
2341 +
2342 +class arch_ia64(builder.generic):
2343 + "builder class for ia64"
2344 + def __init__(self,myspec):
2345 + builder.generic.__init__(self,myspec)
2346 + self.settings["CHROOT"]="chroot"
2347 + self.settings["CFLAGS"]="-O2 -pipe"
2348 + self.settings["CFLAGS"]="-O2 -pipe"
2349 + self.settings["CHOST"]="ia64-unknown-linux-gnu"
2350 +
2351 +def register():
2352 + "Inform main catalyst program of the contents of this plugin."
2353 + return ({ "ia64":arch_ia64 }, ("ia64", ))
2354 diff --git a/catalyst/arch/mips.py b/catalyst/arch/mips.py
2355 new file mode 100644
2356 index 0000000..b3730fa
2357 --- /dev/null
2358 +++ b/catalyst/arch/mips.py
2359 @@ -0,0 +1,464 @@
2360 +
2361 +import builder,os
2362 +from catalyst_support import *
2363 +
2364 +class generic_mips(builder.generic):
2365 + "Abstract base class for all mips builders [Big-endian]"
2366 + def __init__(self,myspec):
2367 + builder.generic.__init__(self,myspec)
2368 + self.settings["CHROOT"]="chroot"
2369 + self.settings["CHOST"]="mips-unknown-linux-gnu"
2370 +
2371 +class generic_mipsel(builder.generic):
2372 + "Abstract base class for all mipsel builders [Little-endian]"
2373 + def __init__(self,myspec):
2374 + builder.generic.__init__(self,myspec)
2375 + self.settings["CHROOT"]="chroot"
2376 + self.settings["CHOST"]="mipsel-unknown-linux-gnu"
2377 +
2378 +class generic_mips64(builder.generic):
2379 + "Abstract base class for all mips64 builders [Big-endian]"
2380 + def __init__(self,myspec):
2381 + builder.generic.__init__(self,myspec)
2382 + self.settings["CHROOT"]="chroot"
2383 + self.settings["CHOST"]="mips64-unknown-linux-gnu"
2384 +
2385 +class generic_mips64el(builder.generic):
2386 + "Abstract base class for all mips64el builders [Little-endian]"
2387 + def __init__(self,myspec):
2388 + builder.generic.__init__(self,myspec)
2389 + self.settings["CHROOT"]="chroot"
2390 + self.settings["CHOST"]="mips64el-unknown-linux-gnu"
2391 +
2392 +class arch_mips1(generic_mips):
2393 + "Builder class for MIPS I [Big-endian]"
2394 + def __init__(self,myspec):
2395 + generic_mips.__init__(self,myspec)
2396 + self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
2397 +
2398 +class arch_mips32(generic_mips):
2399 + "Builder class for MIPS 32 [Big-endian]"
2400 + def __init__(self,myspec):
2401 + generic_mips.__init__(self,myspec)
2402 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2403 +
2404 +class arch_mips32_softfloat(generic_mips):
2405 + "Builder class for MIPS 32 [Big-endian softfloat]"
2406 + def __init__(self,myspec):
2407 + generic_mips.__init__(self,myspec)
2408 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2409 + self.settings["CHOST"]="mips-softfloat-linux-gnu"
2410 +
2411 +class arch_mips32r2(generic_mips):
2412 + "Builder class for MIPS 32r2 [Big-endian]"
2413 + def __init__(self,myspec):
2414 + generic_mips.__init__(self,myspec)
2415 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2416 +
2417 +class arch_mips32r2_softfloat(generic_mips):
2418 + "Builder class for MIPS 32r2 [Big-endian softfloat]"
2419 + def __init__(self,myspec):
2420 + generic_mips.__init__(self,myspec)
2421 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2422 + self.settings["CHOST"]="mips-softfloat-linux-gnu"
2423 +
2424 +class arch_mips3(generic_mips):
2425 + "Builder class for MIPS III [Big-endian]"
2426 + def __init__(self,myspec):
2427 + generic_mips.__init__(self,myspec)
2428 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
2429 +
2430 +class arch_mips3_n32(generic_mips64):
2431 + "Builder class for MIPS III [Big-endian N32]"
2432 + def __init__(self,myspec):
2433 + generic_mips64.__init__(self,myspec)
2434 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -mfix-r4000 -mfix-r4400 -pipe"
2435 +
2436 +class arch_mips3_n64(generic_mips64):
2437 + "Builder class for MIPS III [Big-endian N64]"
2438 + def __init__(self,myspec):
2439 + generic_mips64.__init__(self,myspec)
2440 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -mfix-r4000 -mfix-r4400 -pipe"
2441 +
2442 +class arch_mips3_multilib(generic_mips64):
2443 + "Builder class for MIPS III [Big-endian multilib]"
2444 + def __init__(self,myspec):
2445 + generic_mips64.__init__(self,myspec)
2446 + self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -mfix-r4000 -mfix-r4400 -pipe"
2447 +
2448 +class arch_mips4(generic_mips):
2449 + "Builder class for MIPS IV [Big-endian]"
2450 + def __init__(self,myspec):
2451 + generic_mips.__init__(self,myspec)
2452 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
2453 +
2454 +class arch_mips4_n32(generic_mips64):
2455 + "Builder class for MIPS IV [Big-endian N32]"
2456 + def __init__(self,myspec):
2457 + generic_mips64.__init__(self,myspec)
2458 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
2459 +
2460 +class arch_mips4_n64(generic_mips64):
2461 + "Builder class for MIPS IV [Big-endian N64]"
2462 + def __init__(self,myspec):
2463 + generic_mips64.__init__(self,myspec)
2464 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
2465 +
2466 +class arch_mips4_multilib(generic_mips64):
2467 + "Builder class for MIPS IV [Big-endian multilib]"
2468 + def __init__(self,myspec):
2469 + generic_mips64.__init__(self,myspec)
2470 + self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
2471 +
2472 +class arch_mips4_r10k(generic_mips):
2473 + "Builder class for MIPS IV R10k [Big-endian]"
2474 + def __init__(self,myspec):
2475 + generic_mips.__init__(self,myspec)
2476 + self.settings["CFLAGS"]="-O2 -march=r10k -mabi=32 -mplt -pipe"
2477 +
2478 +class arch_mips4_r10k_n32(generic_mips64):
2479 + "Builder class for MIPS IV R10k [Big-endian N32]"
2480 + def __init__(self,myspec):
2481 + generic_mips64.__init__(self,myspec)
2482 + self.settings["CFLAGS"]="-O2 -march=r10k -mabi=n32 -mplt -pipe"
2483 +
2484 +class arch_mips4_r10k_n64(generic_mips64):
2485 + "Builder class for MIPS IV R10k [Big-endian N64]"
2486 + def __init__(self,myspec):
2487 + generic_mips64.__init__(self,myspec)
2488 + self.settings["CFLAGS"]="-O2 -march=r10k -mabi=64 -pipe"
2489 +
2490 +class arch_mips4_r10k_multilib(generic_mips64):
2491 + "Builder class for MIPS IV R10k [Big-endian multilib]"
2492 + def __init__(self,myspec):
2493 + generic_mips64.__init__(self,myspec)
2494 + self.settings["CFLAGS"]="-O2 -march=r10k -mplt -pipe"
2495 +
2496 +class arch_mips64(generic_mips):
2497 + "Builder class for MIPS 64 [Big-endian]"
2498 + def __init__(self,myspec):
2499 + generic_mips.__init__(self,myspec)
2500 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
2501 +
2502 +class arch_mips64_n32(generic_mips64):
2503 + "Builder class for MIPS 64 [Big-endian N32]"
2504 + def __init__(self,myspec):
2505 + generic_mips64.__init__(self,myspec)
2506 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
2507 +
2508 +class arch_mips64_n64(generic_mips64):
2509 + "Builder class for MIPS 64 [Big-endian N64]"
2510 + def __init__(self,myspec):
2511 + generic_mips64.__init__(self,myspec)
2512 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
2513 +
2514 +class arch_mips64_multilib(generic_mips64):
2515 + "Builder class for MIPS 64 [Big-endian multilib]"
2516 + def __init__(self,myspec):
2517 + generic_mips64.__init__(self,myspec)
2518 + self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
2519 +
2520 +class arch_mips64r2(generic_mips):
2521 + "Builder class for MIPS 64r2 [Big-endian]"
2522 + def __init__(self,myspec):
2523 + generic_mips.__init__(self,myspec)
2524 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
2525 +
2526 +class arch_mips64r2_n32(generic_mips64):
2527 + "Builder class for MIPS 64r2 [Big-endian N32]"
2528 + def __init__(self,myspec):
2529 + generic_mips64.__init__(self,myspec)
2530 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
2531 +
2532 +class arch_mips64r2_n64(generic_mips64):
2533 + "Builder class for MIPS 64r2 [Big-endian N64]"
2534 + def __init__(self,myspec):
2535 + generic_mips64.__init__(self,myspec)
2536 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
2537 +
2538 +class arch_mips64r2_multilib(generic_mips64):
2539 + "Builder class for MIPS 64r2 [Big-endian multilib]"
2540 + def __init__(self,myspec):
2541 + generic_mips64.__init__(self,myspec)
2542 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
2543 +
2544 +class arch_mipsel1(generic_mipsel):
2545 + "Builder class for MIPS I [Little-endian]"
2546 + def __init__(self,myspec):
2547 + generic_mipsel.__init__(self,myspec)
2548 + self.settings["CFLAGS"]="-O2 -march=mips1 -mabi=32 -mplt -pipe"
2549 +
2550 +class arch_mips32el(generic_mipsel):
2551 + "Builder class for MIPS 32 [Little-endian]"
2552 + def __init__(self,myspec):
2553 + generic_mipsel.__init__(self,myspec)
2554 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2555 +
2556 +class arch_mips32el_softfloat(generic_mipsel):
2557 + "Builder class for MIPS 32 [Little-endian softfloat]"
2558 + def __init__(self,myspec):
2559 + generic_mipsel.__init__(self,myspec)
2560 + self.settings["CFLAGS"]="-O2 -march=mips32 -mabi=32 -mplt -pipe"
2561 + self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
2562 +
2563 +class arch_mips32r2el(generic_mipsel):
2564 + "Builder class for MIPS 32r2 [Little-endian]"
2565 + def __init__(self,myspec):
2566 + generic_mipsel.__init__(self,myspec)
2567 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2568 +
2569 +class arch_mips32r2el_softfloat(generic_mipsel):
2570 + "Builder class for MIPS 32r2 [Little-endian softfloat]"
2571 + def __init__(self,myspec):
2572 + generic_mipsel.__init__(self,myspec)
2573 + self.settings["CFLAGS"]="-O2 -march=mips32r2 -mabi=32 -mplt -pipe"
2574 + self.settings["CHOST"]="mipsel-softfloat-linux-gnu"
2575 +
2576 +class arch_mipsel3(generic_mipsel):
2577 + "Builder class for MIPS III [Little-endian]"
2578 + def __init__(self,myspec):
2579 + generic_mipsel.__init__(self,myspec)
2580 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2581 +
2582 +class arch_mipsel3_n32(generic_mips64el):
2583 + "Builder class for MIPS III [Little-endian N32]"
2584 + def __init__(self,myspec):
2585 + generic_mips64el.__init__(self,myspec)
2586 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2587 +
2588 +class arch_mipsel3_n64(generic_mips64el):
2589 + "Builder class for MIPS III [Little-endian N64]"
2590 + def __init__(self,myspec):
2591 + generic_mips64el.__init__(self,myspec)
2592 + self.settings["CFLAGS"]="-O2 -march=mips3 -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
2593 +
2594 +class arch_mipsel3_multilib(generic_mips64el):
2595 + "Builder class for MIPS III [Little-endian multilib]"
2596 + def __init__(self,myspec):
2597 + generic_mips64el.__init__(self,myspec)
2598 + self.settings["CFLAGS"]="-O2 -march=mips3 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2599 +
2600 +class arch_loongson2e(generic_mipsel):
2601 + "Builder class for Loongson 2E [Little-endian]"
2602 + def __init__(self,myspec):
2603 + generic_mipsel.__init__(self,myspec)
2604 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=32 -mplt -pipe"
2605 +
2606 +class arch_loongson2e_n32(generic_mips64el):
2607 + "Builder class for Loongson 2E [Little-endian N32]"
2608 + def __init__(self,myspec):
2609 + generic_mips64el.__init__(self,myspec)
2610 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=n32 -mplt -pipe"
2611 +
2612 +class arch_loongson2e_n64(generic_mips64el):
2613 + "Builder class for Loongson 2E [Little-endian N64]"
2614 + def __init__(self,myspec):
2615 + generic_mips64el.__init__(self,myspec)
2616 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mabi=64 -pipe"
2617 +
2618 +class arch_loongson2e_multilib(generic_mips64el):
2619 + "Builder class for Loongson 2E [Little-endian multilib]"
2620 + def __init__(self,myspec):
2621 + generic_mips64el.__init__(self,myspec)
2622 + self.settings["CFLAGS"]="-O2 -march=loongson2e -mplt -pipe"
2623 +
2624 +class arch_loongson2f(generic_mipsel):
2625 + "Builder class for Loongson 2F [Little-endian]"
2626 + def __init__(self,myspec):
2627 + generic_mipsel.__init__(self,myspec)
2628 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2629 +
2630 +class arch_loongson2f_n32(generic_mips64el):
2631 + "Builder class for Loongson 2F [Little-endian N32]"
2632 + def __init__(self,myspec):
2633 + generic_mips64el.__init__(self,myspec)
2634 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=n32 -mplt -Wa,-mfix-loongson2f-nop -pipe"
2635 +
2636 +class arch_loongson2f_n64(generic_mips64el):
2637 + "Builder class for Loongson 2F [Little-endian N64]"
2638 + def __init__(self,myspec):
2639 + generic_mips64el.__init__(self,myspec)
2640 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mabi=64 -Wa,-mfix-loongson2f-nop -pipe"
2641 +
2642 +class arch_loongson2f_multilib(generic_mips64el):
2643 + "Builder class for Loongson 2F [Little-endian multilib]"
2644 + def __init__(self,myspec):
2645 + generic_mips64el.__init__(self,myspec)
2646 + self.settings["CFLAGS"]="-O2 -march=loongson2f -mplt -Wa,-mfix-loongson2f-nop -pipe"
2647 +
2648 +class arch_mipsel4(generic_mipsel):
2649 + "Builder class for MIPS IV [Little-endian]"
2650 + def __init__(self,myspec):
2651 + generic_mipsel.__init__(self,myspec)
2652 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=32 -mplt -pipe"
2653 +
2654 +class arch_mipsel4_n32(generic_mips64el):
2655 + "Builder class for MIPS IV [Little-endian N32]"
2656 + def __init__(self,myspec):
2657 + generic_mips64el.__init__(self,myspec)
2658 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=n32 -mplt -pipe"
2659 +
2660 +class arch_mipsel4_n64(generic_mips64el):
2661 + "Builder class for MIPS IV [Little-endian N64]"
2662 + def __init__(self,myspec):
2663 + generic_mips64el.__init__(self,myspec)
2664 + self.settings["CFLAGS"]="-O2 -march=mips4 -mabi=64 -pipe"
2665 +
2666 +class arch_mipsel4_multilib(generic_mips64el):
2667 + "Builder class for MIPS IV [Little-endian multilib]"
2668 + def __init__(self,myspec):
2669 + generic_mips64el.__init__(self,myspec)
2670 + self.settings["CFLAGS"]="-O2 -march=mips4 -mplt -pipe"
2671 +
2672 +class arch_mips64el(generic_mipsel):
2673 + "Builder class for MIPS 64 [Little-endian]"
2674 + def __init__(self,myspec):
2675 + generic_mipsel.__init__(self,myspec)
2676 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=32 -mplt -pipe"
2677 +
2678 +class arch_mips64el_n32(generic_mips64el):
2679 + "Builder class for MIPS 64 [Little-endian N32]"
2680 + def __init__(self,myspec):
2681 + generic_mips64el.__init__(self,myspec)
2682 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=n32 -mplt -pipe"
2683 +
2684 +class arch_mips64el_n64(generic_mips64el):
2685 + "Builder class for MIPS 64 [Little-endian N64]"
2686 + def __init__(self,myspec):
2687 + generic_mips64el.__init__(self,myspec)
2688 + self.settings["CFLAGS"]="-O2 -march=mips64 -mabi=64 -pipe"
2689 +
2690 +class arch_mips64el_multilib(generic_mips64el):
2691 + "Builder class for MIPS 64 [Little-endian multilib]"
2692 + def __init__(self,myspec):
2693 + generic_mips64el.__init__(self,myspec)
2694 + self.settings["CFLAGS"]="-O2 -march=mips64 -mplt -pipe"
2695 +
2696 +class arch_mips64r2el(generic_mipsel):
2697 + "Builder class for MIPS 64r2 [Little-endian]"
2698 + def __init__(self,myspec):
2699 + generic_mipsel.__init__(self,myspec)
2700 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=32 -mplt -pipe"
2701 +
2702 +class arch_mips64r2el_n32(generic_mips64el):
2703 + "Builder class for MIPS 64r2 [Little-endian N32]"
2704 + def __init__(self,myspec):
2705 + generic_mips64el.__init__(self,myspec)
2706 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=n32 -mplt -pipe"
2707 +
2708 +class arch_mips64r2el_n64(generic_mips64el):
2709 + "Builder class for MIPS 64r2 [Little-endian N64]"
2710 + def __init__(self,myspec):
2711 + generic_mips64el.__init__(self,myspec)
2712 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mabi=64 -pipe"
2713 +
2714 +class arch_mips64r2el_multilib(generic_mips64el):
2715 + "Builder class for MIPS 64r2 [Little-endian multilib]"
2716 + def __init__(self,myspec):
2717 + generic_mips64el.__init__(self,myspec)
2718 + self.settings["CFLAGS"]="-O2 -march=mips64r2 -mplt -pipe"
2719 +
2720 +class arch_loongson3a(generic_mipsel):
2721 + "Builder class for Loongson 3A [Little-endian]"
2722 + def __init__(self,myspec):
2723 + generic_mipsel.__init__(self,myspec)
2724 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=32 -mplt -pipe"
2725 +
2726 +class arch_loongson3a_n32(generic_mips64el):
2727 + "Builder class for Loongson 3A [Little-endian N32]"
2728 + def __init__(self,myspec):
2729 + generic_mips64el.__init__(self,myspec)
2730 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=n32 -mplt -pipe"
2731 +
2732 +class arch_loongson3a_n64(generic_mips64el):
2733 + "Builder class for Loongson 3A [Little-endian N64]"
2734 + def __init__(self,myspec):
2735 + generic_mips64el.__init__(self,myspec)
2736 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mabi=64 -pipe"
2737 +
2738 +class arch_loongson3a_multilib(generic_mips64el):
2739 + "Builder class for Loongson 3A [Little-endian multilib]"
2740 + def __init__(self,myspec):
2741 + generic_mips64el.__init__(self,myspec)
2742 + self.settings["CFLAGS"]="-O2 -march=loongson3a -mplt -pipe"
2743 +
2744 +class arch_cobalt(generic_mipsel):
2745 + "Builder class for cobalt [Little-endian]"
2746 + def __init__(self,myspec):
2747 + generic_mipsel.__init__(self,myspec)
2748 + self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=32 -mplt -pipe"
2749 + self.settings["HOSTUSE"]=["cobalt"]
2750 +
2751 +class arch_cobalt_n32(generic_mips64el):
2752 + "Builder class for cobalt [Little-endian N32]"
2753 + def __init__(self,myspec):
2754 + generic_mips64el.__init__(self,myspec)
2755 + self.settings["CFLAGS"]="-O2 -march=r5000 -mabi=n32 -mplt -pipe"
2756 + self.settings["HOSTUSE"]=["cobalt"]
2757 +
2758 +def register():
2759 + "Inform main catalyst program of the contents of this plugin."
2760 + return ({
2761 + "cobalt" : arch_cobalt,
2762 + "cobalt_n32" : arch_cobalt_n32,
2763 + "mips" : arch_mips1,
2764 + "mips1" : arch_mips1,
2765 + "mips32" : arch_mips32,
2766 + "mips32_softfloat" : arch_mips32_softfloat,
2767 + "mips32r2" : arch_mips32r2,
2768 + "mips32r2_softfloat" : arch_mips32r2_softfloat,
2769 + "mips3" : arch_mips3,
2770 + "mips3_n32" : arch_mips3_n32,
2771 + "mips3_n64" : arch_mips3_n64,
2772 + "mips3_multilib" : arch_mips3_multilib,
2773 + "mips4" : arch_mips4,
2774 + "mips4_n32" : arch_mips4_n32,
2775 + "mips4_n64" : arch_mips4_n64,
2776 + "mips4_multilib" : arch_mips4_multilib,
2777 + "mips4_r10k" : arch_mips4_r10k,
2778 + "mips4_r10k_n32" : arch_mips4_r10k_n32,
2779 + "mips4_r10k_n64" : arch_mips4_r10k_n64,
2780 + "mips4_r10k_multilib" : arch_mips4_r10k_multilib,
2781 + "mips64" : arch_mips64,
2782 + "mips64_n32" : arch_mips64_n32,
2783 + "mips64_n64" : arch_mips64_n64,
2784 + "mips64_multilib" : arch_mips64_multilib,
2785 + "mips64r2" : arch_mips64r2,
2786 + "mips64r2_n32" : arch_mips64r2_n32,
2787 + "mips64r2_n64" : arch_mips64r2_n64,
2788 + "mips64r2_multilib" : arch_mips64r2_multilib,
2789 + "mipsel" : arch_mipsel1,
2790 + "mipsel1" : arch_mipsel1,
2791 + "mips32el" : arch_mips32el,
2792 + "mips32el_softfloat" : arch_mips32el_softfloat,
2793 + "mips32r2el" : arch_mips32r2el,
2794 + "mips32r2el_softfloat" : arch_mips32r2el_softfloat,
2795 + "mipsel3" : arch_mipsel3,
2796 + "mipsel3_n32" : arch_mipsel3_n32,
2797 + "mipsel3_n64" : arch_mipsel3_n64,
2798 + "mipsel3_multilib" : arch_mipsel3_multilib,
2799 + "mipsel4" : arch_mipsel4,
2800 + "mipsel4_n32" : arch_mipsel4_n32,
2801 + "mipsel4_n64" : arch_mipsel4_n64,
2802 + "mipsel4_multilib" : arch_mipsel4_multilib,
2803 + "mips64el" : arch_mips64el,
2804 + "mips64el_n32" : arch_mips64el_n32,
2805 + "mips64el_n64" : arch_mips64el_n64,
2806 + "mips64el_multilib" : arch_mips64el_multilib,
2807 + "mips64r2el" : arch_mips64r2el,
2808 + "mips64r2el_n32" : arch_mips64r2el_n32,
2809 + "mips64r2el_n64" : arch_mips64r2el_n64,
2810 + "mips64r2el_multilib" : arch_mips64r2el_multilib,
2811 + "loongson2e" : arch_loongson2e,
2812 + "loongson2e_n32" : arch_loongson2e_n32,
2813 + "loongson2e_n64" : arch_loongson2e_n64,
2814 + "loongson2e_multilib" : arch_loongson2e_multilib,
2815 + "loongson2f" : arch_loongson2f,
2816 + "loongson2f_n32" : arch_loongson2f_n32,
2817 + "loongson2f_n64" : arch_loongson2f_n64,
2818 + "loongson2f_multilib" : arch_loongson2f_multilib,
2819 + "loongson3a" : arch_loongson3a,
2820 + "loongson3a_n32" : arch_loongson3a_n32,
2821 + "loongson3a_n64" : arch_loongson3a_n64,
2822 + "loongson3a_multilib" : arch_loongson3a_multilib,
2823 + }, ("mips","mips64"))
2824 diff --git a/catalyst/arch/powerpc.py b/catalyst/arch/powerpc.py
2825 new file mode 100644
2826 index 0000000..e9f611b
2827 --- /dev/null
2828 +++ b/catalyst/arch/powerpc.py
2829 @@ -0,0 +1,124 @@
2830 +
2831 +import os,builder
2832 +from catalyst_support import *
2833 +
2834 +class generic_ppc(builder.generic):
2835 + "abstract base class for all 32-bit powerpc builders"
2836 + def __init__(self,myspec):
2837 + builder.generic.__init__(self,myspec)
2838 + self.settings["CHOST"]="powerpc-unknown-linux-gnu"
2839 + if self.settings["buildarch"]=="ppc64":
2840 + if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
2841 + raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
2842 + self.settings["CHROOT"]="linux32 chroot"
2843 + self.settings["crosscompile"] = False;
2844 + else:
2845 + self.settings["CHROOT"]="chroot"
2846 +
2847 +class generic_ppc64(builder.generic):
2848 + "abstract base class for all 64-bit powerpc builders"
2849 + def __init__(self,myspec):
2850 + builder.generic.__init__(self,myspec)
2851 + self.settings["CHROOT"]="chroot"
2852 +
2853 +class arch_ppc(generic_ppc):
2854 + "builder class for generic powerpc"
2855 + def __init__(self,myspec):
2856 + generic_ppc.__init__(self,myspec)
2857 + self.settings["CFLAGS"]="-O2 -mcpu=powerpc -mtune=powerpc -pipe"
2858 +
2859 +class arch_ppc64(generic_ppc64):
2860 + "builder class for generic ppc64"
2861 + def __init__(self,myspec):
2862 + generic_ppc64.__init__(self,myspec)
2863 + self.settings["CFLAGS"]="-O2 -pipe"
2864 + self.settings["CHOST"]="powerpc64-unknown-linux-gnu"
2865 +
2866 +class arch_970(arch_ppc64):
2867 + "builder class for 970 aka G5 under ppc64"
2868 + def __init__(self,myspec):
2869 + arch_ppc64.__init__(self,myspec)
2870 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=970 -mtune=970"
2871 + self.settings["HOSTUSE"]=["altivec"]
2872 +
2873 +class arch_cell(arch_ppc64):
2874 + "builder class for cell under ppc64"
2875 + def __init__(self,myspec):
2876 + arch_ppc64.__init__(self,myspec)
2877 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=cell -mtune=cell"
2878 + self.settings["HOSTUSE"]=["altivec","ibm"]
2879 +
2880 +class arch_g3(generic_ppc):
2881 + def __init__(self,myspec):
2882 + generic_ppc.__init__(self,myspec)
2883 + self.settings["CFLAGS"]="-O2 -mcpu=G3 -mtune=G3 -pipe"
2884 +
2885 +class arch_g4(generic_ppc):
2886 + def __init__(self,myspec):
2887 + generic_ppc.__init__(self,myspec)
2888 + self.settings["CFLAGS"]="-O2 -mcpu=G4 -mtune=G4 -maltivec -mabi=altivec -pipe"
2889 + self.settings["HOSTUSE"]=["altivec"]
2890 +
2891 +class arch_g5(generic_ppc):
2892 + def __init__(self,myspec):
2893 + generic_ppc.__init__(self,myspec)
2894 + self.settings["CFLAGS"]="-O2 -mcpu=G5 -mtune=G5 -maltivec -mabi=altivec -pipe"
2895 + self.settings["HOSTUSE"]=["altivec"]
2896 +
2897 +class arch_power(generic_ppc):
2898 + "builder class for generic power"
2899 + def __init__(self,myspec):
2900 + generic_ppc.__init__(self,myspec)
2901 + self.settings["CFLAGS"]="-O2 -mcpu=power -mtune=power -pipe"
2902 +
2903 +class arch_power_ppc(generic_ppc):
2904 + "builder class for generic powerpc/power"
2905 + def __init__(self,myspec):
2906 + generic_ppc.__init__(self,myspec)
2907 + self.settings["CFLAGS"]="-O2 -mcpu=common -mtune=common -pipe"
2908 +
2909 +class arch_power3(arch_ppc64):
2910 + "builder class for power3 under ppc64"
2911 + def __init__(self,myspec):
2912 + arch_ppc64.__init__(self,myspec)
2913 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power3 -mtune=power3"
2914 + self.settings["HOSTUSE"]=["ibm"]
2915 +
2916 +class arch_power4(arch_ppc64):
2917 + "builder class for power4 under ppc64"
2918 + def __init__(self,myspec):
2919 + arch_ppc64.__init__(self,myspec)
2920 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power4 -mtune=power4"
2921 + self.settings["HOSTUSE"]=["ibm"]
2922 +
2923 +class arch_power5(arch_ppc64):
2924 + "builder class for power5 under ppc64"
2925 + def __init__(self,myspec):
2926 + arch_ppc64.__init__(self,myspec)
2927 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power5 -mtune=power5"
2928 + self.settings["HOSTUSE"]=["ibm"]
2929 +
2930 +class arch_power6(arch_ppc64):
2931 + "builder class for power6 under ppc64"
2932 + def __init__(self,myspec):
2933 + arch_ppc64.__init__(self,myspec)
2934 + self.settings["CFLAGS"]="-O2 -pipe -mcpu=power6 -mtune=power6"
2935 + self.settings["HOSTUSE"]=["altivec","ibm"]
2936 +
2937 +def register():
2938 + "Inform main catalyst program of the contents of this plugin."
2939 + return ({
2940 + "970" : arch_970,
2941 + "cell" : arch_cell,
2942 + "g3" : arch_g3,
2943 + "g4" : arch_g4,
2944 + "g5" : arch_g5,
2945 + "power" : arch_power,
2946 + "power-ppc" : arch_power_ppc,
2947 + "power3" : arch_power3,
2948 + "power4" : arch_power4,
2949 + "power5" : arch_power5,
2950 + "power6" : arch_power6,
2951 + "ppc" : arch_ppc,
2952 + "ppc64" : arch_ppc64
2953 + }, ("ppc","ppc64","powerpc","powerpc64"))
2954 diff --git a/catalyst/arch/s390.py b/catalyst/arch/s390.py
2955 new file mode 100644
2956 index 0000000..bf22f66
2957 --- /dev/null
2958 +++ b/catalyst/arch/s390.py
2959 @@ -0,0 +1,33 @@
2960 +
2961 +import builder,os
2962 +from catalyst_support import *
2963 +
2964 +class generic_s390(builder.generic):
2965 + "abstract base class for all s390 builders"
2966 + def __init__(self,myspec):
2967 + builder.generic.__init__(self,myspec)
2968 + self.settings["CHROOT"]="chroot"
2969 +
2970 +class generic_s390x(builder.generic):
2971 + "abstract base class for all s390x builders"
2972 + def __init__(self,myspec):
2973 + builder.generic.__init__(self,myspec)
2974 + self.settings["CHROOT"]="chroot"
2975 +
2976 +class arch_s390(generic_s390):
2977 + "builder class for generic s390"
2978 + def __init__(self,myspec):
2979 + generic_s390.__init__(self,myspec)
2980 + self.settings["CFLAGS"]="-O2 -pipe"
2981 + self.settings["CHOST"]="s390-ibm-linux-gnu"
2982 +
2983 +class arch_s390x(generic_s390x):
2984 + "builder class for generic s390x"
2985 + def __init__(self,myspec):
2986 + generic_s390x.__init__(self,myspec)
2987 + self.settings["CFLAGS"]="-O2 -pipe"
2988 + self.settings["CHOST"]="s390x-ibm-linux-gnu"
2989 +
2990 +def register():
2991 + "Inform main catalyst program of the contents of this plugin."
2992 + return ({"s390":arch_s390,"s390x":arch_s390x}, ("s390", "s390x"))
2993 diff --git a/catalyst/arch/sh.py b/catalyst/arch/sh.py
2994 new file mode 100644
2995 index 0000000..2fc9531
2996 --- /dev/null
2997 +++ b/catalyst/arch/sh.py
2998 @@ -0,0 +1,116 @@
2999 +
3000 +import builder,os
3001 +from catalyst_support import *
3002 +
3003 +class generic_sh(builder.generic):
3004 + "Abstract base class for all sh builders [Little-endian]"
3005 + def __init__(self,myspec):
3006 + builder.generic.__init__(self,myspec)
3007 + self.settings["CHROOT"]="chroot"
3008 +
3009 +class generic_sheb(builder.generic):
3010 + "Abstract base class for all sheb builders [Big-endian]"
3011 + def __init__(self,myspec):
3012 + builder.generic.__init__(self,myspec)
3013 + self.settings["CHROOT"]="chroot"
3014 +
3015 +class arch_sh(generic_sh):
3016 + "Builder class for SH [Little-endian]"
3017 + def __init__(self,myspec):
3018 + generic_sh.__init__(self,myspec)
3019 + self.settings["CFLAGS"]="-O2 -pipe"
3020 + self.settings["CHOST"]="sh-unknown-linux-gnu"
3021 +
3022 +class arch_sh2(generic_sh):
3023 + "Builder class for SH-2 [Little-endian]"
3024 + def __init__(self,myspec):
3025 + generic_sh.__init__(self,myspec)
3026 + self.settings["CFLAGS"]="-O2 -m2 -pipe"
3027 + self.settings["CHOST"]="sh2-unknown-linux-gnu"
3028 +
3029 +class arch_sh2a(generic_sh):
3030 + "Builder class for SH-2A [Little-endian]"
3031 + def __init__(self,myspec):
3032 + generic_sh.__init__(self,myspec)
3033 + self.settings["CFLAGS"]="-O2 -m2a -pipe"
3034 + self.settings["CHOST"]="sh2a-unknown-linux-gnu"
3035 +
3036 +class arch_sh3(generic_sh):
3037 + "Builder class for SH-3 [Little-endian]"
3038 + def __init__(self,myspec):
3039 + generic_sh.__init__(self,myspec)
3040 + self.settings["CFLAGS"]="-O2 -m3 -pipe"
3041 + self.settings["CHOST"]="sh3-unknown-linux-gnu"
3042 +
3043 +class arch_sh4(generic_sh):
3044 + "Builder class for SH-4 [Little-endian]"
3045 + def __init__(self,myspec):
3046 + generic_sh.__init__(self,myspec)
3047 + self.settings["CFLAGS"]="-O2 -m4 -pipe"
3048 + self.settings["CHOST"]="sh4-unknown-linux-gnu"
3049 +
3050 +class arch_sh4a(generic_sh):
3051 + "Builder class for SH-4A [Little-endian]"
3052 + def __init__(self,myspec):
3053 + generic_sh.__init__(self,myspec)
3054 + self.settings["CFLAGS"]="-O2 -m4a -pipe"
3055 + self.settings["CHOST"]="sh4a-unknown-linux-gnu"
3056 +
3057 +class arch_sheb(generic_sheb):
3058 + "Builder class for SH [Big-endian]"
3059 + def __init__(self,myspec):
3060 + generic_sheb.__init__(self,myspec)
3061 + self.settings["CFLAGS"]="-O2 -pipe"
3062 + self.settings["CHOST"]="sheb-unknown-linux-gnu"
3063 +
3064 +class arch_sh2eb(generic_sheb):
3065 + "Builder class for SH-2 [Big-endian]"
3066 + def __init__(self,myspec):
3067 + generic_sheb.__init__(self,myspec)
3068 + self.settings["CFLAGS"]="-O2 -m2 -pipe"
3069 + self.settings["CHOST"]="sh2eb-unknown-linux-gnu"
3070 +
3071 +class arch_sh2aeb(generic_sheb):
3072 + "Builder class for SH-2A [Big-endian]"
3073 + def __init__(self,myspec):
3074 + generic_sheb.__init__(self,myspec)
3075 + self.settings["CFLAGS"]="-O2 -m2a -pipe"
3076 + self.settings["CHOST"]="sh2aeb-unknown-linux-gnu"
3077 +
3078 +class arch_sh3eb(generic_sheb):
3079 + "Builder class for SH-3 [Big-endian]"
3080 + def __init__(self,myspec):
3081 + generic_sheb.__init__(self,myspec)
3082 + self.settings["CFLAGS"]="-O2 -m3 -pipe"
3083 + self.settings["CHOST"]="sh3eb-unknown-linux-gnu"
3084 +
3085 +class arch_sh4eb(generic_sheb):
3086 + "Builder class for SH-4 [Big-endian]"
3087 + def __init__(self,myspec):
3088 + generic_sheb.__init__(self,myspec)
3089 + self.settings["CFLAGS"]="-O2 -m4 -pipe"
3090 + self.settings["CHOST"]="sh4eb-unknown-linux-gnu"
3091 +
3092 +class arch_sh4aeb(generic_sheb):
3093 + "Builder class for SH-4A [Big-endian]"
3094 + def __init__(self,myspec):
3095 + generic_sheb.__init__(self,myspec)
3096 + self.settings["CFLAGS"]="-O2 -m4a -pipe"
3097 + self.settings["CHOST"]="sh4aeb-unknown-linux-gnu"
3098 +
3099 +def register():
3100 + "Inform main catalyst program of the contents of this plugin."
3101 + return ({
3102 + "sh" :arch_sh,
3103 + "sh2" :arch_sh2,
3104 + "sh2a" :arch_sh2a,
3105 + "sh3" :arch_sh3,
3106 + "sh4" :arch_sh4,
3107 + "sh4a" :arch_sh4a,
3108 + "sheb" :arch_sheb,
3109 + "sh2eb" :arch_sh2eb,
3110 + "sh2aeb" :arch_sh2aeb,
3111 + "sh3eb" :arch_sh3eb,
3112 + "sh4eb" :arch_sh4eb,
3113 + "sh4aeb" :arch_sh4aeb
3114 + }, ("sh2","sh2a","sh3","sh4","sh4a","sh2eb","sh2aeb","sh3eb","sh4eb","sh4aeb"))
3115 diff --git a/catalyst/arch/sparc.py b/catalyst/arch/sparc.py
3116 new file mode 100644
3117 index 0000000..5eb5344
3118 --- /dev/null
3119 +++ b/catalyst/arch/sparc.py
3120 @@ -0,0 +1,42 @@
3121 +
3122 +import builder,os
3123 +from catalyst_support import *
3124 +
3125 +class generic_sparc(builder.generic):
3126 + "abstract base class for all sparc builders"
3127 + def __init__(self,myspec):
3128 + builder.generic.__init__(self,myspec)
3129 + if self.settings["buildarch"]=="sparc64":
3130 + if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
3131 + raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
3132 + self.settings["CHROOT"]="linux32 chroot"
3133 + self.settings["crosscompile"] = False;
3134 + else:
3135 + self.settings["CHROOT"]="chroot"
3136 +
3137 +class generic_sparc64(builder.generic):
3138 + "abstract base class for all sparc64 builders"
3139 + def __init__(self,myspec):
3140 + builder.generic.__init__(self,myspec)
3141 + self.settings["CHROOT"]="chroot"
3142 +
3143 +class arch_sparc(generic_sparc):
3144 + "builder class for generic sparc (sun4cdm)"
3145 + def __init__(self,myspec):
3146 + generic_sparc.__init__(self,myspec)
3147 + self.settings["CFLAGS"]="-O2 -pipe"
3148 + self.settings["CHOST"]="sparc-unknown-linux-gnu"
3149 +
3150 +class arch_sparc64(generic_sparc64):
3151 + "builder class for generic sparc64 (sun4u)"
3152 + def __init__(self,myspec):
3153 + generic_sparc64.__init__(self,myspec)
3154 + self.settings["CFLAGS"]="-O2 -mcpu=ultrasparc -pipe"
3155 + self.settings["CHOST"]="sparc-unknown-linux-gnu"
3156 +
3157 +def register():
3158 + "Inform main catalyst program of the contents of this plugin."
3159 + return ({
3160 + "sparc" : arch_sparc,
3161 + "sparc64" : arch_sparc64
3162 + }, ("sparc","sparc64", ))
3163 diff --git a/catalyst/arch/x86.py b/catalyst/arch/x86.py
3164 new file mode 100644
3165 index 0000000..0391b79
3166 --- /dev/null
3167 +++ b/catalyst/arch/x86.py
3168 @@ -0,0 +1,153 @@
3169 +
3170 +import builder,os
3171 +from catalyst_support import *
3172 +
3173 +class generic_x86(builder.generic):
3174 + "abstract base class for all x86 builders"
3175 + def __init__(self,myspec):
3176 + builder.generic.__init__(self,myspec)
3177 + if self.settings["buildarch"]=="amd64":
3178 + if not os.path.exists("/bin/linux32") and not os.path.exists("/usr/bin/linux32"):
3179 + raise CatalystError,"required executable linux32 not found (\"emerge setarch\" to fix.)"
3180 + self.settings["CHROOT"]="linux32 chroot"
3181 + self.settings["crosscompile"] = False;
3182 + else:
3183 + self.settings["CHROOT"]="chroot"
3184 +
3185 +class arch_x86(generic_x86):
3186 + "builder class for generic x86 (386+)"
3187 + def __init__(self,myspec):
3188 + generic_x86.__init__(self,myspec)
3189 + self.settings["CFLAGS"]="-O2 -mtune=i686 -pipe"
3190 + self.settings["CHOST"]="i386-pc-linux-gnu"
3191 +
3192 +class arch_i386(generic_x86):
3193 + "Intel i386 CPU"
3194 + def __init__(self,myspec):
3195 + generic_x86.__init__(self,myspec)
3196 + self.settings["CFLAGS"]="-O2 -march=i386 -pipe"
3197 + self.settings["CHOST"]="i386-pc-linux-gnu"
3198 +
3199 +class arch_i486(generic_x86):
3200 + "Intel i486 CPU"
3201 + def __init__(self,myspec):
3202 + generic_x86.__init__(self,myspec)
3203 + self.settings["CFLAGS"]="-O2 -march=i486 -pipe"
3204 + self.settings["CHOST"]="i486-pc-linux-gnu"
3205 +
3206 +class arch_i586(generic_x86):
3207 + "Intel Pentium CPU"
3208 + def __init__(self,myspec):
3209 + generic_x86.__init__(self,myspec)
3210 + self.settings["CFLAGS"]="-O2 -march=i586 -pipe"
3211 + self.settings["CHOST"]="i586-pc-linux-gnu"
3212 +
3213 +class arch_i686(generic_x86):
3214 + "Intel Pentium Pro CPU"
3215 + def __init__(self,myspec):
3216 + generic_x86.__init__(self,myspec)
3217 + self.settings["CFLAGS"]="-O2 -march=i686 -pipe"
3218 + self.settings["CHOST"]="i686-pc-linux-gnu"
3219 +
3220 +class arch_pentium_mmx(generic_x86):
3221 + "Intel Pentium MMX CPU with MMX support"
3222 + def __init__(self,myspec):
3223 + generic_x86.__init__(self,myspec)
3224 + self.settings["CFLAGS"]="-O2 -march=pentium-mmx -pipe"
3225 + self.settings["HOSTUSE"]=["mmx"]
3226 +
3227 +class arch_pentium2(generic_x86):
3228 + "Intel Pentium 2 CPU with MMX support"
3229 + def __init__(self,myspec):
3230 + generic_x86.__init__(self,myspec)
3231 + self.settings["CFLAGS"]="-O2 -march=pentium2 -pipe"
3232 + self.settings["HOSTUSE"]=["mmx"]
3233 +
3234 +class arch_pentium3(generic_x86):
3235 + "Intel Pentium 3 CPU with MMX and SSE support"
3236 + def __init__(self,myspec):
3237 + generic_x86.__init__(self,myspec)
3238 + self.settings["CFLAGS"]="-O2 -march=pentium3 -pipe"
3239 + self.settings["HOSTUSE"]=["mmx","sse"]
3240 +
3241 +class arch_pentium4(generic_x86):
3242 + "Intel Pentium 4 CPU with MMX, SSE and SSE2 support"
3243 + def __init__(self,myspec):
3244 + generic_x86.__init__(self,myspec)
3245 + self.settings["CFLAGS"]="-O2 -march=pentium4 -pipe"
3246 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
3247 +
3248 +class arch_pentium_m(generic_x86):
3249 + "Intel Pentium M CPU with MMX, SSE and SSE2 support"
3250 + def __init__(self,myspec):
3251 + generic_x86.__init__(self,myspec)
3252 + self.settings["CFLAGS"]="-O2 -march=pentium-m -pipe"
3253 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
3254 +
3255 +class arch_prescott(generic_x86):
3256 + "improved version of Intel Pentium 4 CPU with MMX, SSE, SSE2 and SSE3 support"
3257 + def __init__(self,myspec):
3258 + generic_x86.__init__(self,myspec)
3259 + self.settings["CFLAGS"]="-O2 -march=prescott -pipe"
3260 + self.settings["HOSTUSE"]=["mmx","sse","sse2"]
3261 + self.settings["CHOST"]="i686-pc-linux-gnu"
3262 +
3263 +class arch_k6(generic_x86):
3264 + "AMD K6 CPU with MMX support"
3265 + def __init__(self,myspec):
3266 + generic_x86.__init__(self,myspec)
3267 + self.settings["CFLAGS"]="-O2 -march=k6 -pipe"
3268 + self.settings["CHOST"]="i686-pc-linux-gnu"
3269 + self.settings["HOSTUSE"]=["mmx"]
3270 +
3271 +class arch_k6_2(generic_x86):
3272 + "AMD K6-2 CPU with MMX and 3dNOW! support"
3273 + def __init__(self,myspec):
3274 + generic_x86.__init__(self,myspec)
3275 + self.settings["CFLAGS"]="-O2 -march=k6-2 -pipe"
3276 + self.settings["CHOST"]="i686-pc-linux-gnu"
3277 + self.settings["HOSTUSE"]=["mmx","3dnow"]
3278 +
3279 +class arch_athlon(generic_x86):
3280 + "AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and SSE prefetch support"
3281 + def __init__(self,myspec):
3282 + generic_x86.__init__(self,myspec)
3283 + self.settings["CFLAGS"]="-O2 -march=athlon -pipe"
3284 + self.settings["CHOST"]="i686-pc-linux-gnu"
3285 + self.settings["HOSTUSE"]=["mmx","3dnow"]
3286 +
3287 +class arch_athlon_xp(generic_x86):
3288 + "improved AMD Athlon CPU with MMX, 3dNOW!, enhanced 3dNOW! and full SSE support"
3289 + def __init__(self,myspec):
3290 + generic_x86.__init__(self,myspec)
3291 + self.settings["CFLAGS"]="-O2 -march=athlon-xp -pipe"
3292 + self.settings["CHOST"]="i686-pc-linux-gnu"
3293 + self.settings["HOSTUSE"]=["mmx","3dnow","sse"]
3294 +
def register():
	"Inform main catalyst program of the contents of this plugin."
	# Map every accepted subarch keyword onto its builder class; several
	# keywords are aliases for the same class (e.g. k6-3 builds as k6-2).
	targets = {
		"x86"          : arch_x86,
		"i386"         : arch_i386,
		"i486"         : arch_i486,
		"i586"         : arch_i586,
		"i686"         : arch_i686,
		"pentium"      : arch_i586,
		"pentium2"     : arch_pentium2,
		"pentium3"     : arch_pentium3,
		"pentium3m"    : arch_pentium3,
		"pentium-m"    : arch_pentium_m,
		"pentium4"     : arch_pentium4,
		"pentium4m"    : arch_pentium4,
		"pentiumpro"   : arch_i686,
		"pentium-mmx"  : arch_pentium_mmx,
		"prescott"     : arch_prescott,
		"k6"           : arch_k6,
		"k6-2"         : arch_k6_2,
		"k6-3"         : arch_k6_2,
		"athlon"       : arch_athlon,
		"athlon-tbird" : arch_athlon,
		"athlon-4"     : arch_athlon_xp,
		"athlon-xp"    : arch_athlon_xp,
		"athlon-mp"    : arch_athlon_xp,
	}
	# Second element: the machine types this plugin claims.
	return (targets, ('i386', 'i486', 'i586', 'i686'))
3322 diff --git a/catalyst/config.py b/catalyst/config.py
3323 new file mode 100644
3324 index 0000000..726bf74
3325 --- /dev/null
3326 +++ b/catalyst/config.py
3327 @@ -0,0 +1,122 @@
3328 +import re
3329 +from modules.catalyst_support import *
3330 +
class ParserBase:
	"""Line-oriented "key <sep> value" parser.

	Subclasses configure behaviour through class attributes:
	key_value_separator -- string separating key from value(s)
	multiple_values     -- if true, values are whitespace-split into lists
	                       and separator-less lines continue the last key
	empty_values        -- if false, keys with no value are dropped

	Parsed results live in self.values (dict).  A key with exactly one
	value maps to a string; otherwise to a list (possibly empty).
	"""

	filename = ""
	lines = None
	values = None
	key_value_separator = "="
	multiple_values = False
	empty_values = True

	def __getitem__(self, key):
		return self.values[key]

	def get_values(self):
		return self.values

	def dump(self):
		"""Return a printable "key = repr(value)" dump of parsed values."""
		dump = ""
		for x in self.values.keys():
			dump += x + " = " + repr(self.values[x]) + "\n"
		return dump

	def parse_file(self, filename):
		"""Read *filename* and parse it; raises CatalystError if unreadable."""
		try:
			myf = open(filename, "r")
		except IOError:
			# Narrowed from a bare except: only an open() failure means
			# "could not open"; anything else should propagate untouched.
			raise CatalystError("Could not open file " + filename)
		self.lines = myf.readlines()
		myf.close()
		self.filename = filename
		self.parse()

	def parse_lines(self, lines):
		"""Parse an already-read sequence of lines (e.g. cmdline args)."""
		self.lines = lines
		self.parse()

	def parse(self):
		"""Parse self.lines into self.values."""
		values = {}
		cur_array = []

		trailing_comment = re.compile(r'\s*#.*$')

		for x, myline in enumerate(self.lines):
			myline = myline.strip()

			# Remove comments ( anything following # )
			myline = trailing_comment.sub("", myline)

			# Skip any blank lines
			if not myline: continue

			# Look for the key/value separator
			msearch = myline.find(self.key_value_separator)

			if msearch != -1:
				# New key: split on the first occurrence of the separator.
				mobjs = myline.split(self.key_value_separator, 1)
				mobjs[1] = mobjs[1].strip().strip('"')

				# Start a new array using the first element of mobjs
				cur_array = [mobjs[0]]
				if mobjs[1]:
					if self.multiple_values:
						# split on white space creating additional array elements
						cur_array += mobjs[1].split()
					else:
						cur_array += [mobjs[1]]

			# Else add on to the last key we were working on
			else:
				if self.multiple_values:
					cur_array += myline.split()
				else:
					# BUG FIX: x is the (int) line index from enumerate();
					# concatenating it to a str raised TypeError instead of
					# the intended CatalystError.
					raise CatalystError("Syntax error: " + str(x))

			# XXX: Do we really still need this "single value is a string" behavior?
			if len(cur_array) == 2:
				values[cur_array[0]] = cur_array[1]
			else:
				values[cur_array[0]] = cur_array[1:]

		if not self.empty_values:
			# Delete empty key pairs (snapshot the keys since we mutate).
			for x in list(values.keys()):
				if not values[x]:
					print("\n\tWARNING: No value set for key " + x + "...deleting")
					del values[x]

		self.values = values
3430 +
class SpecParser(ParserBase):
	"""Parser for catalyst .spec files: "key: value [value ...]" lines,
	whitespace-split multi-values, empty keys discarded."""

	multiple_values = True
	empty_values = False
	key_value_separator = ':'

	def __init__(self, filename=""):
		# Parse immediately when a path is supplied.
		if not filename:
			return
		self.parse_file(filename)
3440 +
class ConfigParser(ParserBase):
	"""Parser for catalyst.conf-style files: single "key=value" pairs,
	empty values preserved."""

	empty_values = True
	multiple_values = False
	key_value_separator = '='

	def __init__(self, filename=""):
		# Parse immediately when a path is supplied.
		if not filename:
			return
		self.parse_file(filename)
3450 diff --git a/catalyst/main.py b/catalyst/main.py
3451 new file mode 100644
3452 index 0000000..aebb495
3453 --- /dev/null
3454 +++ b/catalyst/main.py
3455 @@ -0,0 +1,428 @@
3456 +#!/usr/bin/python2 -OO
3457 +
3458 +# Maintained in full by:
3459 +# Catalyst Team <catalyst@g.o>
3460 +# Release Engineering Team <releng@g.o>
3461 +# Andrew Gaffney <agaffney@g.o>
3462 +# Chris Gianelloni <wolf31o2@××××××××.org>
3463 +# $Id$
3464 +
3465 +import os
3466 +import sys
3467 +import imp
3468 +import string
3469 +import getopt
3470 +import pdb
3471 +import os.path
3472 +
3473 +__selfpath__ = os.path.abspath(os.path.dirname(__file__))
3474 +
3475 +sys.path.append(__selfpath__ + "/modules")
3476 +
3477 +import catalyst.config
3478 +import catalyst.util
3479 +from catalyst.modules.catalyst_support import (required_build_targets,
3480 + valid_build_targets, CatalystError, hash_map, find_binary, LockInUse)
3481 +
3482 +__maintainer__="Catalyst <catalyst@g.o>"
3483 +__version__="2.0.15"
3484 +
3485 +conf_values={}
3486 +
def usage():
	"""Print the command-line usage summary to stdout."""
	# BUG FIX: the "-s" example had a stray trailing double-quote
	# (catalyst -s 20071121").
	print("""Usage catalyst [options] [-C variable=value...] [ -s identifier]
 -a --clear-autoresume clear autoresume flags
 -c --config use specified configuration file
 -C --cli catalyst commandline (MUST BE LAST OPTION)
 -d --debug enable debugging
 -f --file read specfile
 -F --fetchonly fetch files only
 -h --help print this help message
 -p --purge clear tmp dirs,package cache, autoresume flags
 -P --purgeonly clear tmp dirs,package cache, autoresume flags and exit
 -T --purgetmponly clear tmp dirs and autoresume flags and exit
 -s --snapshot generate a release snapshot
 -V --version display version information
 -v --verbose verbose output

Usage examples:

Using the commandline option (-C, --cli) to build a Portage snapshot:
catalyst -C target=snapshot version_stamp=my_date

Using the snapshot option (-s, --snapshot) to build a release snapshot:
catalyst -s 20071121

Using the specfile option (-f, --file) to build a stage target:
catalyst -f stage1-specfile.spec
""")
3514 +
3515 +
def version():
	"""Print the catalyst version/copyright banner to stdout."""
	print("Catalyst, version " + __version__)
	print("Copyright 2003-2008 Gentoo Foundation")
	print("Copyright 2008-2012 various authors")
	print("Distributed under the GNU General Public License version 2.1\n")
3521 +
3522 +def parse_config(myconfig):
3523 + # search a couple of different areas for the main config file
3524 + myconf={}
3525 + config_file=""
3526 +
3527 + confdefaults = {
3528 + "distdir": "/usr/portage/distfiles",
3529 + "hash_function": "crc32",
3530 + "icecream": "/var/cache/icecream",
3531 + "local_overlay": "/usr/local/portage",
3532 + "options": "",
3533 + "packagedir": "/usr/portage/packages",
3534 + "portdir": "/usr/portage",
3535 + "repo_name": "portage",
3536 + "sharedir": "/usr/share/catalyst",
3537 + "snapshot_name": "portage-",
3538 + "snapshot_cache": "/var/tmp/catalyst/snapshot_cache",
3539 + "storedir": "/var/tmp/catalyst",
3540 + }
3541 +
3542 + # first, try the one passed (presumably from the cmdline)
3543 + if myconfig:
3544 + if os.path.exists(myconfig):
3545 + print "Using command line specified Catalyst configuration file, "+myconfig
3546 + config_file=myconfig
3547 +
3548 + else:
3549 + print "!!! catalyst: Could not use specified configuration file "+\
3550 + myconfig
3551 + sys.exit(1)
3552 +
3553 + # next, try the default location
3554 + elif os.path.exists("/etc/catalyst/catalyst.conf"):
3555 + print "Using default Catalyst configuration file, /etc/catalyst/catalyst.conf"
3556 + config_file="/etc/catalyst/catalyst.conf"
3557 +
3558 + # can't find a config file (we are screwed), so bail out
3559 + else:
3560 + print "!!! catalyst: Could not find a suitable configuration file"
3561 + sys.exit(1)
3562 +
3563 + # now, try and parse the config file "config_file"
3564 + try:
3565 +# execfile(config_file, myconf, myconf)
3566 + myconfig = catalyst.config.ConfigParser(config_file)
3567 + myconf.update(myconfig.get_values())
3568 +
3569 + except:
3570 + print "!!! catalyst: Unable to parse configuration file, "+myconfig
3571 + sys.exit(1)
3572 +
3573 + # now, load up the values into conf_values so that we can use them
3574 + for x in confdefaults.keys():
3575 + if x in myconf:
3576 + print "Setting",x,"to config file value \""+myconf[x]+"\""
3577 + conf_values[x]=myconf[x]
3578 + else:
3579 + print "Setting",x,"to default value \""+confdefaults[x]+"\""
3580 + conf_values[x]=confdefaults[x]
3581 +
3582 + # add our python base directory to use for loading target arch's
3583 + conf_values["PythonDir"] = __selfpath__
3584 +
3585 + # parse out the rest of the options from the config file
3586 + if "autoresume" in string.split(conf_values["options"]):
3587 + print "Autoresuming support enabled."
3588 + conf_values["AUTORESUME"]="1"
3589 +
3590 + if "bindist" in string.split(conf_values["options"]):
3591 + print "Binary redistribution enabled"
3592 + conf_values["BINDIST"]="1"
3593 + else:
3594 + print "Bindist is not enabled in catalyst.conf"
3595 + print "Binary redistribution of generated stages/isos may be prohibited by law."
3596 + print "Please see the use description for bindist on any package you are including."
3597 +
3598 + if "ccache" in string.split(conf_values["options"]):
3599 + print "Compiler cache support enabled."
3600 + conf_values["CCACHE"]="1"
3601 +
3602 + if "clear-autoresume" in string.split(conf_values["options"]):
3603 + print "Cleaning autoresume flags support enabled."
3604 + conf_values["CLEAR_AUTORESUME"]="1"
3605 +
3606 + if "distcc" in string.split(conf_values["options"]):
3607 + print "Distcc support enabled."
3608 + conf_values["DISTCC"]="1"
3609 +
3610 + if "icecream" in string.split(conf_values["options"]):
3611 + print "Icecream compiler cluster support enabled."
3612 + conf_values["ICECREAM"]="1"
3613 +
3614 + if "kerncache" in string.split(conf_values["options"]):
3615 + print "Kernel cache support enabled."
3616 + conf_values["KERNCACHE"]="1"
3617 +
3618 + if "pkgcache" in string.split(conf_values["options"]):
3619 + print "Package cache support enabled."
3620 + conf_values["PKGCACHE"]="1"
3621 +
3622 + if "preserve_libs" in string.split(conf_values["options"]):
3623 + print "Preserving libs during unmerge."
3624 + conf_values["PRESERVE_LIBS"]="1"
3625 +
3626 + if "purge" in string.split(conf_values["options"]):
3627 + print "Purge support enabled."
3628 + conf_values["PURGE"]="1"
3629 +
3630 + if "seedcache" in string.split(conf_values["options"]):
3631 + print "Seed cache support enabled."
3632 + conf_values["SEEDCACHE"]="1"
3633 +
3634 + if "snapcache" in string.split(conf_values["options"]):
3635 + print "Snapshot cache support enabled."
3636 + conf_values["SNAPCACHE"]="1"
3637 +
3638 + if "digests" in myconf:
3639 + conf_values["digests"]=myconf["digests"]
3640 + if "contents" in myconf:
3641 + conf_values["contents"]=myconf["contents"]
3642 +
3643 + if "envscript" in myconf:
3644 + print "Envscript support enabled."
3645 + conf_values["ENVSCRIPT"]=myconf["envscript"]
3646 +
3647 + if "var_tmpfs_portage" in myconf:
3648 + conf_values["var_tmpfs_portage"]=myconf["var_tmpfs_portage"];
3649 +
3650 + if "port_logdir" in myconf:
3651 + conf_values["port_logdir"]=myconf["port_logdir"];
3652 +
def import_modules():
	"""Load catalyst's target plugins from <PythonDir>/modules.

	Required build targets are imported for their side effects only;
	valid (selectable) build targets additionally register themselves
	into the returned targetmap via their register() hook.
	Returns: dict mapping target name -> target class.
	Raises CatalystError if a plugin file is missing; exits on ImportError.
	"""
	# import catalyst's own modules (i.e. catalyst_support and the arch modules)
	targetmap={}

	try:
		module_dir = __selfpath__ + "/modules/"
		# Pass 1: modules every run depends on; loaded but not registered.
		for x in required_build_targets:
			try:
				fh=open(module_dir + x + ".py")
				module=imp.load_module(x, fh,"modules/" + x + ".py",
					(".py", "r", imp.PY_SOURCE))
				fh.close()

			except IOError:
				raise CatalystError, "Can't find " + x + ".py plugin in " + \
					module_dir
		# Pass 2: selectable targets; each adds its entries to targetmap.
		for x in valid_build_targets:
			try:
				fh=open(module_dir + x + ".py")
				module=imp.load_module(x, fh, "modules/" + x + ".py",
					(".py", "r", imp.PY_SOURCE))
				module.register(targetmap)
				fh.close()

			except IOError:
				raise CatalystError,"Can't find " + x + ".py plugin in " + \
					module_dir

	except ImportError:
		# A plugin existed but failed to import -- unrecoverable.
		print "!!! catalyst: Python modules not found in "+\
			module_dir + "; exiting."
		sys.exit(1)

	return targetmap
3687 +
def build_target(addlargs, targetmap):
	"""Instantiate and run the requested build target.

	addlargs  -- spec/cmdline argument dict; must contain "target"
	targetmap -- target name -> class mapping from import_modules()
	Exits with status 1 on build failure.
	"""
	try:
		if addlargs["target"] not in targetmap:
			raise CatalystError("Target \"" + addlargs["target"] + "\" not available.")

		mytarget = targetmap[addlargs["target"]](conf_values, addlargs)

		mytarget.run()

	except (KeyboardInterrupt, SystemExit):
		# BUG FIX: the previous bare except also swallowed Ctrl-C and
		# sys.exit(), which made main()'s KeyboardInterrupt handler dead
		# code.  Let deliberate aborts propagate.
		raise
	except:
		catalyst.util.print_traceback()
		print("!!! catalyst: Error encountered during run of target " + addlargs["target"])
		sys.exit(1)
3701 +
3702 +def main():
3703 + targetmap={}
3704 +
3705 + version()
3706 + if os.getuid() != 0:
3707 + # catalyst cannot be run as a normal user due to chroots, mounts, etc
3708 + print "!!! catalyst: This script requires root privileges to operate"
3709 + sys.exit(2)
3710 +
3711 + # we need some options in order to work correctly
3712 + if len(sys.argv) < 2:
3713 + usage()
3714 + sys.exit(2)
3715 +
3716 + # parse out the command line arguments
3717 + try:
3718 + opts,args = getopt.getopt(sys.argv[1:], "apPThvdc:C:f:FVs:", ["purge", "purgeonly", "purgetmponly", "help", "version", "debug",\
3719 + "clear-autoresume", "config=", "cli=", "file=", "fetch", "verbose","snapshot="])
3720 +
3721 + except getopt.GetoptError:
3722 + usage()
3723 + sys.exit(2)
3724 +
3725 + # defaults for commandline opts
3726 + debug=False
3727 + verbose=False
3728 + fetch=False
3729 + myconfig=""
3730 + myspecfile=""
3731 + mycmdline=[]
3732 + myopts=[]
3733 +
3734 + # check preconditions
3735 + if len(opts) == 0:
3736 + print "!!! catalyst: please specify one of either -f or -C\n"
3737 + usage()
3738 + sys.exit(2)
3739 +
3740 + run = False
3741 + for o, a in opts:
3742 + if o in ("-h", "--help"):
3743 + usage()
3744 + sys.exit(1)
3745 +
3746 + if o in ("-V", "--version"):
3747 + print "Catalyst version "+__version__
3748 + sys.exit(1)
3749 +
3750 + if o in ("-d", "--debug"):
3751 + conf_values["DEBUG"]="1"
3752 + conf_values["VERBOSE"]="1"
3753 +
3754 + if o in ("-c", "--config"):
3755 + myconfig=a
3756 +
3757 + if o in ("-C", "--cli"):
3758 + run = True
3759 + x=sys.argv.index(o)+1
3760 + while x < len(sys.argv):
3761 + mycmdline.append(sys.argv[x])
3762 + x=x+1
3763 +
3764 + if o in ("-f", "--file"):
3765 + run = True
3766 + myspecfile=a
3767 +
3768 + if o in ("-F", "--fetchonly"):
3769 + conf_values["FETCH"]="1"
3770 +
3771 + if o in ("-v", "--verbose"):
3772 + conf_values["VERBOSE"]="1"
3773 +
3774 + if o in ("-s", "--snapshot"):
3775 + if len(sys.argv) < 3:
3776 + print "!!! catalyst: missing snapshot identifier\n"
3777 + usage()
3778 + sys.exit(2)
3779 + else:
3780 + run = True
3781 + mycmdline.append("target=snapshot")
3782 + mycmdline.append("version_stamp="+a)
3783 +
3784 + if o in ("-p", "--purge"):
3785 + conf_values["PURGE"] = "1"
3786 +
3787 + if o in ("-P", "--purgeonly"):
3788 + conf_values["PURGEONLY"] = "1"
3789 +
3790 + if o in ("-T", "--purgetmponly"):
3791 + conf_values["PURGETMPONLY"] = "1"
3792 +
3793 + if o in ("-a", "--clear-autoresume"):
3794 + conf_values["CLEAR_AUTORESUME"] = "1"
3795 +
3796 + if not run:
3797 + print "!!! catalyst: please specify one of either -f or -C\n"
3798 + usage()
3799 + sys.exit(2)
3800 +
3801 + # import configuration file and import our main module using those settings
3802 + parse_config(myconfig)
3803 +
3804 + # Start checking that digests are valid now that the hash_map was imported
3805 + # from catalyst_support
3806 + if "digests" in conf_values:
3807 + for i in conf_values["digests"].split():
3808 + if i not in hash_map:
3809 + print
3810 + print i+" is not a valid digest entry"
3811 + print "Valid digest entries:"
3812 + print hash_map.keys()
3813 + print
3814 + print "Catalyst aborting...."
3815 + sys.exit(2)
3816 + if find_binary(hash_map[i][1]) == None:
3817 + print
3818 + print "digest="+i
3819 + print "\tThe "+hash_map[i][1]+\
3820 + " binary was not found. It needs to be in your system path"
3821 + print
3822 + print "Catalyst aborting...."
3823 + sys.exit(2)
3824 + if "hash_function" in conf_values:
3825 + if conf_values["hash_function"] not in hash_map:
3826 + print
3827 + print conf_values["hash_function"]+\
3828 + " is not a valid hash_function entry"
3829 + print "Valid hash_function entries:"
3830 + print hash_map.keys()
3831 + print
3832 + print "Catalyst aborting...."
3833 + sys.exit(2)
3834 + if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
3835 + print
3836 + print "hash_function="+conf_values["hash_function"]
3837 + print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
3838 + " binary was not found. It needs to be in your system path"
3839 + print
3840 + print "Catalyst aborting...."
3841 + sys.exit(2)
3842 +
3843 + # import the rest of the catalyst modules
3844 + targetmap=import_modules()
3845 +
3846 + addlargs={}
3847 +
3848 + if myspecfile:
3849 + spec = catalyst.config.SpecParser(myspecfile)
3850 + addlargs.update(spec.get_values())
3851 +
3852 + if mycmdline:
3853 + try:
3854 + cmdline = catalyst.config.ConfigParser()
3855 + cmdline.parse_lines(mycmdline)
3856 + addlargs.update(cmdline.get_values())
3857 + except CatalystError:
3858 + print "!!! catalyst: Could not parse commandline, exiting."
3859 + sys.exit(1)
3860 +
3861 + if "target" not in addlargs:
3862 + raise CatalystError, "Required value \"target\" not specified."
3863 +
3864 + # everything is setup, so the build is a go
3865 + try:
3866 + build_target(addlargs, targetmap)
3867 +
3868 + except CatalystError:
3869 + print
3870 + print "Catalyst aborting...."
3871 + sys.exit(2)
3872 + except KeyboardInterrupt:
3873 + print "\nCatalyst build aborted due to user interrupt ( Ctrl-C )"
3874 + print
3875 + print "Catalyst aborting...."
3876 + sys.exit(2)
3877 + except LockInUse:
3878 + print "Catalyst aborting...."
3879 + sys.exit(2)
3880 + except:
3881 + print "Catalyst aborting...."
3882 + raise
3883 + sys.exit(2)
3884 diff --git a/catalyst/modules/__init__.py b/catalyst/modules/__init__.py
3885 new file mode 100644
3886 index 0000000..8b13789
3887 --- /dev/null
3888 +++ b/catalyst/modules/__init__.py
3889 @@ -0,0 +1 @@
3890 +
3891 diff --git a/catalyst/modules/builder.py b/catalyst/modules/builder.py
3892 new file mode 100644
3893 index 0000000..ad27d78
3894 --- /dev/null
3895 +++ b/catalyst/modules/builder.py
3896 @@ -0,0 +1,20 @@
3897 +
3898 +class generic:
3899 + def __init__(self,myspec):
3900 + self.settings=myspec
3901 +
3902 + def mount_safety_check(self):
3903 + """
3904 + Make sure that no bind mounts exist in chrootdir (to use before
3905 + cleaning the directory, to make sure we don't wipe the contents of
3906 + a bind mount
3907 + """
3908 + pass
3909 +
3910 + def mount_all(self):
3911 + """do all bind mounts"""
3912 + pass
3913 +
3914 + def umount_all(self):
3915 + """unmount all bind mounts"""
3916 + pass
3917 diff --git a/catalyst/modules/catalyst_lock.py b/catalyst/modules/catalyst_lock.py
3918 new file mode 100644
3919 index 0000000..5311cf8
3920 --- /dev/null
3921 +++ b/catalyst/modules/catalyst_lock.py
3922 @@ -0,0 +1,468 @@
3923 +#!/usr/bin/python
3924 +import os
3925 +import fcntl
3926 +import errno
3927 +import sys
3928 +import string
3929 +import time
3930 +from catalyst_support import *
3931 +
def writemsg(mystr):
	"""Write *mystr* to stderr and flush so it appears immediately."""
	stream = sys.stderr
	stream.write(mystr)
	stream.flush()
3935 +
3936 +class LockDir:
3937 + locking_method=fcntl.flock
3938 + lock_dirs_in_use=[]
3939 + die_on_failed_lock=True
3940 + def __del__(self):
3941 + self.clean_my_hardlocks()
3942 + self.delete_lock_from_path_list()
3943 + if self.islocked():
3944 + self.fcntl_unlock()
3945 +
3946 + def __init__(self,lockdir):
3947 + self.locked=False
3948 + self.myfd=None
3949 + self.set_gid(250)
3950 + self.locking_method=LockDir.locking_method
3951 + self.set_lockdir(lockdir)
3952 + self.set_lockfilename(".catalyst_lock")
3953 + self.set_lockfile()
3954 +
3955 + if LockDir.lock_dirs_in_use.count(lockdir)>0:
3956 + raise "This directory already associated with a lock object"
3957 + else:
3958 + LockDir.lock_dirs_in_use.append(lockdir)
3959 +
3960 + self.hardlock_paths={}
3961 +
3962 + def delete_lock_from_path_list(self):
3963 + i=0
3964 + try:
3965 + if LockDir.lock_dirs_in_use:
3966 + for x in LockDir.lock_dirs_in_use:
3967 + if LockDir.lock_dirs_in_use[i] == self.lockdir:
3968 + del LockDir.lock_dirs_in_use[i]
3969 + break
3970 + i=i+1
3971 + except AttributeError:
3972 + pass
3973 +
3974 + def islocked(self):
3975 + if self.locked:
3976 + return True
3977 + else:
3978 + return False
3979 +
3980 + def set_gid(self,gid):
3981 + if not self.islocked():
3982 +# if "DEBUG" in self.settings:
3983 +# print "setting gid to", gid
3984 + self.gid=gid
3985 +
3986 + def set_lockdir(self,lockdir):
3987 + if not os.path.exists(lockdir):
3988 + os.makedirs(lockdir)
3989 + if os.path.isdir(lockdir):
3990 + if not self.islocked():
3991 + if lockdir[-1] == "/":
3992 + lockdir=lockdir[:-1]
3993 + self.lockdir=normpath(lockdir)
3994 +# if "DEBUG" in self.settings:
3995 +# print "setting lockdir to", self.lockdir
3996 + else:
3997 + raise "the lock object needs a path to a dir"
3998 +
3999 + def set_lockfilename(self,lockfilename):
4000 + if not self.islocked():
4001 + self.lockfilename=lockfilename
4002 +# if "DEBUG" in self.settings:
4003 +# print "setting lockfilename to", self.lockfilename
4004 +
4005 + def set_lockfile(self):
4006 + if not self.islocked():
4007 + self.lockfile=normpath(self.lockdir+'/'+self.lockfilename)
4008 +# if "DEBUG" in self.settings:
4009 +# print "setting lockfile to", self.lockfile
4010 +
4011 + def read_lock(self):
4012 + if not self.locking_method == "HARDLOCK":
4013 + self.fcntl_lock("read")
4014 + else:
4015 + print "HARDLOCKING doesnt support shared-read locks"
4016 + print "using exclusive write locks"
4017 + self.hard_lock()
4018 +
4019 + def write_lock(self):
4020 + if not self.locking_method == "HARDLOCK":
4021 + self.fcntl_lock("write")
4022 + else:
4023 + self.hard_lock()
4024 +
4025 + def unlock(self):
4026 + if not self.locking_method == "HARDLOCK":
4027 + self.fcntl_unlock()
4028 + else:
4029 + self.hard_unlock()
4030 +
4031 + def fcntl_lock(self,locktype):
4032 + if self.myfd==None:
4033 + if not os.path.exists(os.path.dirname(self.lockdir)):
4034 + raise DirectoryNotFound, os.path.dirname(self.lockdir)
4035 + if not os.path.exists(self.lockfile):
4036 + old_mask=os.umask(000)
4037 + self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
4038 + try:
4039 + if os.stat(self.lockfile).st_gid != self.gid:
4040 + os.chown(self.lockfile,os.getuid(),self.gid)
4041 + except SystemExit, e:
4042 + raise
4043 + except OSError, e:
4044 + if e[0] == 2: #XXX: No such file or directory
4045 + return self.fcntl_locking(locktype)
4046 + else:
4047 + writemsg("Cannot chown a lockfile. This could cause inconvenience later.\n")
4048 +
4049 + os.umask(old_mask)
4050 + else:
4051 + self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
4052 +
4053 + try:
4054 + if locktype == "read":
4055 + self.locking_method(self.myfd,fcntl.LOCK_SH|fcntl.LOCK_NB)
4056 + else:
4057 + self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
4058 + except IOError, e:
4059 + if "errno" not in dir(e):
4060 + raise
4061 + if e.errno == errno.EAGAIN:
4062 + if not LockDir.die_on_failed_lock:
4063 + # Resource temp unavailable; eg, someone beat us to the lock.
4064 + writemsg("waiting for lock on %s\n" % self.lockfile)
4065 +
4066 + # Try for the exclusive or shared lock again.
4067 + if locktype == "read":
4068 + self.locking_method(self.myfd,fcntl.LOCK_SH)
4069 + else:
4070 + self.locking_method(self.myfd,fcntl.LOCK_EX)
4071 + else:
4072 + raise LockInUse,self.lockfile
4073 + elif e.errno == errno.ENOLCK:
4074 + pass
4075 + else:
4076 + raise
4077 + if not os.path.exists(self.lockfile):
4078 + os.close(self.myfd)
4079 + self.myfd=None
4080 + #writemsg("lockfile recurse\n")
4081 + self.fcntl_lock(locktype)
4082 + else:
4083 + self.locked=True
4084 + #writemsg("Lockfile obtained\n")
4085 +
4086 + def fcntl_unlock(self):
4087 + import fcntl
4088 + unlinkfile = 1
4089 + if not os.path.exists(self.lockfile):
4090 + print "lockfile does not exist '%s'" % self.lockfile
4091 + if (self.myfd != None):
4092 + try:
4093 + os.close(myfd)
4094 + self.myfd=None
4095 + except:
4096 + pass
4097 + return False
4098 +
4099 + try:
4100 + if self.myfd == None:
4101 + self.myfd = os.open(self.lockfile, os.O_WRONLY,0660)
4102 + unlinkfile = 1
4103 + self.locking_method(self.myfd,fcntl.LOCK_UN)
4104 + except SystemExit, e:
4105 + raise
4106 + except Exception, e:
4107 + os.close(self.myfd)
4108 + self.myfd=None
4109 + raise IOError, "Failed to unlock file '%s'\n" % self.lockfile
4110 + try:
4111 + # This sleep call was added to allow other processes that are
4112 + # waiting for a lock to be able to grab it before it is deleted.
4113 + # lockfile() already accounts for this situation, however, and
4114 + # the sleep here adds more time than is saved overall, so am
4115 + # commenting until it is proved necessary.
4116 + #time.sleep(0.0001)
4117 + if unlinkfile:
4118 + InUse=False
4119 + try:
4120 + self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
4121 + except:
4122 + print "Read lock may be in effect. skipping lockfile delete..."
4123 + InUse=True
4124 + # We won the lock, so there isn't competition for it.
4125 + # We can safely delete the file.
4126 + #writemsg("Got the lockfile...\n")
4127 + #writemsg("Unlinking...\n")
4128 + self.locking_method(self.myfd,fcntl.LOCK_UN)
4129 + if not InUse:
4130 + os.unlink(self.lockfile)
4131 + os.close(self.myfd)
4132 + self.myfd=None
4133 +# if "DEBUG" in self.settings:
4134 +# print "Unlinked lockfile..."
4135 + except SystemExit, e:
4136 + raise
4137 + except Exception, e:
4138 + # We really don't care... Someone else has the lock.
4139 + # So it is their problem now.
4140 + print "Failed to get lock... someone took it."
4141 + print str(e)
4142 +
4143 + # Why test lockfilename? Because we may have been handed an
4144 + # fd originally, and the caller might not like having their
4145 + # open fd closed automatically on them.
4146 + #if type(lockfilename) == types.StringType:
4147 + # os.close(myfd)
4148 +
4149 + if (self.myfd != None):
4150 + os.close(self.myfd)
4151 + self.myfd=None
4152 + self.locked=False
4153 + time.sleep(.0001)
4154 +
4155 + def hard_lock(self,max_wait=14400):
4156 + """Does the NFS, hardlink shuffle to ensure locking on the disk.
4157 + We create a PRIVATE lockfile, that is just a placeholder on the disk.
4158 + Then we HARDLINK the real lockfile to that private file.
4159 + If our file can 2 references, then we have the lock. :)
4160 + Otherwise we lather, rise, and repeat.
4161 + We default to a 4 hour timeout.
4162 + """
4163 +
4164 + self.myhardlock = self.hardlock_name(self.lockdir)
4165 +
4166 + start_time = time.time()
4167 + reported_waiting = False
4168 +
4169 + while(time.time() < (start_time + max_wait)):
4170 + # We only need it to exist.
4171 + self.myfd = os.open(self.myhardlock, os.O_CREAT|os.O_RDWR,0660)
4172 + os.close(self.myfd)
4173 +
4174 + self.add_hardlock_file_to_cleanup()
4175 + if not os.path.exists(self.myhardlock):
4176 + raise FileNotFound, "Created lockfile is missing: %(filename)s" % {"filename":self.myhardlock}
4177 + try:
4178 + res = os.link(self.myhardlock, self.lockfile)
4179 + except SystemExit, e:
4180 + raise
4181 + except Exception, e:
4182 +# if "DEBUG" in self.settings:
4183 +# print "lockfile(): Hardlink: Link failed."
4184 +# print "Exception: ",e
4185 + pass
4186 +
4187 + if self.hardlink_is_mine(self.myhardlock, self.lockfile):
4188 + # We have the lock.
4189 + if reported_waiting:
4190 + print
4191 + return True
4192 +
4193 + if reported_waiting:
4194 + writemsg(".")
4195 + else:
4196 + reported_waiting = True
4197 + print
4198 + print "Waiting on (hardlink) lockfile: (one '.' per 3 seconds)"
4199 + print "Lockfile: " + self.lockfile
4200 + time.sleep(3)
4201 +
4202 + os.unlink(self.myhardlock)
4203 + return False
4204 +
4205 + def hard_unlock(self):
4206 + try:
4207 + if os.path.exists(self.myhardlock):
4208 + os.unlink(self.myhardlock)
4209 + if os.path.exists(self.lockfile):
4210 + os.unlink(self.lockfile)
4211 + except SystemExit, e:
4212 + raise
4213 + except:
4214 + writemsg("Something strange happened to our hardlink locks.\n")
4215 +
4216 + def add_hardlock_file_to_cleanup(self):
4217 + #mypath = self.normpath(path)
4218 + if os.path.isdir(self.lockdir) and os.path.isfile(self.myhardlock):
4219 + self.hardlock_paths[self.lockdir]=self.myhardlock
4220 +
4221 + def remove_hardlock_file_from_cleanup(self):
4222 + if self.lockdir in self.hardlock_paths:
4223 + del self.hardlock_paths[self.lockdir]
4224 + print self.hardlock_paths
4225 +
4226 + def hardlock_name(self, path):
4227 + mypath=path+"/.hardlock-"+os.uname()[1]+"-"+str(os.getpid())
4228 + newpath = os.path.normpath(mypath)
4229 + if len(newpath) > 1:
4230 + if newpath[1] == "/":
4231 + newpath = "/"+newpath.lstrip("/")
4232 + return newpath
4233 +
4234 + def hardlink_is_mine(self,link,lock):
4235 + import stat
4236 + try:
4237 + myhls = os.stat(link)
4238 + mylfs = os.stat(lock)
4239 + except SystemExit, e:
4240 + raise
4241 + except:
4242 + myhls = None
4243 + mylfs = None
4244 +
4245 + if myhls:
4246 + if myhls[stat.ST_NLINK] == 2:
4247 + return True
4248 + if mylfs:
4249 + if mylfs[stat.ST_INO] == myhls[stat.ST_INO]:
4250 + return True
4251 + return False
4252 +
4253 + def hardlink_active(lock):
4254 + if not os.path.exists(lock):
4255 + return False
4256 +
4257 + def clean_my_hardlocks(self):
4258 + try:
4259 + for x in self.hardlock_paths.keys():
4260 + self.hardlock_cleanup(x)
4261 + except AttributeError:
4262 + pass
4263 +
4264 + def hardlock_cleanup(self,path):
4265 + mypid = str(os.getpid())
4266 + myhost = os.uname()[1]
4267 + mydl = os.listdir(path)
4268 + results = []
4269 + mycount = 0
4270 +
4271 + mylist = {}
4272 + for x in mydl:
4273 + filepath=path+"/"+x
4274 + if os.path.isfile(filepath):
4275 + parts = filepath.split(".hardlock-")
4276 + if len(parts) == 2:
4277 + filename = parts[0]
4278 + hostpid = parts[1].split("-")
4279 + host = "-".join(hostpid[:-1])
4280 + pid = hostpid[-1]
4281 + if filename not in mylist:
4282 + mylist[filename] = {}
4283 +
4284 + if host not in mylist[filename]:
4285 + mylist[filename][host] = []
4286 + mylist[filename][host].append(pid)
4287 + mycount += 1
4288 + else:
4289 + mylist[filename][host].append(pid)
4290 + mycount += 1
4291 +
4292 +
4293 + results.append("Found %(count)s locks" % {"count":mycount})
4294 + for x in mylist.keys():
4295 + if myhost in mylist[x]:
4296 + mylockname = self.hardlock_name(x)
4297 + if self.hardlink_is_mine(mylockname, self.lockfile) or \
4298 + not os.path.exists(self.lockfile):
4299 + for y in mylist[x].keys():
4300 + for z in mylist[x][y]:
4301 + filename = x+".hardlock-"+y+"-"+z
4302 + if filename == mylockname:
4303 + self.hard_unlock()
4304 + continue
4305 + try:
4306 + # We're sweeping through, unlinking everyone's locks.
4307 + os.unlink(filename)
4308 + results.append("Unlinked: " + filename)
4309 + except SystemExit, e:
4310 + raise
4311 + except Exception,e:
4312 + pass
4313 + try:
4314 + os.unlink(x)
4315 + results.append("Unlinked: " + x)
4316 + os.unlink(mylockname)
4317 + results.append("Unlinked: " + mylockname)
4318 + except SystemExit, e:
4319 + raise
4320 + except Exception,e:
4321 + pass
4322 + else:
4323 + try:
4324 + os.unlink(mylockname)
4325 + results.append("Unlinked: " + mylockname)
4326 + except SystemExit, e:
4327 + raise
4328 + except Exception,e:
4329 + pass
4330 + return results
4331 +
if __name__ == "__main__":
	# Manual smoke test: exercises write/read locking on /tmp/lock_path,
	# printing timestamps between acquire/release cycles.

	def lock_work():
		# Simulated work: tick once per second for five seconds.
		print
		for i in range(1,6):
			print i,time.time()
			time.sleep(1)
		print
	def normpath(mypath):
		# Local copy of catalyst_support.normpath for standalone runs.
		newpath = os.path.normpath(mypath)
		if len(newpath) > 1:
			if newpath[1] == "/":
				newpath = "/"+newpath.lstrip("/")
		return newpath

	print "Lock 5 starting"
	import time
	Lock1=LockDir("/tmp/lock_path")
	Lock1.write_lock()
	print "Lock1 write lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

	Lock1.read_lock()
	print "Lock1 read lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

	Lock1.read_lock()
	print "Lock1 read lock"

	Lock1.write_lock()
	print "Lock1 write lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

	Lock1.read_lock()
	print "Lock1 read lock"

	lock_work()

	Lock1.unlock()
	print "Lock1 unlock"

#Lock1.write_lock()
#time.sleep(2)
#Lock1.unlock()
	##Lock1.write_lock()
	#time.sleep(2)
	#Lock1.unlock()
4391 diff --git a/catalyst/modules/catalyst_support.py b/catalyst/modules/catalyst_support.py
4392 new file mode 100644
4393 index 0000000..316dfa3
4394 --- /dev/null
4395 +++ b/catalyst/modules/catalyst_support.py
4396 @@ -0,0 +1,718 @@
4397 +
4398 +import sys,string,os,types,re,signal,traceback,time
4399 +#import md5,sha
4400 +selinux_capable = False
4401 +#userpriv_capable = (os.getuid() == 0)
4402 +#fakeroot_capable = False
4403 +BASH_BINARY = "/bin/bash"
4404 +
4405 +try:
4406 + import resource
4407 + max_fd_limit=resource.getrlimit(RLIMIT_NOFILE)
4408 +except SystemExit, e:
4409 + raise
4410 +except:
4411 + # hokay, no resource module.
4412 + max_fd_limit=256
4413 +
4414 +# pids this process knows of.
4415 +spawned_pids = []
4416 +
4417 +try:
4418 + import urllib
4419 +except SystemExit, e:
4420 + raise
4421 +
4422 +def cleanup(pids,block_exceptions=True):
4423 + """function to go through and reap the list of pids passed to it"""
4424 + global spawned_pids
4425 + if type(pids) == int:
4426 + pids = [pids]
4427 + for x in pids:
4428 + try:
4429 + os.kill(x,signal.SIGTERM)
4430 + if os.waitpid(x,os.WNOHANG)[1] == 0:
4431 + # feisty bugger, still alive.
4432 + os.kill(x,signal.SIGKILL)
4433 + os.waitpid(x,0)
4434 +
4435 + except OSError, oe:
4436 + if block_exceptions:
4437 + pass
4438 + if oe.errno not in (10,3):
4439 + raise oe
4440 + except SystemExit:
4441 + raise
4442 + except Exception:
4443 + if block_exceptions:
4444 + pass
4445 + try: spawned_pids.remove(x)
4446 + except IndexError: pass
4447 +
4448 +
4449 +
4450 +# a function to turn a string of non-printable characters into a string of
4451 +# hex characters
4452 +def hexify(str):
4453 + hexStr = string.hexdigits
4454 + r = ''
4455 + for ch in str:
4456 + i = ord(ch)
4457 + r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
4458 + return r
4459 +# hexify()
4460 +
4461 +def generate_contents(file,contents_function="auto",verbose=False):
4462 + try:
4463 + _ = contents_function
4464 + if _ == 'auto' and file.endswith('.iso'):
4465 + _ = 'isoinfo-l'
4466 + if (_ in ['tar-tv','auto']):
4467 + if file.endswith('.tgz') or file.endswith('.tar.gz'):
4468 + _ = 'tar-tvz'
4469 + elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
4470 + _ = 'tar-tvj'
4471 + elif file.endswith('.tar'):
4472 + _ = 'tar-tv'
4473 +
4474 + if _ == 'auto':
4475 + warn('File %r has unknown type for automatic detection.' % (file, ))
4476 + return None
4477 + else:
4478 + contents_function = _
4479 + _ = contents_map[contents_function]
4480 + return _[0](file,_[1],verbose)
4481 + except:
4482 + raise CatalystError,\
4483 + "Error generating contents, is appropriate utility (%s) installed on your system?" \
4484 + % (contents_function, )
4485 +
4486 +def calc_contents(file,cmd,verbose):
4487 + args={ 'file': file }
4488 + cmd=cmd % dict(args)
4489 + a=os.popen(cmd)
4490 + mylines=a.readlines()
4491 + a.close()
4492 + result="".join(mylines)
4493 + if verbose:
4494 + print result
4495 + return result
4496 +
4497 +# This contents map must be defined after the function calc_contents
4498 +# It is possible to call different functions from this but they must be defined
4499 +# before hash_map
4500 +# Key,function,cmd
4501 +contents_map={
4502 + # 'find' is disabled because it requires the source path, which is not
4503 + # always available
4504 + #"find" :[calc_contents,"find %(path)s"],
4505 + "tar-tv":[calc_contents,"tar tvf %(file)s"],
4506 + "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
4507 + "tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
4508 + "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
4509 + # isoinfo-f should be a last resort only
4510 + "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
4511 +}
4512 +
4513 +def generate_hash(file,hash_function="crc32",verbose=False):
4514 + try:
4515 + return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
4516 + hash_map[hash_function][3],verbose)
4517 + except:
4518 + raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
4519 +
4520 +def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
4521 + a=os.popen(cmd+" "+cmd_args+" "+file)
4522 + mylines=a.readlines()
4523 + a.close()
4524 + mylines=mylines[0].split()
4525 + result=mylines[0]
4526 + if verbose:
4527 + print id_string+" (%s) = %s" % (file, result)
4528 + return result
4529 +
4530 +def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
4531 + a=os.popen(cmd+" "+cmd_args+" "+file)
4532 + header=a.readline()
4533 + mylines=a.readline().split()
4534 + hash=mylines[0]
4535 + short_file=os.path.split(mylines[1])[1]
4536 + a.close()
4537 + result=header+hash+" "+short_file+"\n"
4538 + if verbose:
4539 + print header+" (%s) = %s" % (short_file, result)
4540 + return result
4541 +
4542 +# This hash map must be defined after the function calc_hash
4543 +# It is possible to call different functions from this but they must be defined
4544 +# before hash_map
4545 +# Key,function,cmd,cmd_args,Print string
4546 +hash_map={
4547 + "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
4548 + "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
4549 + "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
4550 + "gost":[calc_hash2,"shash","-a GOST","GOST"],\
4551 + "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
4552 + "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
4553 + "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
4554 + "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
4555 + "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
4556 + "md2":[calc_hash2,"shash","-a MD2","MD2"],\
4557 + "md4":[calc_hash2,"shash","-a MD4","MD4"],\
4558 + "md5":[calc_hash2,"shash","-a MD5","MD5"],\
4559 + "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
4560 + "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
4561 + "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
4562 + "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
4563 + "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
4564 + "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
4565 + "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
4566 + "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
4567 + "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
4568 + "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
4569 + "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
4570 + "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
4571 + "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
4572 + "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
4573 + "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
4574 + }
4575 +
4576 +def read_from_clst(file):
4577 + line = ''
4578 + myline = ''
4579 + try:
4580 + myf=open(file,"r")
4581 + except:
4582 + return -1
4583 + #raise CatalystError, "Could not open file "+file
4584 + for line in myf.readlines():
4585 + #line = string.replace(line, "\n", "") # drop newline
4586 + myline = myline + line
4587 + myf.close()
4588 + return myline
4589 +# read_from_clst
4590 +
4591 +# these should never be touched
4592 +required_build_targets=["generic_target","generic_stage_target"]
4593 +
4594 +# new build types should be added here
4595 +valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
4596 + "livecd_stage1_target","livecd_stage2_target","embedded_target",
4597 + "tinderbox_target","snapshot_target","netboot_target","netboot2_target"]
4598 +
4599 +required_config_file_values=["storedir","sharedir","distdir","portdir"]
4600 +valid_config_file_values=required_config_file_values[:]
4601 +valid_config_file_values.append("PKGCACHE")
4602 +valid_config_file_values.append("KERNCACHE")
4603 +valid_config_file_values.append("CCACHE")
4604 +valid_config_file_values.append("DISTCC")
4605 +valid_config_file_values.append("ICECREAM")
4606 +valid_config_file_values.append("ENVSCRIPT")
4607 +valid_config_file_values.append("AUTORESUME")
4608 +valid_config_file_values.append("FETCH")
4609 +valid_config_file_values.append("CLEAR_AUTORESUME")
4610 +valid_config_file_values.append("options")
4611 +valid_config_file_values.append("DEBUG")
4612 +valid_config_file_values.append("VERBOSE")
4613 +valid_config_file_values.append("PURGE")
4614 +valid_config_file_values.append("PURGEONLY")
4615 +valid_config_file_values.append("SNAPCACHE")
4616 +valid_config_file_values.append("snapshot_cache")
4617 +valid_config_file_values.append("hash_function")
4618 +valid_config_file_values.append("digests")
4619 +valid_config_file_values.append("contents")
4620 +valid_config_file_values.append("SEEDCACHE")
4621 +
4622 +verbosity=1
4623 +
4624 +def list_bashify(mylist):
4625 + if type(mylist)==types.StringType:
4626 + mypack=[mylist]
4627 + else:
4628 + mypack=mylist[:]
4629 + for x in range(0,len(mypack)):
4630 + # surround args with quotes for passing to bash,
4631 + # allows things like "<" to remain intact
4632 + mypack[x]="'"+mypack[x]+"'"
4633 + mypack=string.join(mypack)
4634 + return mypack
4635 +
4636 +def list_to_string(mylist):
4637 + if type(mylist)==types.StringType:
4638 + mypack=[mylist]
4639 + else:
4640 + mypack=mylist[:]
4641 + for x in range(0,len(mypack)):
4642 + # surround args with quotes for passing to bash,
4643 + # allows things like "<" to remain intact
4644 + mypack[x]=mypack[x]
4645 + mypack=string.join(mypack)
4646 + return mypack
4647 +
4648 +class CatalystError(Exception):
4649 + def __init__(self, message):
4650 + if message:
4651 + (type,value)=sys.exc_info()[:2]
4652 + if value!=None:
4653 + print
4654 + print traceback.print_exc(file=sys.stdout)
4655 + print
4656 + print "!!! catalyst: "+message
4657 + print
4658 +
4659 +class LockInUse(Exception):
4660 + def __init__(self, message):
4661 + if message:
4662 + #(type,value)=sys.exc_info()[:2]
4663 + #if value!=None:
4664 + #print
4665 + #kprint traceback.print_exc(file=sys.stdout)
4666 + print
4667 + print "!!! catalyst lock file in use: "+message
4668 + print
4669 +
4670 +def die(msg=None):
4671 + warn(msg)
4672 + sys.exit(1)
4673 +
4674 +def warn(msg):
4675 + print "!!! catalyst: "+msg
4676 +
4677 +def find_binary(myc):
4678 + """look through the environmental path for an executable file named whatever myc is"""
4679 + # this sucks. badly.
4680 + p=os.getenv("PATH")
4681 + if p == None:
4682 + return None
4683 + for x in p.split(":"):
4684 + #if it exists, and is executable
4685 + if os.path.exists("%s/%s" % (x,myc)) and os.stat("%s/%s" % (x,myc))[0] & 0x0248:
4686 + return "%s/%s" % (x,myc)
4687 + return None
4688 +
4689 +def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
4690 +	"""spawn mycommand as an argument to bash"""
4691 + args=[BASH_BINARY]
4692 + if not opt_name:
4693 + opt_name=mycommand.split()[0]
4694 + if "BASH_ENV" not in env:
4695 + env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
4696 + if debug:
4697 + args.append("-x")
4698 + args.append("-c")
4699 + args.append(mycommand)
4700 + return spawn(args,env=env,opt_name=opt_name,**keywords)
4701 +
4702 +#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
4703 +# collect_fds=[1],fd_pipes=None,**keywords):
4704 +
4705 +def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
4706 + collect_fds=[1],fd_pipes=None,**keywords):
4707 + """call spawn, collecting the output to fd's specified in collect_fds list
4708 +	emulate_gso is a compatibility hack to emulate commands.getstatusoutput's return, minus the
4709 + requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
4710 + 'lets let log only stdin and let stderr slide by'.
4711 +
4712 + emulate_gso was deprecated from the day it was added, so convert your code over.
4713 + spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
4714 + global selinux_capable
4715 + pr,pw=os.pipe()
4716 +
4717 + #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
4718 + # s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
4719 + # raise Exception,s
4720 +
4721 + if fd_pipes==None:
4722 + fd_pipes={}
4723 + fd_pipes[0] = 0
4724 +
4725 + for x in collect_fds:
4726 + fd_pipes[x] = pw
4727 + keywords["returnpid"]=True
4728 +
4729 + mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
4730 + os.close(pw)
4731 + if type(mypid) != types.ListType:
4732 + os.close(pr)
4733 + return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
4734 +
4735 + fd=os.fdopen(pr,"r")
4736 + mydata=fd.readlines()
4737 + fd.close()
4738 + if emulate_gso:
4739 + mydata=string.join(mydata)
4740 + if len(mydata) and mydata[-1] == "\n":
4741 + mydata=mydata[:-1]
4742 + retval=os.waitpid(mypid[0],0)[1]
4743 + cleanup(mypid)
4744 + if raw_exit_code:
4745 + return [retval,mydata]
4746 + retval=process_exit_code(retval)
4747 + return [retval, mydata]
4748 +
4749 +# base spawn function
4750 +def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
4751 + uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
4752 + selinux_context=None, raise_signals=False, func_call=False):
4753 + """base fork/execve function.
4754 + mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
4755 + environment, use the appropriate spawn call. This is a straight fork/exec code path.
4756 + Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
4757 + the forked process to said value. If path_lookup is on, a non-absolute command will be converted
4758 + to an absolute command, otherwise it returns None.
4759 +
4760 +	selinux_context is the desired context, dependent on selinux being available.
4761 + opt_name controls the name the processor goes by.
4762 + fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
4763 + current fd's raw fd #, desired #.
4764 +
4765 + func_call is a boolean for specifying to execute a python function- use spawn_func instead.
4766 + raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
4767 + if raw_input is on.
4768 +
4769 + logfile overloads the specified fd's to write to a tee process which logs to logfile
4770 + returnpid returns the relevant pids (a list, including the logging process if logfile is on).
4771 +
4772 + non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
4773 +	raw_exit_code controls whether the actual waitpid result is returned, or interpreted."""
4774 +
4775 + myc=''
4776 + if not func_call:
4777 + if type(mycommand)==types.StringType:
4778 + mycommand=mycommand.split()
4779 + myc = mycommand[0]
4780 + if not os.access(myc, os.X_OK):
4781 + if not path_lookup:
4782 + return None
4783 + myc = find_binary(myc)
4784 + if myc == None:
4785 + return None
4786 + mypid=[]
4787 + if logfile:
4788 + pr,pw=os.pipe()
4789 + mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
4790 + retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
4791 + if retval != 0:
4792 + # he's dead jim.
4793 + if raw_exit_code:
4794 + return retval
4795 + return process_exit_code(retval)
4796 +
4797 + if fd_pipes == None:
4798 + fd_pipes={}
4799 + fd_pipes[0] = 0
4800 + fd_pipes[1]=pw
4801 + fd_pipes[2]=pw
4802 +
4803 + if not opt_name:
4804 + opt_name = mycommand[0]
4805 + myargs=[opt_name]
4806 + myargs.extend(mycommand[1:])
4807 + global spawned_pids
4808 + mypid.append(os.fork())
4809 + if mypid[-1] != 0:
4810 + #log the bugger.
4811 + spawned_pids.extend(mypid)
4812 +
4813 + if mypid[-1] == 0:
4814 + if func_call:
4815 + spawned_pids = []
4816 +
4817 + # this may look ugly, but basically it moves file descriptors around to ensure no
4818 + # handles that are needed are accidentally closed during the final dup2 calls.
4819 + trg_fd=[]
4820 + if type(fd_pipes)==types.DictType:
4821 + src_fd=[]
4822 + k=fd_pipes.keys()
4823 + k.sort()
4824 +
4825 + #build list of which fds will be where, and where they are at currently
4826 + for x in k:
4827 + trg_fd.append(x)
4828 + src_fd.append(fd_pipes[x])
4829 +
4830 + # run through said list dup'ing descriptors so that they won't be waxed
4831 + # by other dup calls.
4832 + for x in range(0,len(trg_fd)):
4833 + if trg_fd[x] == src_fd[x]:
4834 + continue
4835 + if trg_fd[x] in src_fd[x+1:]:
4836 + new=os.dup2(trg_fd[x],max(src_fd) + 1)
4837 + os.close(trg_fd[x])
4838 + try:
4839 + while True:
4840 + src_fd[s.index(trg_fd[x])]=new
4841 + except SystemExit, e:
4842 + raise
4843 + except:
4844 + pass
4845 +
4846 + # transfer the fds to their final pre-exec position.
4847 + for x in range(0,len(trg_fd)):
4848 + if trg_fd[x] != src_fd[x]:
4849 + os.dup2(src_fd[x], trg_fd[x])
4850 + else:
4851 + trg_fd=[0,1,2]
4852 +
4853 + # wax all open descriptors that weren't requested be left open.
4854 + for x in range(0,max_fd_limit):
4855 + if x not in trg_fd:
4856 + try:
4857 + os.close(x)
4858 + except SystemExit, e:
4859 + raise
4860 + except:
4861 + pass
4862 +
4863 + # note this order must be preserved- can't change gid/groups if you change uid first.
4864 + if selinux_capable and selinux_context:
4865 + import selinux
4866 + selinux.setexec(selinux_context)
4867 + if gid:
4868 + os.setgid(gid)
4869 + if groups:
4870 + os.setgroups(groups)
4871 + if uid:
4872 + os.setuid(uid)
4873 + if umask:
4874 + os.umask(umask)
4875 + else:
4876 + os.umask(022)
4877 +
4878 + try:
4879 + #print "execing", myc, myargs
4880 + if func_call:
4881 + # either use a passed in func for interpretting the results, or return if no exception.
4882 + # note the passed in list, and dict are expanded.
4883 + if len(mycommand) == 4:
4884 + os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
4885 + try:
4886 + mycommand[0](*mycommand[1],**mycommand[2])
4887 + except Exception,e:
4888 + print "caught exception",e," in forked func",mycommand[0]
4889 + sys.exit(0)
4890 +
4891 + #os.execvp(myc,myargs)
4892 + os.execve(myc,myargs,env)
4893 + except SystemExit, e:
4894 + raise
4895 + except Exception, e:
4896 + if not func_call:
4897 + raise str(e)+":\n "+myc+" "+string.join(myargs)
4898 + print "func call failed"
4899 +
4900 + # If the execve fails, we need to report it, and exit
4901 + # *carefully* --- report error here
4902 + os._exit(1)
4903 + sys.exit(1)
4904 + return # should never get reached
4905 +
4906 + # if we were logging, kill the pipes.
4907 + if logfile:
4908 + os.close(pr)
4909 + os.close(pw)
4910 +
4911 + if returnpid:
4912 + return mypid
4913 +
4914 + # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
4915 + # if the main pid (mycommand) returned badly.
4916 + while len(mypid):
4917 + retval=os.waitpid(mypid[-1],0)[1]
4918 + if retval != 0:
4919 + cleanup(mypid[0:-1],block_exceptions=False)
4920 + # at this point we've killed all other kid pids generated via this call.
4921 + # return now.
4922 + if raw_exit_code:
4923 + return retval
4924 + return process_exit_code(retval,throw_signals=raise_signals)
4925 + else:
4926 + mypid.pop(-1)
4927 + cleanup(mypid)
4928 + return 0
4929 +
4930 +def cmd(mycmd,myexc="",env={}):
4931 + try:
4932 + sys.stdout.flush()
4933 + retval=spawn_bash(mycmd,env)
4934 + if retval != 0:
4935 + raise CatalystError,myexc
4936 + except:
4937 + raise
4938 +
4939 +def process_exit_code(retval,throw_signals=False):
4940 + """process a waitpid returned exit code, returning exit code if it exit'd, or the
4941 + signal if it died from signalling
4942 + if throw_signals is on, it raises a SystemExit if the process was signaled.
4943 + This is intended for usage with threads, although at the moment you can't signal individual
4944 + threads in python, only the master thread, so it's a questionable option."""
4945 + if (retval & 0xff)==0:
4946 + return retval >> 8 # return exit code
4947 + else:
4948 + if throw_signals:
4949 + #use systemexit, since portage is stupid about exception catching.
4950 + raise SystemExit()
4951 + return (retval & 0xff) << 8 # interrupted by signal
4952 +
4953 +def file_locate(settings,filelist,expand=1):
4954 + #if expand=1, non-absolute paths will be accepted and
4955 + # expanded to os.getcwd()+"/"+localpath if file exists
4956 + for myfile in filelist:
4957 + if myfile not in settings:
4958 + #filenames such as cdtar are optional, so we don't assume the variable is defined.
4959 + pass
4960 + else:
4961 + if len(settings[myfile])==0:
4962 + raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
4963 + if settings[myfile][0]=="/":
4964 + if not os.path.exists(settings[myfile]):
4965 + raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
4966 + elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
4967 + settings[myfile]=os.getcwd()+"/"+settings[myfile]
4968 + else:
4969 + raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
4970 +"""
4971 +Spec file format:
4972 +
4973 +The spec file format is a very simple and easy-to-use format for storing data. Here's an example
4974 +file:
4975 +
4976 +item1: value1
4977 +item2: foo bar oni
4978 +item3:
4979 + meep
4980 + bark
4981 + gleep moop
4982 +
4983 +This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
4984 +the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
4985 +would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
4986 +that the order of multiple-value items is preserved, but the order that the items themselves are
4987 +defined are not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
4988 +"item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
4989 +"""
4990 +
4991 +def parse_makeconf(mylines):
4992 + mymakeconf={}
4993 + pos=0
4994 + pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
4995 + while pos<len(mylines):
4996 + if len(mylines[pos])<=1:
4997 + #skip blanks
4998 + pos += 1
4999 + continue
5000 + if mylines[pos][0] in ["#"," ","\t"]:
5001 + #skip indented lines, comments
5002 + pos += 1
5003 + continue
5004 + else:
5005 + myline=mylines[pos]
5006 + mobj=pat.match(myline)
5007 + pos += 1
5008 + if mobj.group(2):
5009 + clean_string = re.sub(r"\"",r"",mobj.group(2))
5010 + mymakeconf[mobj.group(1)]=clean_string
5011 + return mymakeconf
5012 +
5013 +def read_makeconf(mymakeconffile):
5014 + if os.path.exists(mymakeconffile):
5015 + try:
5016 + try:
5017 + import snakeoil.fileutils
5018 + return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
5019 + except ImportError:
5020 + try:
5021 + import portage.util
5022 + return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
5023 + except:
5024 + try:
5025 + import portage_util
5026 + return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
5027 + except ImportError:
5028 + myf=open(mymakeconffile,"r")
5029 + mylines=myf.readlines()
5030 + myf.close()
5031 + return parse_makeconf(mylines)
5032 + except:
5033 + raise CatalystError, "Could not parse make.conf file "+mymakeconffile
5034 + else:
5035 + makeconf={}
5036 + return makeconf
5037 +
5038 +def msg(mymsg,verblevel=1):
5039 + if verbosity>=verblevel:
5040 + print mymsg
5041 +
5042 +def pathcompare(path1,path2):
5043 + # Change double slashes to slash
5044 + path1 = re.sub(r"//",r"/",path1)
5045 + path2 = re.sub(r"//",r"/",path2)
5046 + # Removing ending slash
5047 + path1 = re.sub("/$","",path1)
5048 + path2 = re.sub("/$","",path2)
5049 +
5050 + if path1 == path2:
5051 + return 1
5052 + return 0
5053 +
5054 +def ismount(path):
5055 + "enhanced to handle bind mounts"
5056 + if os.path.ismount(path):
5057 + return 1
5058 + a=os.popen("mount")
5059 + mylines=a.readlines()
5060 + a.close()
5061 + for line in mylines:
5062 + mysplit=line.split()
5063 + if pathcompare(path,mysplit[2]):
5064 + return 1
5065 + return 0
5066 +
5067 +def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
5068 + "helper function to help targets parse additional arguments"
5069 + global valid_config_file_values
5070 +
5071 + messages = []
5072 + for x in addlargs.keys():
5073 + if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
5074 + messages.append("Argument \""+x+"\" not recognized.")
5075 + else:
5076 + myspec[x]=addlargs[x]
5077 +
5078 + for x in requiredspec:
5079 + if x not in myspec:
5080 + messages.append("Required argument \""+x+"\" not specified.")
5081 +
5082 + if messages:
5083 + raise CatalystError, '\n\tAlso: '.join(messages)
5084 +
5085 +def touch(myfile):
5086 + try:
5087 + myf=open(myfile,"w")
5088 + myf.close()
5089 + except IOError:
5090 + raise CatalystError, "Could not touch "+myfile+"."
5091 +
5092 +def countdown(secs=5, doing="Starting"):
5093 + if secs:
5094 + print ">>> Waiting",secs,"seconds before starting..."
5095 + print ">>> (Control-C to abort)...\n"+doing+" in: ",
5096 + ticks=range(secs)
5097 + ticks.reverse()
5098 + for sec in ticks:
5099 + sys.stdout.write(str(sec+1)+" ")
5100 + sys.stdout.flush()
5101 + time.sleep(1)
5102 + print
5103 +
5104 +def normpath(mypath):
5105 + TrailingSlash=False
5106 + if mypath[-1] == "/":
5107 + TrailingSlash=True
5108 + newpath = os.path.normpath(mypath)
5109 + if len(newpath) > 1:
5110 + if newpath[:2] == "//":
5111 + newpath = newpath[1:]
5112 + if TrailingSlash:
5113 + newpath=newpath+'/'
5114 + return newpath
5115 diff --git a/catalyst/modules/embedded_target.py b/catalyst/modules/embedded_target.py
5116 new file mode 100644
5117 index 0000000..f38ea00
5118 --- /dev/null
5119 +++ b/catalyst/modules/embedded_target.py
5120 @@ -0,0 +1,51 @@
5121 +"""
5122 +Embedded target, similar to the stage2 target, builds upon a stage2 tarball.
5123 +
5124 +A stage2 tarball is unpacked, but instead
5125 +of building a stage3, it emerges @system into another directory
5126 +inside the stage2 system. This way, we do not have to emerge GCC/portage
5127 +into the staged system.
5128 +It may sound complicated but basically it runs
5129 +ROOT=/tmp/submerge emerge --something foo bar .
5130 +"""
5131 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
5132 +
5133 +import os,string,imp,types,shutil
5134 +from catalyst_support import *
5135 +from generic_stage_target import *
5136 +from stat import *
5137 +
5138 +class embedded_target(generic_stage_target):
5139 + """
5140 + Builder class for embedded target
5141 + """
5142 + def __init__(self,spec,addlargs):
5143 + self.required_values=[]
5144 + self.valid_values=[]
5145 + self.valid_values.extend(["embedded/empty","embedded/rm","embedded/unmerge","embedded/fs-prepare","embedded/fs-finish","embedded/mergeroot","embedded/packages","embedded/fs-type","embedded/runscript","boot/kernel","embedded/linuxrc"])
5146 + self.valid_values.extend(["embedded/use"])
5147 + if "embedded/fs-type" in addlargs:
5148 + self.valid_values.append("embedded/fs-ops")
5149 +
5150 + generic_stage_target.__init__(self,spec,addlargs)
5151 + self.set_build_kernel_vars(addlargs)
5152 +
5153 + def set_action_sequence(self):
5154 + self.settings["action_sequence"]=["dir_setup","unpack","unpack_snapshot",\
5155 + "config_profile_link","setup_confdir",\
5156 + "portage_overlay","bind","chroot_setup",\
5157 + "setup_environment","build_kernel","build_packages",\
5158 + "bootloader","root_overlay","fsscript","unmerge",\
5159 + "unbind","remove","empty","clean","capture","clear_autoresume"]
5160 +
5161 + def set_stage_path(self):
5162 + self.settings["stage_path"]=normpath(self.settings["chroot_path"]+"/tmp/mergeroot")
5163 + print "embedded stage path is "+self.settings["stage_path"]
5164 +
5165 + def set_root_path(self):
5166 + self.settings["root_path"]=normpath("/tmp/mergeroot")
5167 + print "embedded root path is "+self.settings["root_path"]
5168 +
5169 +def register(foo):
5170 + foo.update({"embedded":embedded_target})
5171 + return foo
5172 diff --git a/catalyst/modules/generic_stage_target.py b/catalyst/modules/generic_stage_target.py
5173 new file mode 100644
5174 index 0000000..63d919d
5175 --- /dev/null
5176 +++ b/catalyst/modules/generic_stage_target.py
5177 @@ -0,0 +1,1741 @@
5178 +import os,string,imp,types,shutil
5179 +from catalyst_support import *
5180 +from generic_target import *
5181 +from stat import *
5182 +import catalyst_lock
5183 +
5184 +
5185 +PORT_LOGDIR_CLEAN = \
5186 + 'find "${PORT_LOGDIR}" -type f ! -name "summary.log*" -mtime +30 -delete'
5187 +
5188 +TARGET_MOUNTS_DEFAULTS = {
5189 + "ccache": "/var/tmp/ccache",
5190 + "dev": "/dev",
5191 + "devpts": "/dev/pts",
5192 + "distdir": "/usr/portage/distfiles",
5193 + "icecream": "/usr/lib/icecc/bin",
5194 + "kerncache": "/tmp/kerncache",
5195 + "packagedir": "/usr/portage/packages",
5196 + "portdir": "/usr/portage",
5197 + "port_tmpdir": "/var/tmp/portage",
5198 + "port_logdir": "/var/log/portage",
5199 + "proc": "/proc",
5200 + "shm": "/dev/shm",
5201 + }
5202 +
5203 +SOURCE_MOUNTS_DEFAULTS = {
5204 + "dev": "/dev",
5205 + "devpts": "/dev/pts",
5206 + "distdir": "/usr/portage/distfiles",
5207 + "portdir": "/usr/portage",
5208 + "port_tmpdir": "tmpfs",
5209 + "proc": "/proc",
5210 + "shm": "shmfs",
5211 + }
5212 +
5213 +
5214 +class generic_stage_target(generic_target):
5215 + """
5216 + This class does all of the chroot setup, copying of files, etc. It is
5217 + the driver class for pretty much everything that Catalyst does.
5218 + """
	def __init__(self,myspec,addlargs):
		"""
		Validate the spec values, load the arch plugins, resolve the
		host/build arch pair, then compute every derived path and mount
		used by the build.

		myspec   -- parsed spec-file settings dict
		addlargs -- additional target arguments to validate/merge
		"""
		self.required_values.extend(["version_stamp","target","subarch",\
			"rel_type","profile","snapshot","source_subpath"])

		self.valid_values.extend(["version_stamp","target","subarch",\
			"rel_type","profile","snapshot","source_subpath","portage_confdir",\
			"cflags","cxxflags","ldflags","cbuild","hostuse","portage_overlay",\
			"distcc_hosts","makeopts","pkgcache_path","kerncache_path"])

		self.set_valid_build_kernel_vars(addlargs)
		generic_target.__init__(self,myspec,addlargs)

		"""
		The semantics of subarchmap and machinemap changed a bit in 2.0.3 to
		work better with vapier's CBUILD stuff. I've removed the "monolithic"
		machinemap from this file and split up its contents amongst the
		various arch/foo.py files.

		When register() is called on each module in the arch/ dir, it now
		returns a tuple instead of acting on the subarchmap dict that is
		passed to it. The tuple contains the values that were previously
		added to subarchmap as well as a new list of CHOSTs that go along
		with that arch. This allows us to build machinemap on the fly based
		on the keys in subarchmap and the values of the 2nd list returned
		(tmpmachinemap).

		Also, after talking with vapier. I have a slightly better idea of what
		certain variables are used for and what they should be set to. Neither
		'buildarch' or 'hostarch' are used directly, so their value doesn't
		really matter. They are just compared to determine if we are
		cross-compiling. Because of this, they are just set to the name of the
		module in arch/ that the subarch is part of to make things simpler.
		The entire build process is still based off of 'subarch' like it was
		previously. -agaffney
		"""

		self.archmap = {}
		self.subarchmap = {}
		machinemap = {}
		# Scan the arch plugin directory; each foo.py registers the
		# subarches it supports.  __init__.py is not a plugin.
		arch_dir = self.settings["PythonDir"] + "/arch/"
		for x in [x[:-3] for x in os.listdir(arch_dir) if x.endswith(".py")]:
			if x == "__init__":
				continue
			try:
				fh=open(arch_dir + x + ".py")
				"""
				This next line loads the plugin as a module and assigns it to
				archmap[x]
				"""
				self.archmap[x]=imp.load_module(x,fh,"../arch/" + x + ".py",
					(".py", "r", imp.PY_SOURCE))
				"""
				This next line registers all the subarches supported in the
				plugin
				"""
				tmpsubarchmap, tmpmachinemap = self.archmap[x].register()
				self.subarchmap.update(tmpsubarchmap)
				# Both CHOST machine names and subarch names map back to
				# the owning arch module.
				for machine in tmpmachinemap:
					machinemap[machine] = x
				for subarch in tmpsubarchmap:
					machinemap[subarch] = x
				fh.close()
			except IOError:
				"""
				This message should probably change a bit, since everything in
				the dir should load just fine. If it doesn't, it's probably a
				syntax error in the module
				"""
				msg("Can't find/load " + x + ".py plugin in " + arch_dir)

		# hostarch: derived from chost when given, else from subarch.
		if "chost" in self.settings:
			hostmachine = self.settings["chost"].split("-")[0]
			if hostmachine not in machinemap:
				raise CatalystError, "Unknown host machine type "+hostmachine
			self.settings["hostarch"]=machinemap[hostmachine]
		else:
			hostmachine = self.settings["subarch"]
			if hostmachine in machinemap:
				hostmachine = machinemap[hostmachine]
			self.settings["hostarch"]=hostmachine
		# buildarch: from cbuild when given, else from uname machine.
		if "cbuild" in self.settings:
			buildmachine = self.settings["cbuild"].split("-")[0]
		else:
			buildmachine = os.uname()[4]
		if buildmachine not in machinemap:
			raise CatalystError, "Unknown build machine type "+buildmachine
		self.settings["buildarch"]=machinemap[buildmachine]
		self.settings["crosscompile"]=(self.settings["hostarch"]!=\
			self.settings["buildarch"])

		""" Call arch constructor, pass our settings """
		try:
			self.arch=self.subarchmap[self.settings["subarch"]](self.settings)
		except KeyError:
			print "Invalid subarch: "+self.settings["subarch"]
			print "Choose one of the following:",
			for x in self.subarchmap:
				print x,
			print
			sys.exit(2)

		print "Using target:",self.settings["target"]
		""" Print a nice informational message """
		if self.settings["buildarch"]==self.settings["hostarch"]:
			print "Building natively for",self.settings["hostarch"]
		elif self.settings["crosscompile"]:
			print "Cross-compiling on",self.settings["buildarch"],\
				"for different machine type",self.settings["hostarch"]
		else:
			print "Building on",self.settings["buildarch"],\
				"for alternate personality type",self.settings["hostarch"]

		""" This must be set first as other set_ options depend on this """
		self.set_spec_prefix()

		""" Define all of our core variables """
		self.set_target_profile()
		self.set_target_subpath()
		self.set_source_subpath()

		""" Set paths """
		self.set_snapshot_path()
		self.set_root_path()
		self.set_source_path()
		self.set_snapcache_path()
		self.set_chroot_path()
		self.set_autoresume_path()
		self.set_dest_path()
		self.set_stage_path()
		self.set_target_path()

		self.set_controller_file()
		self.set_action_sequence()
		self.set_use()
		self.set_cleanables()
		self.set_iso_volume_id()
		self.set_build_kernel_vars()
		self.set_fsscript()
		self.set_install_mask()
		self.set_rcadd()
		self.set_rcdel()
		self.set_cdtar()
		self.set_fstype()
		self.set_fsops()
		self.set_iso()
		self.set_packages()
		self.set_rm()
		self.set_linuxrc()
		self.set_busybox_config()
		self.set_overlay()
		self.set_portage_overlay()
		self.set_root_overlay()

		"""
		This next line checks to make sure that the specified variables exist
		on disk.
		"""
		#pdb.set_trace()
		file_locate(self.settings,["source_path","snapshot_path","distdir"],\
			expand=0)
		""" If we are using portage_confdir, check that as well. """
		if "portage_confdir" in self.settings:
			file_locate(self.settings,["portage_confdir"],expand=0)

		""" Setup our mount points """
		# initialize our target mounts.
		self.target_mounts = TARGET_MOUNTS_DEFAULTS.copy()

		self.mounts = ["proc", "dev", "portdir", "distdir", "port_tmpdir"]
		# initialize our source mounts
		self.mountmap = SOURCE_MOUNTS_DEFAULTS.copy()
		# update them from settings
		self.mountmap["distdir"] = self.settings["distdir"]
		self.mountmap["portdir"] = normpath("/".join([
			self.settings["snapshot_cache_path"],
			self.settings["repo_name"],
			]))
		# Without SNAPCACHE the portage tree is unpacked into the chroot,
		# so there is nothing to bind-mount.
		if "SNAPCACHE" not in self.settings:
			self.mounts.remove("portdir")
			#self.mountmap["portdir"] = None
		if os.uname()[0] == "Linux":
			self.mounts.append("devpts")
			self.mounts.append("shm")

		self.set_mounts()

		"""
		Configure any user specified options (either in catalyst.conf or on
		the command line).
		"""
		if "PKGCACHE" in self.settings:
			self.set_pkgcache_path()
			print "Location of the package cache is "+\
				self.settings["pkgcache_path"]
			self.mounts.append("packagedir")
			self.mountmap["packagedir"] = self.settings["pkgcache_path"]

		if "KERNCACHE" in self.settings:
			self.set_kerncache_path()
			print "Location of the kerncache is "+\
				self.settings["kerncache_path"]
			self.mounts.append("kerncache")
			self.mountmap["kerncache"] = self.settings["kerncache_path"]

		if "CCACHE" in self.settings:
			if "CCACHE_DIR" in os.environ:
				ccdir=os.environ["CCACHE_DIR"]
				del os.environ["CCACHE_DIR"]
			else:
				ccdir="/root/.ccache"
			if not os.path.isdir(ccdir):
				raise CatalystError,\
					"Compiler cache support can't be enabled (can't find "+\
					ccdir+")"
			self.mounts.append("ccache")
			self.mountmap["ccache"] = ccdir
			""" for the chroot: """
			self.env["CCACHE_DIR"] = self.target_mounts["ccache"]

		if "ICECREAM" in self.settings:
			self.mounts.append("icecream")
			self.mountmap["icecream"] = self.settings["icecream"]
			self.env["PATH"] = self.target_mounts["icecream"] + ":" + \
				self.env["PATH"]

		if "port_logdir" in self.settings:
			self.mounts.append("port_logdir")
			self.mountmap["port_logdir"] = self.settings["port_logdir"]
			self.env["PORT_LOGDIR"] = self.settings["port_logdir"]
			self.env["PORT_LOGDIR_CLEAN"] = PORT_LOGDIR_CLEAN
5449 +
5450 + def override_cbuild(self):
5451 + if "CBUILD" in self.makeconf:
5452 + self.settings["CBUILD"]=self.makeconf["CBUILD"]
5453 +
5454 + def override_chost(self):
5455 + if "CHOST" in self.makeconf:
5456 + self.settings["CHOST"]=self.makeconf["CHOST"]
5457 +
5458 + def override_cflags(self):
5459 + if "CFLAGS" in self.makeconf:
5460 + self.settings["CFLAGS"]=self.makeconf["CFLAGS"]
5461 +
5462 + def override_cxxflags(self):
5463 + if "CXXFLAGS" in self.makeconf:
5464 + self.settings["CXXFLAGS"]=self.makeconf["CXXFLAGS"]
5465 +
5466 + def override_ldflags(self):
5467 + if "LDFLAGS" in self.makeconf:
5468 + self.settings["LDFLAGS"]=self.makeconf["LDFLAGS"]
5469 +
5470 + def set_install_mask(self):
5471 + if "install_mask" in self.settings:
5472 + if type(self.settings["install_mask"])!=types.StringType:
5473 + self.settings["install_mask"]=\
5474 + string.join(self.settings["install_mask"])
5475 +
5476 + def set_spec_prefix(self):
5477 + self.settings["spec_prefix"]=self.settings["target"]
5478 +
5479 + def set_target_profile(self):
5480 + self.settings["target_profile"]=self.settings["profile"]
5481 +
5482 + def set_target_subpath(self):
5483 + self.settings["target_subpath"]=self.settings["rel_type"]+"/"+\
5484 + self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
5485 + self.settings["version_stamp"]
5486 +
5487 + def set_source_subpath(self):
5488 + if type(self.settings["source_subpath"])!=types.StringType:
5489 + raise CatalystError,\
5490 + "source_subpath should have been a string. Perhaps you have something wrong in your spec file?"
5491 +
	def set_pkgcache_path(self):
		# Honor an explicit pkgcache_path from the spec (flattening a
		# list value into one path); otherwise derive it from
		# storedir/packages/<target_subpath>.
		if "pkgcache_path" in self.settings:
			if type(self.settings["pkgcache_path"])!=types.StringType:
				self.settings["pkgcache_path"]=\
					normpath(string.join(self.settings["pkgcache_path"]))
		else:
			self.settings["pkgcache_path"]=\
				normpath(self.settings["storedir"]+"/packages/"+\
				self.settings["target_subpath"]+"/")
5501 +
	def set_kerncache_path(self):
		# Same scheme as set_pkgcache_path: explicit spec value wins,
		# otherwise derive from storedir/kerncache/<target_subpath>.
		if "kerncache_path" in self.settings:
			if type(self.settings["kerncache_path"])!=types.StringType:
				self.settings["kerncache_path"]=\
					normpath(string.join(self.settings["kerncache_path"]))
		else:
			self.settings["kerncache_path"]=normpath(self.settings["storedir"]+\
				"/kerncache/"+self.settings["target_subpath"]+"/")
5510 +
	def set_target_path(self):
		# Final tarball path for this build, plus the autoresume marker
		# that lets a rerun skip this step.
		self.settings["target_path"] = normpath(self.settings["storedir"] +
			"/builds/" + self.settings["target_subpath"].rstrip('/') +
			".tar.bz2")
		if "AUTORESUME" in self.settings\
			and os.path.exists(self.settings["autoresume_path"]+\
				"setup_target_path"):
			print \
				"Resume point detected, skipping target path setup operation..."
		else:
			""" First clean up any existing target stuff """
			# XXX WTF are we removing the old tarball before we start building the
			# XXX new one? If the build fails, you don't want to be left with
			# XXX nothing at all
#			if os.path.isfile(self.settings["target_path"]):
#				cmd("rm -f "+self.settings["target_path"],\
#					"Could not remove existing file: "\
#					+self.settings["target_path"],env=self.env)
			touch(self.settings["autoresume_path"]+"setup_target_path")

		# Ensure the builds/ output directory exists either way.
		if not os.path.exists(self.settings["storedir"]+"/builds/"):
			os.makedirs(self.settings["storedir"]+"/builds/")
5533 +
5534 + def set_fsscript(self):
5535 + if self.settings["spec_prefix"]+"/fsscript" in self.settings:
5536 + self.settings["fsscript"]=\
5537 + self.settings[self.settings["spec_prefix"]+"/fsscript"]
5538 + del self.settings[self.settings["spec_prefix"]+"/fsscript"]
5539 +
5540 + def set_rcadd(self):
5541 + if self.settings["spec_prefix"]+"/rcadd" in self.settings:
5542 + self.settings["rcadd"]=\
5543 + self.settings[self.settings["spec_prefix"]+"/rcadd"]
5544 + del self.settings[self.settings["spec_prefix"]+"/rcadd"]
5545 +
5546 + def set_rcdel(self):
5547 + if self.settings["spec_prefix"]+"/rcdel" in self.settings:
5548 + self.settings["rcdel"]=\
5549 + self.settings[self.settings["spec_prefix"]+"/rcdel"]
5550 + del self.settings[self.settings["spec_prefix"]+"/rcdel"]
5551 +
5552 + def set_cdtar(self):
5553 + if self.settings["spec_prefix"]+"/cdtar" in self.settings:
5554 + self.settings["cdtar"]=\
5555 + normpath(self.settings[self.settings["spec_prefix"]+"/cdtar"])
5556 + del self.settings[self.settings["spec_prefix"]+"/cdtar"]
5557 +
5558 + def set_iso(self):
5559 + if self.settings["spec_prefix"]+"/iso" in self.settings:
5560 + if self.settings[self.settings["spec_prefix"]+"/iso"].startswith('/'):
5561 + self.settings["iso"]=\
5562 + normpath(self.settings[self.settings["spec_prefix"]+"/iso"])
5563 + else:
5564 + # This automatically prepends the build dir to the ISO output path
5565 + # if it doesn't start with a /
5566 + self.settings["iso"] = normpath(self.settings["storedir"] + \
5567 + "/builds/" + self.settings["rel_type"] + "/" + \
5568 + self.settings[self.settings["spec_prefix"]+"/iso"])
5569 + del self.settings[self.settings["spec_prefix"]+"/iso"]
5570 +
5571 + def set_fstype(self):
5572 + if self.settings["spec_prefix"]+"/fstype" in self.settings:
5573 + self.settings["fstype"]=\
5574 + self.settings[self.settings["spec_prefix"]+"/fstype"]
5575 + del self.settings[self.settings["spec_prefix"]+"/fstype"]
5576 +
5577 + if "fstype" not in self.settings:
5578 + self.settings["fstype"]="normal"
5579 + for x in self.valid_values:
5580 + if x == self.settings["spec_prefix"]+"/fstype":
5581 + print "\n"+self.settings["spec_prefix"]+\
5582 + "/fstype is being set to the default of \"normal\"\n"
5583 +
5584 + def set_fsops(self):
5585 + if "fstype" in self.settings:
5586 + self.valid_values.append("fsops")
5587 + if self.settings["spec_prefix"]+"/fsops" in self.settings:
5588 + self.settings["fsops"]=\
5589 + self.settings[self.settings["spec_prefix"]+"/fsops"]
5590 + del self.settings[self.settings["spec_prefix"]+"/fsops"]
5591 +
	def set_source_path(self):
		# Prefer an unpacked seed under storedir/tmp when SEEDCACHE is on
		# and that directory exists; otherwise fall back to the seed
		# tarball under storedir/builds.
		if "SEEDCACHE" in self.settings\
			and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+\
				self.settings["source_subpath"]+"/")):
			self.settings["source_path"]=normpath(self.settings["storedir"]+\
				"/tmp/"+self.settings["source_subpath"]+"/")
		else:
			self.settings["source_path"] = normpath(self.settings["storedir"] +
				"/builds/" + self.settings["source_subpath"].rstrip("/") +
				".tar.bz2")
			if os.path.isfile(self.settings["source_path"]):
				# XXX: Is this even necessary if the previous check passes?
				if os.path.exists(self.settings["source_path"]):
					# Record the tarball hash; unpack() compares it against
					# the autoresume marker to decide whether to re-unpack.
					self.settings["source_path_hash"]=\
						generate_hash(self.settings["source_path"],\
						hash_function=self.settings["hash_function"],\
						verbose=False)
		print "Source path set to "+self.settings["source_path"]
		if os.path.isdir(self.settings["source_path"]):
			print "\tIf this is not desired, remove this directory or turn off"
			print "\tseedcache in the options of catalyst.conf the source path"
			print "\twill then be "+\
				normpath(self.settings["storedir"] + "/builds/" +
				self.settings["source_subpath"].rstrip("/") + ".tar.bz2\n")
5616 +
5617 + def set_dest_path(self):
5618 + if "root_path" in self.settings:
5619 + self.settings["destpath"]=normpath(self.settings["chroot_path"]+\
5620 + self.settings["root_path"])
5621 + else:
5622 + self.settings["destpath"]=normpath(self.settings["chroot_path"])
5623 +
5624 + def set_cleanables(self):
5625 + self.settings["cleanables"]=["/etc/resolv.conf","/var/tmp/*","/tmp/*",\
5626 + "/root/*", self.settings["portdir"]]
5627 +
	def set_snapshot_path(self):
		# Prefer a .tar.xz snapshot; if it is not on disk, fall back to
		# the .tar.bz2 name.  Whichever exists also gets its hash recorded
		# for the unpack_snapshot() autoresume check.
		self.settings["snapshot_path"] = normpath(self.settings["storedir"] +
			"/snapshots/" + self.settings["snapshot_name"] +
			self.settings["snapshot"].rstrip("/") + ".tar.xz")

		if os.path.exists(self.settings["snapshot_path"]):
			self.settings["snapshot_path_hash"]=\
				generate_hash(self.settings["snapshot_path"],\
				hash_function=self.settings["hash_function"],verbose=False)
		else:
			self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
				"/snapshots/" + self.settings["snapshot_name"] +
				self.settings["snapshot"].rstrip("/") + ".tar.bz2")

			if os.path.exists(self.settings["snapshot_path"]):
				self.settings["snapshot_path_hash"]=\
					generate_hash(self.settings["snapshot_path"],\
					hash_function=self.settings["hash_function"],verbose=False)
5646 +
	def set_snapcache_path(self):
		# With SNAPCACHE the snapshot is unpacked once into a shared cache
		# directory; take a lock on it since several builds may share it.
		if "SNAPCACHE" in self.settings:
			self.settings["snapshot_cache_path"]=\
				normpath(self.settings["snapshot_cache"]+"/"+\
				self.settings["snapshot"])
			self.snapcache_lock=\
				catalyst_lock.LockDir(self.settings["snapshot_cache_path"])
			print "Caching snapshot to "+self.settings["snapshot_cache_path"]
5655 +
	def set_chroot_path(self):
		"""
		NOTE: the trailing slash has been removed
		Things *could* break if you don't use a proper join()
		"""
		self.settings["chroot_path"]=normpath(self.settings["storedir"]+\
			"/tmp/"+self.settings["target_subpath"])
		# Lock the chroot dir so two builds cannot share it.
		self.chroot_lock=catalyst_lock.LockDir(self.settings["chroot_path"])
5664 +
	def set_autoresume_path(self):
		# Hidden per-build marker directory; each completed step drops a
		# file here so an AUTORESUME rerun can skip it.
		self.settings["autoresume_path"]=normpath(self.settings["storedir"]+\
			"/tmp/"+self.settings["rel_type"]+"/"+".autoresume-"+\
			self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
			self.settings["version_stamp"]+"/")
		if "AUTORESUME" in self.settings:
			print "The autoresume path is " + self.settings["autoresume_path"]
		if not os.path.exists(self.settings["autoresume_path"]):
			os.makedirs(self.settings["autoresume_path"],0755)
5674 +
5675 + def set_controller_file(self):
5676 + self.settings["controller_file"]=normpath(self.settings["sharedir"]+\
5677 + "/targets/"+self.settings["target"]+"/"+self.settings["target"]+\
5678 + "-controller.sh")
5679 +
5680 + def set_iso_volume_id(self):
5681 + if self.settings["spec_prefix"]+"/volid" in self.settings:
5682 + self.settings["iso_volume_id"]=\
5683 + self.settings[self.settings["spec_prefix"]+"/volid"]
5684 + if len(self.settings["iso_volume_id"])>32:
5685 + raise CatalystError,\
5686 + "ISO volume ID must not exceed 32 characters."
5687 + else:
5688 + self.settings["iso_volume_id"]="catalyst "+self.settings["snapshot"]
5689 +
5690 + def set_action_sequence(self):
5691 + """ Default action sequence for run method """
5692 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
5693 + "setup_confdir","portage_overlay",\
5694 + "base_dirs","bind","chroot_setup","setup_environment",\
5695 + "run_local","preclean","unbind","clean"]
5696 +# if "TARBALL" in self.settings or \
5697 +# "FETCH" not in self.settings:
5698 + if "FETCH" not in self.settings:
5699 + self.settings["action_sequence"].append("capture")
5700 + self.settings["action_sequence"].append("clear_autoresume")
5701 +
5702 + def set_use(self):
5703 + if self.settings["spec_prefix"]+"/use" in self.settings:
5704 + self.settings["use"]=\
5705 + self.settings[self.settings["spec_prefix"]+"/use"]
5706 + del self.settings[self.settings["spec_prefix"]+"/use"]
5707 + if "use" not in self.settings:
5708 + self.settings["use"]=""
5709 + if type(self.settings["use"])==types.StringType:
5710 + self.settings["use"]=self.settings["use"].split()
5711 +
5712 + # Force bindist when options ask for it
5713 + if "BINDIST" in self.settings:
5714 + self.settings["use"].append("bindist")
5715 +
5716 + def set_stage_path(self):
5717 + self.settings["stage_path"]=normpath(self.settings["chroot_path"])
5718 +
	def set_mounts(self):
		"""Hook for subclasses to add target-specific bind mounts."""
		pass
5721 +
	def set_packages(self):
		"""Hook for subclasses to set the package list to build."""
		pass
5724 +
5725 + def set_rm(self):
5726 + if self.settings["spec_prefix"]+"/rm" in self.settings:
5727 + if type(self.settings[self.settings["spec_prefix"]+\
5728 + "/rm"])==types.StringType:
5729 + self.settings[self.settings["spec_prefix"]+"/rm"]=\
5730 + self.settings[self.settings["spec_prefix"]+"/rm"].split()
5731 +
5732 + def set_linuxrc(self):
5733 + if self.settings["spec_prefix"]+"/linuxrc" in self.settings:
5734 + if type(self.settings[self.settings["spec_prefix"]+\
5735 + "/linuxrc"])==types.StringType:
5736 + self.settings["linuxrc"]=\
5737 + self.settings[self.settings["spec_prefix"]+"/linuxrc"]
5738 + del self.settings[self.settings["spec_prefix"]+"/linuxrc"]
5739 +
5740 + def set_busybox_config(self):
5741 + if self.settings["spec_prefix"]+"/busybox_config" in self.settings:
5742 + if type(self.settings[self.settings["spec_prefix"]+\
5743 + "/busybox_config"])==types.StringType:
5744 + self.settings["busybox_config"]=\
5745 + self.settings[self.settings["spec_prefix"]+"/busybox_config"]
5746 + del self.settings[self.settings["spec_prefix"]+"/busybox_config"]
5747 +
	def set_portage_overlay(self):
		# Accept a space-delimited string or a list of overlay dirs;
		# normalize to a list and report what will be used.
		if "portage_overlay" in self.settings:
			if type(self.settings["portage_overlay"])==types.StringType:
				self.settings["portage_overlay"]=\
					self.settings["portage_overlay"].split()
			print "portage_overlay directories are set to: \""+\
				string.join(self.settings["portage_overlay"])+"\""
5755 +
5756 + def set_overlay(self):
5757 + if self.settings["spec_prefix"]+"/overlay" in self.settings:
5758 + if type(self.settings[self.settings["spec_prefix"]+\
5759 + "/overlay"])==types.StringType:
5760 + self.settings[self.settings["spec_prefix"]+"/overlay"]=\
5761 + self.settings[self.settings["spec_prefix"]+\
5762 + "/overlay"].split()
5763 +
5764 + def set_root_overlay(self):
5765 + if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
5766 + if type(self.settings[self.settings["spec_prefix"]+\
5767 + "/root_overlay"])==types.StringType:
5768 + self.settings[self.settings["spec_prefix"]+"/root_overlay"]=\
5769 + self.settings[self.settings["spec_prefix"]+\
5770 + "/root_overlay"].split()
5771 +
5772 + def set_root_path(self):
5773 + """ ROOT= variable for emerges """
5774 + self.settings["root_path"]="/"
5775 +
5776 + def set_valid_build_kernel_vars(self,addlargs):
5777 + if "boot/kernel" in addlargs:
5778 + if type(addlargs["boot/kernel"])==types.StringType:
5779 + loopy=[addlargs["boot/kernel"]]
5780 + else:
5781 + loopy=addlargs["boot/kernel"]
5782 +
5783 + for x in loopy:
5784 + self.valid_values.append("boot/kernel/"+x+"/aliases")
5785 + self.valid_values.append("boot/kernel/"+x+"/config")
5786 + self.valid_values.append("boot/kernel/"+x+"/console")
5787 + self.valid_values.append("boot/kernel/"+x+"/extraversion")
5788 + self.valid_values.append("boot/kernel/"+x+"/gk_action")
5789 + self.valid_values.append("boot/kernel/"+x+"/gk_kernargs")
5790 + self.valid_values.append("boot/kernel/"+x+"/initramfs_overlay")
5791 + self.valid_values.append("boot/kernel/"+x+"/machine_type")
5792 + self.valid_values.append("boot/kernel/"+x+"/sources")
5793 + self.valid_values.append("boot/kernel/"+x+"/softlevel")
5794 + self.valid_values.append("boot/kernel/"+x+"/use")
5795 + self.valid_values.append("boot/kernel/"+x+"/packages")
5796 + if "boot/kernel/"+x+"/packages" in addlargs:
5797 + if type(addlargs["boot/kernel/"+x+\
5798 + "/packages"])==types.StringType:
5799 + addlargs["boot/kernel/"+x+"/packages"]=\
5800 + [addlargs["boot/kernel/"+x+"/packages"]]
5801 +
5802 + def set_build_kernel_vars(self):
5803 + if self.settings["spec_prefix"]+"/gk_mainargs" in self.settings:
5804 + self.settings["gk_mainargs"]=\
5805 + self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
5806 + del self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
5807 +
	def kill_chroot_pids(self):
		"""Run the helper script that kills any processes still running
		inside the chroot, so it can be unmounted/cleaned safely."""
		print "Checking for processes running in chroot and killing them."

		"""
		Force environment variables to be exported so script can see them
		"""
		self.setup_environment()

		# Best effort: only run the script if it is installed.
		if os.path.exists(self.settings["sharedir"]+\
			"/targets/support/kill-chroot-pids.sh"):
			cmd("/bin/bash "+self.settings["sharedir"]+\
				"/targets/support/kill-chroot-pids.sh",\
				"kill-chroot-pids script failed.",env=self.env)
5821 +
	def mount_safety_check(self):
		"""
		Check and verify that none of our paths in mypath are mounted. We don't
		want to clean up with things still mounted, and this allows us to check.

		Returns None when everything is (or gets) unmounted; raises
		CatalystError when a mount point cannot be auto-unbound.
		"""

		# Nothing to check if the chroot was never created.
		if not os.path.exists(self.settings["chroot_path"]):
			return

		print "self.mounts =", self.mounts
		for x in self.mounts:
			target = normpath(self.settings["chroot_path"] + self.target_mounts[x])
			print "mount_safety_check() x =", x, target
			if not os.path.exists(target):
				continue

			if ismount(target):
				""" Something is still mounted "" """
				try:
					print target + " is still mounted; performing auto-bind-umount...",
					""" Try to umount stuff ourselves """
					self.unbind()
					if ismount(target):
						raise CatalystError, "Auto-unbind failed for " + target
					else:
						print "Auto-unbind successful..."
				except CatalystError:
					raise CatalystError, "Unable to auto-unbind " + target
5851 +
5852 + def unpack(self):
5853 + unpack=True
5854 +
5855 + clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+\
5856 + "unpack")
5857 +
5858 + if "SEEDCACHE" in self.settings:
5859 + if os.path.isdir(self.settings["source_path"]):
5860 + """ SEEDCACHE Is a directory, use rsync """
5861 + unpack_cmd="rsync -a --delete "+self.settings["source_path"]+\
5862 + " "+self.settings["chroot_path"]
5863 + display_msg="\nStarting rsync from "+\
5864 + self.settings["source_path"]+"\nto "+\
5865 + self.settings["chroot_path"]+\
5866 + " (This may take some time) ...\n"
5867 + error_msg="Rsync of "+self.settings["source_path"]+" to "+\
5868 + self.settings["chroot_path"]+" failed."
5869 + else:
5870 + """ SEEDCACHE is a not a directory, try untar'ing """
5871 + print "Referenced SEEDCACHE does not appear to be a directory, trying to untar..."
5872 + display_msg="\nStarting tar extract from "+\
5873 + self.settings["source_path"]+"\nto "+\
5874 + self.settings["chroot_path"]+\
5875 + " (This may take some time) ...\n"
5876 + if "bz2" == self.settings["chroot_path"][-3:]:
5877 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5878 + self.settings["chroot_path"]
5879 + else:
5880 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5881 + self.settings["chroot_path"]
5882 + error_msg="Tarball extraction of "+\
5883 + self.settings["source_path"]+" to "+\
5884 + self.settings["chroot_path"]+" failed."
5885 + else:
5886 + """ No SEEDCACHE, use tar """
5887 + display_msg="\nStarting tar extract from "+\
5888 + self.settings["source_path"]+"\nto "+\
5889 + self.settings["chroot_path"]+\
5890 + " (This may take some time) ...\n"
5891 + if "bz2" == self.settings["chroot_path"][-3:]:
5892 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5893 + self.settings["chroot_path"]
5894 + else:
5895 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
5896 + self.settings["chroot_path"]
5897 + error_msg="Tarball extraction of "+self.settings["source_path"]+\
5898 + " to "+self.settings["chroot_path"]+" failed."
5899 +
5900 + if "AUTORESUME" in self.settings:
5901 + if os.path.isdir(self.settings["source_path"]) \
5902 + and os.path.exists(self.settings["autoresume_path"]+"unpack"):
5903 + """ Autoresume is valid, SEEDCACHE is valid """
5904 + unpack=False
5905 + invalid_snapshot=False
5906 +
5907 + elif os.path.isfile(self.settings["source_path"]) \
5908 + and self.settings["source_path_hash"]==clst_unpack_hash:
5909 + """ Autoresume is valid, tarball is valid """
5910 + unpack=False
5911 + invalid_snapshot=True
5912 +
5913 + elif os.path.isdir(self.settings["source_path"]) \
5914 + and not os.path.exists(self.settings["autoresume_path"]+\
5915 + "unpack"):
5916 + """ Autoresume is invalid, SEEDCACHE """
5917 + unpack=True
5918 + invalid_snapshot=False
5919 +
5920 + elif os.path.isfile(self.settings["source_path"]) \
5921 + and self.settings["source_path_hash"]!=clst_unpack_hash:
5922 + """ Autoresume is invalid, tarball """
5923 + unpack=True
5924 + invalid_snapshot=True
5925 + else:
5926 + """ No autoresume, SEEDCACHE """
5927 + if "SEEDCACHE" in self.settings:
5928 + """ SEEDCACHE so let's run rsync and let it clean up """
5929 + if os.path.isdir(self.settings["source_path"]):
5930 + unpack=True
5931 + invalid_snapshot=False
5932 + elif os.path.isfile(self.settings["source_path"]):
5933 + """ Tarball so unpack and remove anything already there """
5934 + unpack=True
5935 + invalid_snapshot=True
5936 + """ No autoresume, no SEEDCACHE """
5937 + else:
5938 + """ Tarball so unpack and remove anything already there """
5939 + if os.path.isfile(self.settings["source_path"]):
5940 + unpack=True
5941 + invalid_snapshot=True
5942 + elif os.path.isdir(self.settings["source_path"]):
5943 + """ We should never reach this, so something is very wrong """
5944 + raise CatalystError,\
5945 + "source path is a dir but seedcache is not enabled"
5946 +
5947 + if unpack:
5948 + self.mount_safety_check()
5949 +
5950 + if invalid_snapshot:
5951 + if "AUTORESUME" in self.settings:
5952 + print "No Valid Resume point detected, cleaning up..."
5953 +
5954 + self.clear_autoresume()
5955 + self.clear_chroot()
5956 +
5957 + if not os.path.exists(self.settings["chroot_path"]):
5958 + os.makedirs(self.settings["chroot_path"])
5959 +
5960 + if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
5961 + os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
5962 +
5963 + if "PKGCACHE" in self.settings:
5964 + if not os.path.exists(self.settings["pkgcache_path"]):
5965 + os.makedirs(self.settings["pkgcache_path"],0755)
5966 +
5967 + if "KERNCACHE" in self.settings:
5968 + if not os.path.exists(self.settings["kerncache_path"]):
5969 + os.makedirs(self.settings["kerncache_path"],0755)
5970 +
5971 + print display_msg
5972 + cmd(unpack_cmd,error_msg,env=self.env)
5973 +
5974 + if "source_path_hash" in self.settings:
5975 + myf=open(self.settings["autoresume_path"]+"unpack","w")
5976 + myf.write(self.settings["source_path_hash"])
5977 + myf.close()
5978 + else:
5979 + touch(self.settings["autoresume_path"]+"unpack")
5980 + else:
5981 + print "Resume point detected, skipping unpack operation..."
5982 +
5983 + def unpack_snapshot(self):
5984 + unpack=True
5985 + snapshot_hash=read_from_clst(self.settings["autoresume_path"]+\
5986 + "unpack_portage")
5987 +
5988 + if "SNAPCACHE" in self.settings:
5989 + snapshot_cache_hash=\
5990 + read_from_clst(self.settings["snapshot_cache_path"]+\
5991 + "catalyst-hash")
5992 + destdir=self.settings["snapshot_cache_path"]
5993 + if "bz2" == self.settings["chroot_path"][-3:]:
5994 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+destdir
5995 + else:
5996 + unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+destdir
5997 + unpack_errmsg="Error unpacking snapshot"
5998 + cleanup_msg="Cleaning up invalid snapshot cache at \n\t"+\
5999 + self.settings["snapshot_cache_path"]+\
6000 + " (This can take a long time)..."
6001 + cleanup_errmsg="Error removing existing snapshot cache directory."
6002 + self.snapshot_lock_object=self.snapcache_lock
6003 +
6004 + if self.settings["snapshot_path_hash"]==snapshot_cache_hash:
6005 + print "Valid snapshot cache, skipping unpack of portage tree..."
6006 + unpack=False
6007 + else:
6008 + destdir = normpath(self.settings["chroot_path"] + self.settings["portdir"])
6009 + cleanup_errmsg="Error removing existing snapshot directory."
6010 + cleanup_msg=\
6011 + "Cleaning up existing portage tree (This can take a long time)..."
6012 + if "bz2" == self.settings["chroot_path"][-3:]:
6013 + unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+\
6014 + self.settings["chroot_path"]+"/usr"
6015 + else:
6016 + unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+\
6017 + self.settings["chroot_path"]+"/usr"
6018 + unpack_errmsg="Error unpacking snapshot"
6019 +
6020 + if "AUTORESUME" in self.settings \
6021 + and os.path.exists(self.settings["chroot_path"]+\
6022 + self.settings["portdir"]) \
6023 + and os.path.exists(self.settings["autoresume_path"]\
6024 + +"unpack_portage") \
6025 + and self.settings["snapshot_path_hash"] == snapshot_hash:
6026 + print \
6027 + "Valid Resume point detected, skipping unpack of portage tree..."
6028 + unpack=False
6029 +
6030 + if unpack:
6031 + if "SNAPCACHE" in self.settings:
6032 + self.snapshot_lock_object.write_lock()
6033 + if os.path.exists(destdir):
6034 + print cleanup_msg
6035 + cleanup_cmd="rm -rf "+destdir
6036 + cmd(cleanup_cmd,cleanup_errmsg,env=self.env)
6037 + if not os.path.exists(destdir):
6038 + os.makedirs(destdir,0755)
6039 +
6040 + print "Unpacking portage tree (This can take a long time) ..."
6041 + cmd(unpack_cmd,unpack_errmsg,env=self.env)
6042 +
6043 + if "SNAPCACHE" in self.settings:
6044 + myf=open(self.settings["snapshot_cache_path"]+"catalyst-hash","w")
6045 + myf.write(self.settings["snapshot_path_hash"])
6046 + myf.close()
6047 + else:
6048 + print "Setting snapshot autoresume point"
6049 + myf=open(self.settings["autoresume_path"]+"unpack_portage","w")
6050 + myf.write(self.settings["snapshot_path_hash"])
6051 + myf.close()
6052 +
6053 + if "SNAPCACHE" in self.settings:
6054 + self.snapshot_lock_object.unlock()
6055 +
6056 + def config_profile_link(self):
6057 + if "AUTORESUME" in self.settings \
6058 + and os.path.exists(self.settings["autoresume_path"]+\
6059 + "config_profile_link"):
6060 + print \
6061 + "Resume point detected, skipping config_profile_link operation..."
6062 + else:
6063 + # TODO: zmedico and I discussed making this a directory and pushing
6064 + # in a parent file, as well as other user-specified configuration.
6065 + print "Configuring profile link..."
6066 + cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.profile",\
6067 + "Error zapping profile link",env=self.env)
6068 + cmd("mkdir -p "+self.settings["chroot_path"]+"/etc/portage/")
6069 + cmd("ln -sf ../.." + self.settings["portdir"] + "/profiles/" + \
6070 + self.settings["target_profile"]+" "+\
6071 + self.settings["chroot_path"]+"/etc/portage/make.profile",\
6072 + "Error creating profile link",env=self.env)
6073 + touch(self.settings["autoresume_path"]+"config_profile_link")
6074 +
6075 + def setup_confdir(self):
6076 + if "AUTORESUME" in self.settings \
6077 + and os.path.exists(self.settings["autoresume_path"]+\
6078 + "setup_confdir"):
6079 + print "Resume point detected, skipping setup_confdir operation..."
6080 + else:
6081 + if "portage_confdir" in self.settings:
6082 + print "Configuring /etc/portage..."
6083 + cmd("rsync -a "+self.settings["portage_confdir"]+"/ "+\
6084 + self.settings["chroot_path"]+"/etc/portage/",\
6085 + "Error copying /etc/portage",env=self.env)
6086 + touch(self.settings["autoresume_path"]+"setup_confdir")
6087 +
6088 + def portage_overlay(self):
6089 + """ We copy the contents of our overlays to /usr/local/portage """
6090 + if "portage_overlay" in self.settings:
6091 + for x in self.settings["portage_overlay"]:
6092 + if os.path.exists(x):
6093 + print "Copying overlay dir " +x
6094 + cmd("mkdir -p "+self.settings["chroot_path"]+\
6095 + self.settings["local_overlay"],\
6096 + "Could not make portage_overlay dir",env=self.env)
6097 + cmd("cp -R "+x+"/* "+self.settings["chroot_path"]+\
6098 + self.settings["local_overlay"],\
6099 + "Could not copy portage_overlay",env=self.env)
6100 +
6101 + def root_overlay(self):
6102 + """ Copy over the root_overlay """
6103 + if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
6104 + for x in self.settings[self.settings["spec_prefix"]+\
6105 + "/root_overlay"]:
6106 + if os.path.exists(x):
6107 + print "Copying root_overlay: "+x
6108 + cmd("rsync -a "+x+"/ "+\
6109 + self.settings["chroot_path"],\
6110 + self.settings["spec_prefix"]+"/root_overlay: "+x+\
6111 + " copy failed.",env=self.env)
6112 +
6113 + def base_dirs(self):
6114 + pass
6115 +
6116 + def bind(self):
6117 + for x in self.mounts:
6118 + #print "bind(); x =", x
6119 + target = normpath(self.settings["chroot_path"] + self.target_mounts[x])
6120 + if not os.path.exists(target):
6121 + os.makedirs(target, 0755)
6122 +
6123 + if not os.path.exists(self.mountmap[x]):
6124 + if self.mountmap[x] not in ["tmpfs", "shmfs"]:
6125 + os.makedirs(self.mountmap[x], 0755)
6126 +
6127 + src=self.mountmap[x]
6128 + #print "bind(); src =", src
6129 + if "SNAPCACHE" in self.settings and x == "portdir":
6130 + self.snapshot_lock_object.read_lock()
6131 + if os.uname()[0] == "FreeBSD":
6132 + if src == "/dev":
6133 + cmd = "mount -t devfs none " + target
6134 + retval=os.system(cmd)
6135 + else:
6136 + cmd = "mount_nullfs " + src + " " + target
6137 + retval=os.system(cmd)
6138 + else:
6139 + if src == "tmpfs":
6140 + if "var_tmpfs_portage" in self.settings:
6141 + cmd = "mount -t tmpfs -o size=" + \
6142 + self.settings["var_tmpfs_portage"] + "G " + \
6143 + src + " " + target
6144 + retval=os.system(cmd)
6145 + elif src == "shmfs":
6146 + cmd = "mount -t tmpfs -o noexec,nosuid,nodev shm " + target
6147 + retval=os.system(cmd)
6148 + else:
6149 + cmd = "mount --bind " + src + " " + target
6150 + #print "bind(); cmd =", cmd
6151 + retval=os.system(cmd)
6152 + if retval!=0:
6153 + self.unbind()
6154 + raise CatalystError,"Couldn't bind mount " + src
6155 +
6156 + def unbind(self):
6157 + ouch=0
6158 + mypath=self.settings["chroot_path"]
6159 + myrevmounts=self.mounts[:]
6160 + myrevmounts.reverse()
6161 + """ Unmount in reverse order for nested bind-mounts """
6162 + for x in myrevmounts:
6163 + target = normpath(mypath + self.target_mounts[x])
6164 + if not os.path.exists(target):
6165 + continue
6166 +
6167 + if not ismount(target):
6168 + continue
6169 +
6170 + retval=os.system("umount " + target)
6171 +
6172 + if retval!=0:
6173 + warn("First attempt to unmount: " + target + " failed.")
6174 + warn("Killing any pids still running in the chroot")
6175 +
6176 + self.kill_chroot_pids()
6177 +
6178 + retval2 = os.system("umount " + target)
6179 + if retval2!=0:
6180 + ouch=1
6181 + warn("Couldn't umount bind mount: " + target)
6182 +
6183 + if "SNAPCACHE" in self.settings and x == "/usr/portage":
6184 + try:
6185 + """
6186 + It's possible the snapshot lock object isn't created yet.
6187 + This is because mount safety check calls unbind before the
6188 + target is fully initialized
6189 + """
6190 + self.snapshot_lock_object.unlock()
6191 + except:
6192 + pass
6193 + if ouch:
6194 + """
6195 + if any bind mounts really failed, then we need to raise
6196 + this to potentially prevent an upcoming bash stage cleanup script
6197 + from wiping our bind mounts.
6198 + """
6199 + raise CatalystError,\
6200 + "Couldn't umount one or more bind-mounts; aborting for safety."
6201 +
6202 + def chroot_setup(self):
6203 + self.makeconf=read_makeconf(self.settings["chroot_path"]+\
6204 + "/etc/portage/make.conf")
6205 + self.override_cbuild()
6206 + self.override_chost()
6207 + self.override_cflags()
6208 + self.override_cxxflags()
6209 + self.override_ldflags()
6210 + if "AUTORESUME" in self.settings \
6211 + and os.path.exists(self.settings["autoresume_path"]+"chroot_setup"):
6212 + print "Resume point detected, skipping chroot_setup operation..."
6213 + else:
6214 + print "Setting up chroot..."
6215 +
6216 + #self.makeconf=read_makeconf(self.settings["chroot_path"]+"/etc/portage/make.conf")
6217 +
6218 + cmd("cp /etc/resolv.conf "+self.settings["chroot_path"]+"/etc",\
6219 + "Could not copy resolv.conf into place.",env=self.env)
6220 +
6221 + """ Copy over the envscript, if applicable """
6222 + if "ENVSCRIPT" in self.settings:
6223 + if not os.path.exists(self.settings["ENVSCRIPT"]):
6224 + raise CatalystError,\
6225 + "Can't find envscript "+self.settings["ENVSCRIPT"]
6226 +
6227 + print "\nWarning!!!!"
6228 + print "\tOverriding certain env variables may cause catastrophic failure."
6229 + print "\tIf your build fails look here first as the possible problem."
6230 + print "\tCatalyst assumes you know what you are doing when setting"
6231 + print "\t\tthese variables."
6232 + print "\tCatalyst Maintainers use VERY minimal envscripts if used at all"
6233 + print "\tYou have been warned\n"
6234 +
6235 + cmd("cp "+self.settings["ENVSCRIPT"]+" "+\
6236 + self.settings["chroot_path"]+"/tmp/envscript",\
6237 + "Could not copy envscript into place.",env=self.env)
6238 +
6239 + """
6240 + Copy over /etc/hosts from the host in case there are any
6241 + specialties in there
6242 + """
6243 + if os.path.exists(self.settings["chroot_path"]+"/etc/hosts"):
6244 + cmd("mv "+self.settings["chroot_path"]+"/etc/hosts "+\
6245 + self.settings["chroot_path"]+"/etc/hosts.catalyst",\
6246 + "Could not backup /etc/hosts",env=self.env)
6247 + cmd("cp /etc/hosts "+self.settings["chroot_path"]+"/etc/hosts",\
6248 + "Could not copy /etc/hosts",env=self.env)
6249 +
6250 + """ Modify and write out make.conf (for the chroot) """
6251 + cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.conf",\
6252 + "Could not remove "+self.settings["chroot_path"]+\
6253 + "/etc/portage/make.conf",env=self.env)
6254 + myf=open(self.settings["chroot_path"]+"/etc/portage/make.conf","w")
6255 + myf.write("# These settings were set by the catalyst build script that automatically\n# built this stage.\n")
6256 + myf.write("# Please consult /usr/share/portage/config/make.conf.example for a more\n# detailed example.\n")
6257 + if "CFLAGS" in self.settings:
6258 + myf.write('CFLAGS="'+self.settings["CFLAGS"]+'"\n')
6259 + if "CXXFLAGS" in self.settings:
6260 + if self.settings["CXXFLAGS"]!=self.settings["CFLAGS"]:
6261 + myf.write('CXXFLAGS="'+self.settings["CXXFLAGS"]+'"\n')
6262 + else:
6263 + myf.write('CXXFLAGS="${CFLAGS}"\n')
6264 + else:
6265 + myf.write('CXXFLAGS="${CFLAGS}"\n')
6266 +
6267 + if "LDFLAGS" in self.settings:
6268 + myf.write("# LDFLAGS is unsupported. USE AT YOUR OWN RISK!\n")
6269 + myf.write('LDFLAGS="'+self.settings["LDFLAGS"]+'"\n')
6270 + if "CBUILD" in self.settings:
6271 + myf.write("# This should not be changed unless you know exactly what you are doing. You\n# should probably be using a different stage, instead.\n")
6272 + myf.write('CBUILD="'+self.settings["CBUILD"]+'"\n')
6273 +
6274 + myf.write("# WARNING: Changing your CHOST is not something that should be done lightly.\n# Please consult http://www.gentoo.org/doc/en/change-chost.xml before changing.\n")
6275 + myf.write('CHOST="'+self.settings["CHOST"]+'"\n')
6276 +
6277 + """ Figure out what our USE vars are for building """
6278 + myusevars=[]
6279 + if "HOSTUSE" in self.settings:
6280 + myusevars.extend(self.settings["HOSTUSE"])
6281 +
6282 + if "use" in self.settings:
6283 + myusevars.extend(self.settings["use"])
6284 +
6285 + if myusevars:
6286 + myf.write("# These are the USE flags that were used in addition to what is provided by the\n# profile used for building.\n")
6287 + myusevars = sorted(set(myusevars))
6288 + myf.write('USE="'+string.join(myusevars)+'"\n')
6289 + if '-*' in myusevars:
6290 + print "\nWarning!!! "
6291 + print "\tThe use of -* in "+self.settings["spec_prefix"]+\
6292 + "/use will cause portage to ignore"
6293 + print "\tpackage.use in the profile and portage_confdir. You've been warned!"
6294 +
6295 + myf.write('PORTDIR="%s"\n' % self.settings['portdir'])
6296 + myf.write('DISTDIR="%s"\n' % self.settings['distdir'])
6297 + myf.write('PKGDIR="%s"\n' % self.settings['packagedir'])
6298 +
6299 + """ Setup the portage overlay """
6300 + if "portage_overlay" in self.settings:
6301 + myf.write('PORTDIR_OVERLAY="/usr/local/portage"\n')
6302 +
6303 + myf.close()
6304 + cmd("cp "+self.settings["chroot_path"]+"/etc/portage/make.conf "+\
6305 + self.settings["chroot_path"]+"/etc/portage/make.conf.catalyst",\
6306 + "Could not backup /etc/portage/make.conf",env=self.env)
6307 + touch(self.settings["autoresume_path"]+"chroot_setup")
6308 +
6309 + def fsscript(self):
6310 + if "AUTORESUME" in self.settings \
6311 + and os.path.exists(self.settings["autoresume_path"]+"fsscript"):
6312 + print "Resume point detected, skipping fsscript operation..."
6313 + else:
6314 + if "fsscript" in self.settings:
6315 + if os.path.exists(self.settings["controller_file"]):
6316 + cmd("/bin/bash "+self.settings["controller_file"]+\
6317 + " fsscript","fsscript script failed.",env=self.env)
6318 + touch(self.settings["autoresume_path"]+"fsscript")
6319 +
6320 + def rcupdate(self):
6321 + if "AUTORESUME" in self.settings \
6322 + and os.path.exists(self.settings["autoresume_path"]+"rcupdate"):
6323 + print "Resume point detected, skipping rcupdate operation..."
6324 + else:
6325 + if os.path.exists(self.settings["controller_file"]):
6326 + cmd("/bin/bash "+self.settings["controller_file"]+" rc-update",\
6327 + "rc-update script failed.",env=self.env)
6328 + touch(self.settings["autoresume_path"]+"rcupdate")
6329 +
6330 + def clean(self):
6331 + if "AUTORESUME" in self.settings \
6332 + and os.path.exists(self.settings["autoresume_path"]+"clean"):
6333 + print "Resume point detected, skipping clean operation..."
6334 + else:
6335 + for x in self.settings["cleanables"]:
6336 + print "Cleaning chroot: "+x+"... "
6337 + cmd("rm -rf "+self.settings["destpath"]+x,"Couldn't clean "+\
6338 + x,env=self.env)
6339 +
6340 + """ Put /etc/hosts back into place """
6341 + if os.path.exists(self.settings["chroot_path"]+"/etc/hosts.catalyst"):
6342 + cmd("mv -f "+self.settings["chroot_path"]+"/etc/hosts.catalyst "+\
6343 + self.settings["chroot_path"]+"/etc/hosts",\
6344 + "Could not replace /etc/hosts",env=self.env)
6345 +
6346 + """ Remove our overlay """
6347 + if os.path.exists(self.settings["chroot_path"] + self.settings["local_overlay"]):
6348 + cmd("rm -rf " + self.settings["chroot_path"] + self.settings["local_overlay"],
6349 + "Could not remove " + self.settings["local_overlay"], env=self.env)
6350 + cmd("sed -i '/^PORTDIR_OVERLAY/d' "+self.settings["chroot_path"]+\
6351 + "/etc/portage/make.conf",\
6352 + "Could not remove PORTDIR_OVERLAY from make.conf",env=self.env)
6353 +
6354 + """ Clean up old and obsoleted files in /etc """
6355 + if os.path.exists(self.settings["stage_path"]+"/etc"):
6356 + cmd("find "+self.settings["stage_path"]+\
6357 + "/etc -maxdepth 1 -name \"*-\" | xargs rm -f",\
6358 + "Could not remove stray files in /etc",env=self.env)
6359 +
6360 + if os.path.exists(self.settings["controller_file"]):
6361 + cmd("/bin/bash "+self.settings["controller_file"]+" clean",\
6362 + "clean script failed.",env=self.env)
6363 + touch(self.settings["autoresume_path"]+"clean")
6364 +
6365 + def empty(self):
6366 + if "AUTORESUME" in self.settings \
6367 + and os.path.exists(self.settings["autoresume_path"]+"empty"):
6368 + print "Resume point detected, skipping empty operation..."
6369 + else:
6370 + if self.settings["spec_prefix"]+"/empty" in self.settings:
6371 + if type(self.settings[self.settings["spec_prefix"]+\
6372 + "/empty"])==types.StringType:
6373 + self.settings[self.settings["spec_prefix"]+"/empty"]=\
6374 + self.settings[self.settings["spec_prefix"]+\
6375 + "/empty"].split()
6376 + for x in self.settings[self.settings["spec_prefix"]+"/empty"]:
6377 + myemp=self.settings["destpath"]+x
6378 + if not os.path.isdir(myemp) or os.path.islink(myemp):
6379 + print x,"not a directory or does not exist, skipping 'empty' operation."
6380 + continue
6381 + print "Emptying directory",x
6382 + """
6383 + stat the dir, delete the dir, recreate the dir and set
6384 + the proper perms and ownership
6385 + """
6386 + mystat=os.stat(myemp)
6387 + shutil.rmtree(myemp)
6388 + os.makedirs(myemp,0755)
6389 + os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
6390 + os.chmod(myemp,mystat[ST_MODE])
6391 + touch(self.settings["autoresume_path"]+"empty")
6392 +
6393 + def remove(self):
6394 + if "AUTORESUME" in self.settings \
6395 + and os.path.exists(self.settings["autoresume_path"]+"remove"):
6396 + print "Resume point detected, skipping remove operation..."
6397 + else:
6398 + if self.settings["spec_prefix"]+"/rm" in self.settings:
6399 + for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
6400 + """
6401 + We're going to shell out for all these cleaning
6402 + operations, so we get easy glob handling.
6403 + """
6404 + print "livecd: removing "+x
6405 + os.system("rm -rf "+self.settings["chroot_path"]+x)
6406 + try:
6407 + if os.path.exists(self.settings["controller_file"]):
6408 + cmd("/bin/bash "+self.settings["controller_file"]+\
6409 + " clean","Clean failed.",env=self.env)
6410 + touch(self.settings["autoresume_path"]+"remove")
6411 + except:
6412 + self.unbind()
6413 + raise
6414 +
6415 + def preclean(self):
6416 + if "AUTORESUME" in self.settings \
6417 + and os.path.exists(self.settings["autoresume_path"]+"preclean"):
6418 + print "Resume point detected, skipping preclean operation..."
6419 + else:
6420 + try:
6421 + if os.path.exists(self.settings["controller_file"]):
6422 + cmd("/bin/bash "+self.settings["controller_file"]+\
6423 + " preclean","preclean script failed.",env=self.env)
6424 + touch(self.settings["autoresume_path"]+"preclean")
6425 +
6426 + except:
6427 + self.unbind()
6428 + raise CatalystError, "Build failed, could not execute preclean"
6429 +
6430 + def capture(self):
6431 + if "AUTORESUME" in self.settings \
6432 + and os.path.exists(self.settings["autoresume_path"]+"capture"):
6433 + print "Resume point detected, skipping capture operation..."
6434 + else:
6435 + """ Capture target in a tarball """
6436 + mypath=self.settings["target_path"].split("/")
6437 + """ Remove filename from path """
6438 + mypath=string.join(mypath[:-1],"/")
6439 +
6440 + """ Now make sure path exists """
6441 + if not os.path.exists(mypath):
6442 + os.makedirs(mypath)
6443 +
6444 + print "Creating stage tarball..."
6445 +
6446 + cmd("tar -I lbzip2 -cpf "+self.settings["target_path"]+" -C "+\
6447 + self.settings["stage_path"]+" .",\
6448 + "Couldn't create stage tarball",env=self.env)
6449 +
6450 + self.gen_contents_file(self.settings["target_path"])
6451 + self.gen_digest_file(self.settings["target_path"])
6452 +
6453 + touch(self.settings["autoresume_path"]+"capture")
6454 +
6455 + def run_local(self):
6456 + if "AUTORESUME" in self.settings \
6457 + and os.path.exists(self.settings["autoresume_path"]+"run_local"):
6458 + print "Resume point detected, skipping run_local operation..."
6459 + else:
6460 + try:
6461 + if os.path.exists(self.settings["controller_file"]):
6462 + cmd("/bin/bash "+self.settings["controller_file"]+" run",\
6463 + "run script failed.",env=self.env)
6464 + touch(self.settings["autoresume_path"]+"run_local")
6465 +
6466 + except CatalystError:
6467 + self.unbind()
6468 + raise CatalystError,"Stage build aborting due to error."
6469 +
6470 + def setup_environment(self):
6471 + """
6472 + Modify the current environment. This is an ugly hack that should be
6473 + fixed. We need this to use the os.system() call since we can't
6474 + specify our own environ
6475 + """
6476 + for x in self.settings.keys():
6477 + """ Sanitize var names by doing "s|/-.|_|g" """
6478 + varname="clst_"+string.replace(x,"/","_")
6479 + varname=string.replace(varname,"-","_")
6480 + varname=string.replace(varname,".","_")
6481 + if type(self.settings[x])==types.StringType:
6482 + """ Prefix to prevent namespace clashes """
6483 + #os.environ[varname]=self.settings[x]
6484 + self.env[varname]=self.settings[x]
6485 + elif type(self.settings[x])==types.ListType:
6486 + #os.environ[varname]=string.join(self.settings[x])
6487 + self.env[varname]=string.join(self.settings[x])
6488 + elif type(self.settings[x])==types.BooleanType:
6489 + if self.settings[x]:
6490 + self.env[varname]="true"
6491 + else:
6492 + self.env[varname]="false"
6493 + if "makeopts" in self.settings:
6494 + self.env["MAKEOPTS"]=self.settings["makeopts"]
6495 +
6496 + def run(self):
6497 + self.chroot_lock.write_lock()
6498 +
6499 + 		""" Kill any pids in the chroot """
6500 + self.kill_chroot_pids()
6501 +
6502 + """ Check for mounts right away and abort if we cannot unmount them """
6503 + self.mount_safety_check()
6504 +
6505 + if "CLEAR_AUTORESUME" in self.settings:
6506 + self.clear_autoresume()
6507 +
6508 + if "PURGETMPONLY" in self.settings:
6509 + self.purge()
6510 + return
6511 +
6512 + if "PURGEONLY" in self.settings:
6513 + self.purge()
6514 + return
6515 +
6516 + if "PURGE" in self.settings:
6517 + self.purge()
6518 +
6519 + for x in self.settings["action_sequence"]:
6520 + print "--- Running action sequence: "+x
6521 + sys.stdout.flush()
6522 + try:
6523 + apply(getattr(self,x))
6524 + except:
6525 + self.mount_safety_check()
6526 + raise
6527 +
6528 + self.chroot_lock.unlock()
6529 +
6530 + def unmerge(self):
6531 + if "AUTORESUME" in self.settings \
6532 + and os.path.exists(self.settings["autoresume_path"]+"unmerge"):
6533 + print "Resume point detected, skipping unmerge operation..."
6534 + else:
6535 + if self.settings["spec_prefix"]+"/unmerge" in self.settings:
6536 + if type(self.settings[self.settings["spec_prefix"]+\
6537 + "/unmerge"])==types.StringType:
6538 + self.settings[self.settings["spec_prefix"]+"/unmerge"]=\
6539 + [self.settings[self.settings["spec_prefix"]+"/unmerge"]]
6540 + myunmerge=\
6541 + self.settings[self.settings["spec_prefix"]+"/unmerge"][:]
6542 +
6543 + for x in range(0,len(myunmerge)):
6544 + """
6545 + Surround args with quotes for passing to bash, allows
6546 + things like "<" to remain intact
6547 + """
6548 + myunmerge[x]="'"+myunmerge[x]+"'"
6549 + myunmerge=string.join(myunmerge)
6550 +
6551 + """ Before cleaning, unmerge stuff """
6552 + try:
6553 + cmd("/bin/bash "+self.settings["controller_file"]+\
6554 + " unmerge "+ myunmerge,"Unmerge script failed.",\
6555 + env=self.env)
6556 + print "unmerge shell script"
6557 + except CatalystError:
6558 + self.unbind()
6559 + raise
6560 + touch(self.settings["autoresume_path"]+"unmerge")
6561 +
6562 + def target_setup(self):
6563 + if "AUTORESUME" in self.settings \
6564 + and os.path.exists(self.settings["autoresume_path"]+"target_setup"):
6565 + print "Resume point detected, skipping target_setup operation..."
6566 + else:
6567 + print "Setting up filesystems per filesystem type"
6568 + cmd("/bin/bash "+self.settings["controller_file"]+\
6569 + " target_image_setup "+ self.settings["target_path"],\
6570 + "target_image_setup script failed.",env=self.env)
6571 + touch(self.settings["autoresume_path"]+"target_setup")
6572 +
6573 + def setup_overlay(self):
6574 + if "AUTORESUME" in self.settings \
6575 + and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
6576 + print "Resume point detected, skipping setup_overlay operation..."
6577 + else:
6578 + if self.settings["spec_prefix"]+"/overlay" in self.settings:
6579 + for x in self.settings[self.settings["spec_prefix"]+"/overlay"]:
6580 + if os.path.exists(x):
6581 + cmd("rsync -a "+x+"/ "+\
6582 + self.settings["target_path"],\
6583 + self.settings["spec_prefix"]+"overlay: "+x+\
6584 + " copy failed.",env=self.env)
6585 + touch(self.settings["autoresume_path"]+"setup_overlay")
6586 +
6587 + def create_iso(self):
6588 + if "AUTORESUME" in self.settings \
6589 + and os.path.exists(self.settings["autoresume_path"]+"create_iso"):
6590 + print "Resume point detected, skipping create_iso operation..."
6591 + else:
6592 + """ Create the ISO """
6593 + if "iso" in self.settings:
6594 + cmd("/bin/bash "+self.settings["controller_file"]+" iso "+\
6595 + self.settings["iso"],"ISO creation script failed.",\
6596 + env=self.env)
6597 + self.gen_contents_file(self.settings["iso"])
6598 + self.gen_digest_file(self.settings["iso"])
6599 + touch(self.settings["autoresume_path"]+"create_iso")
6600 + else:
6601 + print "WARNING: livecd/iso was not defined."
6602 + print "An ISO Image will not be created."
6603 +
6604 + def build_packages(self):
6605 + if "AUTORESUME" in self.settings \
6606 + and os.path.exists(self.settings["autoresume_path"]+\
6607 + "build_packages"):
6608 + print "Resume point detected, skipping build_packages operation..."
6609 + else:
6610 + if self.settings["spec_prefix"]+"/packages" in self.settings:
6611 + if "AUTORESUME" in self.settings \
6612 + and os.path.exists(self.settings["autoresume_path"]+\
6613 + "build_packages"):
6614 + print "Resume point detected, skipping build_packages operation..."
6615 + else:
6616 + mypack=\
6617 + list_bashify(self.settings[self.settings["spec_prefix"]\
6618 + +"/packages"])
6619 + try:
6620 + cmd("/bin/bash "+self.settings["controller_file"]+\
6621 + " build_packages "+mypack,\
6622 + "Error in attempt to build packages",env=self.env)
6623 + touch(self.settings["autoresume_path"]+"build_packages")
6624 + except CatalystError:
6625 + self.unbind()
6626 + raise CatalystError,self.settings["spec_prefix"]+\
6627 + "build aborting due to error."
6628 +
6629 + def build_kernel(self):
6630 + "Build all configured kernels"
6631 + if "AUTORESUME" in self.settings \
6632 + and os.path.exists(self.settings["autoresume_path"]+"build_kernel"):
6633 + print "Resume point detected, skipping build_kernel operation..."
6634 + else:
6635 + if "boot/kernel" in self.settings:
6636 + try:
6637 + mynames=self.settings["boot/kernel"]
6638 + if type(mynames)==types.StringType:
6639 + mynames=[mynames]
6640 + """
6641 + Execute the script that sets up the kernel build environment
6642 + """
6643 + cmd("/bin/bash "+self.settings["controller_file"]+\
6644 + " pre-kmerge ","Runscript pre-kmerge failed",\
6645 + env=self.env)
6646 + for kname in mynames:
6647 + self._build_kernel(kname=kname)
6648 + touch(self.settings["autoresume_path"]+"build_kernel")
6649 + except CatalystError:
6650 + self.unbind()
6651 + raise CatalystError,\
6652 + "build aborting due to kernel build error."
6653 +
6654 + def _build_kernel(self, kname):
6655 + "Build a single configured kernel by name"
6656 + if "AUTORESUME" in self.settings \
6657 + and os.path.exists(self.settings["autoresume_path"]\
6658 + +"build_kernel_"+kname):
6659 + print "Resume point detected, skipping build_kernel for "+kname+" operation..."
6660 + return
6661 + self._copy_kernel_config(kname=kname)
6662 +
6663 + """
6664 + If we need to pass special options to the bootloader
6665 + for this kernel put them into the environment
6666 + """
6667 + if "boot/kernel/"+kname+"/kernelopts" in self.settings:
6668 + myopts=self.settings["boot/kernel/"+kname+\
6669 + "/kernelopts"]
6670 +
6671 + if type(myopts) != types.StringType:
6672 + myopts = string.join(myopts)
6673 + self.env[kname+"_kernelopts"]=myopts
6674 +
6675 + else:
6676 + self.env[kname+"_kernelopts"]=""
6677 +
6678 + if "boot/kernel/"+kname+"/extraversion" not in self.settings:
6679 + self.settings["boot/kernel/"+kname+\
6680 + "/extraversion"]=""
6681 +
6682 + self.env["clst_kextraversion"]=\
6683 + self.settings["boot/kernel/"+kname+\
6684 + "/extraversion"]
6685 +
6686 + self._copy_initramfs_overlay(kname=kname)
6687 +
6688 + """ Execute the script that builds the kernel """
6689 + cmd("/bin/bash "+self.settings["controller_file"]+\
6690 + " kernel "+kname,\
6691 + "Runscript kernel build failed",env=self.env)
6692 +
6693 + if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
6694 + if os.path.exists(self.settings["chroot_path"]+\
6695 + "/tmp/initramfs_overlay/"):
6696 + print "Cleaning up temporary overlay dir"
6697 + cmd("rm -R "+self.settings["chroot_path"]+\
6698 + "/tmp/initramfs_overlay/",env=self.env)
6699 +
6700 + touch(self.settings["autoresume_path"]+\
6701 + "build_kernel_"+kname)
6702 +
6703 + """
6704 + Execute the script that cleans up the kernel build
6705 + environment
6706 + """
6707 + cmd("/bin/bash "+self.settings["controller_file"]+\
6708 + " post-kmerge ",
6709 + "Runscript post-kmerge failed",env=self.env)
6710 +
6711 + def _copy_kernel_config(self, kname):
6712 + if "boot/kernel/"+kname+"/config" in self.settings:
6713 + if not os.path.exists(self.settings["boot/kernel/"+kname+"/config"]):
6714 + self.unbind()
6715 + raise CatalystError,\
6716 + "Can't find kernel config: "+\
6717 + self.settings["boot/kernel/"+kname+\
6718 + "/config"]
6719 +
6720 + try:
6721 + cmd("cp "+self.settings["boot/kernel/"+kname+\
6722 + "/config"]+" "+\
6723 + self.settings["chroot_path"]+"/var/tmp/"+\
6724 + kname+".config",\
6725 + "Couldn't copy kernel config: "+\
6726 + self.settings["boot/kernel/"+kname+\
6727 + "/config"],env=self.env)
6728 +
6729 + except CatalystError:
6730 + self.unbind()
6731 +
6732 + def _copy_initramfs_overlay(self, kname):
6733 + if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
6734 + if os.path.exists(self.settings["boot/kernel/"+\
6735 + kname+"/initramfs_overlay"]):
6736 + print "Copying initramfs_overlay dir "+\
6737 + self.settings["boot/kernel/"+kname+\
6738 + "/initramfs_overlay"]
6739 +
6740 + cmd("mkdir -p "+\
6741 + self.settings["chroot_path"]+\
6742 + "/tmp/initramfs_overlay/"+\
6743 + self.settings["boot/kernel/"+kname+\
6744 + "/initramfs_overlay"],env=self.env)
6745 +
6746 + cmd("cp -R "+self.settings["boot/kernel/"+\
6747 + kname+"/initramfs_overlay"]+"/* "+\
6748 + self.settings["chroot_path"]+\
6749 + "/tmp/initramfs_overlay/"+\
6750 + self.settings["boot/kernel/"+kname+\
6751 + "/initramfs_overlay"],env=self.env)
6752 +
6753 + def bootloader(self):
6754 + if "AUTORESUME" in self.settings \
6755 + and os.path.exists(self.settings["autoresume_path"]+"bootloader"):
6756 + print "Resume point detected, skipping bootloader operation..."
6757 + else:
6758 + try:
6759 + cmd("/bin/bash "+self.settings["controller_file"]+\
6760 + " bootloader " + self.settings["target_path"],\
6761 + "Bootloader script failed.",env=self.env)
6762 + touch(self.settings["autoresume_path"]+"bootloader")
6763 + except CatalystError:
6764 + self.unbind()
6765 + raise CatalystError,"Script aborting due to error."
6766 +
6767 + def livecd_update(self):
6768 + if "AUTORESUME" in self.settings \
6769 + and os.path.exists(self.settings["autoresume_path"]+\
6770 + "livecd_update"):
6771 + print "Resume point detected, skipping build_packages operation..."
6772 + else:
6773 + try:
6774 + cmd("/bin/bash "+self.settings["controller_file"]+\
6775 + " livecd-update","livecd-update failed.",env=self.env)
6776 + touch(self.settings["autoresume_path"]+"livecd_update")
6777 +
6778 + except CatalystError:
6779 + self.unbind()
6780 + raise CatalystError,"build aborting due to livecd_update error."
6781 +
6782 + def clear_chroot(self):
6783 + myemp=self.settings["chroot_path"]
6784 + if os.path.isdir(myemp):
6785 + print "Emptying directory",myemp
6786 + """
6787 + stat the dir, delete the dir, recreate the dir and set
6788 + the proper perms and ownership
6789 + """
6790 + mystat=os.stat(myemp)
6791 + #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
6792 + """ There's no easy way to change flags recursively in python """
6793 + if os.uname()[0] == "FreeBSD":
6794 + os.system("chflags -R noschg "+myemp)
6795 + shutil.rmtree(myemp)
6796 + os.makedirs(myemp,0755)
6797 + os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
6798 + os.chmod(myemp,mystat[ST_MODE])
6799 +
6800 + def clear_packages(self):
6801 + if "PKGCACHE" in self.settings:
6802 + print "purging the pkgcache ..."
6803 +
6804 + myemp=self.settings["pkgcache_path"]
6805 + if os.path.isdir(myemp):
6806 + print "Emptying directory",myemp
6807 + """
6808 + stat the dir, delete the dir, recreate the dir and set
6809 + the proper perms and ownership
6810 + """
6811 + mystat=os.stat(myemp)
6812 + #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
6813 + shutil.rmtree(myemp)
6814 + os.makedirs(myemp,0755)
6815 + os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
6816 + os.chmod(myemp,mystat[ST_MODE])
6817 +
6818 + def clear_kerncache(self):
6819 + if "KERNCACHE" in self.settings:
6820 + print "purging the kerncache ..."
6821 +
6822 + myemp=self.settings["kerncache_path"]
6823 + if os.path.isdir(myemp):
6824 + print "Emptying directory",myemp
6825 + """
6826 + stat the dir, delete the dir, recreate the dir and set
6827 + the proper perms and ownership
6828 + """
6829 + mystat=os.stat(myemp)
6830 + #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
6831 + shutil.rmtree(myemp)
6832 + os.makedirs(myemp,0755)
6833 + os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
6834 + os.chmod(myemp,mystat[ST_MODE])
6835 +
	def clear_autoresume(self):
		""" Clean resume points since they are no longer needed """
		# NOTE(review): AUTORESUME is tested twice; this outer check only
		# gates the progress message -- the directory below is emptied
		# regardless. Confirm that is intended before restructuring.
		if "AUTORESUME" in self.settings:
			print "Removing AutoResume Points: ..."
		myemp=self.settings["autoresume_path"]
		if os.path.isdir(myemp):
			if "AUTORESUME" in self.settings:
				print "Emptying directory",myemp
			"""
			stat the dir, delete the dir, recreate the dir and set
			the proper perms and ownership
			"""
			mystat=os.stat(myemp)
			# FreeBSD may have immutable flags set that block the rmtree.
			if os.uname()[0] == "FreeBSD":
				cmd("chflags -R noschg "+myemp,\
					"Could not remove immutable flag for file "\
					+myemp)
			#cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
			shutil.rmtree(myemp)
			os.makedirs(myemp,0755)
			# Restore the original ownership and permission bits.
			os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
			os.chmod(myemp,mystat[ST_MODE])
6858 +
6859 + def gen_contents_file(self,file):
6860 + if os.path.exists(file+".CONTENTS"):
6861 + os.remove(file+".CONTENTS")
6862 + if "contents" in self.settings:
6863 + if os.path.exists(file):
6864 + myf=open(file+".CONTENTS","w")
6865 + keys={}
6866 + for i in self.settings["contents"].split():
6867 + keys[i]=1
6868 + array=keys.keys()
6869 + array.sort()
6870 + for j in array:
6871 + contents=generate_contents(file,contents_function=j,\
6872 + verbose="VERBOSE" in self.settings)
6873 + if contents:
6874 + myf.write(contents)
6875 + myf.close()
6876 +
6877 + def gen_digest_file(self,file):
6878 + if os.path.exists(file+".DIGESTS"):
6879 + os.remove(file+".DIGESTS")
6880 + if "digests" in self.settings:
6881 + if os.path.exists(file):
6882 + myf=open(file+".DIGESTS","w")
6883 + keys={}
6884 + for i in self.settings["digests"].split():
6885 + keys[i]=1
6886 + array=keys.keys()
6887 + array.sort()
6888 + for f in [file, file+'.CONTENTS']:
6889 + if os.path.exists(f):
6890 + if "all" in array:
6891 + for k in hash_map.keys():
6892 + hash=generate_hash(f,hash_function=k,verbose=\
6893 + "VERBOSE" in self.settings)
6894 + myf.write(hash)
6895 + else:
6896 + for j in array:
6897 + hash=generate_hash(f,hash_function=j,verbose=\
6898 + "VERBOSE" in self.settings)
6899 + myf.write(hash)
6900 + myf.close()
6901 +
6902 + def purge(self):
6903 + countdown(10,"Purging Caches ...")
6904 + if any(k in self.settings for k in ("PURGE","PURGEONLY","PURGETMPONLY")):
6905 + print "clearing autoresume ..."
6906 + self.clear_autoresume()
6907 +
6908 + print "clearing chroot ..."
6909 + self.clear_chroot()
6910 +
6911 + if "PURGETMPONLY" not in self.settings:
6912 + print "clearing package cache ..."
6913 + self.clear_packages()
6914 +
6915 + print "clearing kerncache ..."
6916 + self.clear_kerncache()
6917 +
6918 +# vim: ts=4 sw=4 sta et sts=4 ai
6919 diff --git a/catalyst/modules/generic_target.py b/catalyst/modules/generic_target.py
6920 new file mode 100644
6921 index 0000000..fe96bd7
6922 --- /dev/null
6923 +++ b/catalyst/modules/generic_target.py
6924 @@ -0,0 +1,11 @@
6925 +from catalyst_support import *
6926 +
class generic_target:
	"""Common base for all catalyst targets.

	Validates the spec against the subclass-declared required_values /
	valid_values lists and seeds a minimal environment for spawned
	commands. This is about as generic as we get.
	"""
	def __init__(self,myspec,addlargs):
		# Subclasses must define required_values/valid_values before
		# chaining to this constructor.
		addl_arg_parse(myspec,addlargs,self.required_values,self.valid_values)
		self.settings=myspec
		# Deliberately minimal PATH for the commands targets shell out to.
		self.env={"PATH":"/bin:/sbin:/usr/bin:/usr/sbin"}
6936 diff --git a/catalyst/modules/grp_target.py b/catalyst/modules/grp_target.py
6937 new file mode 100644
6938 index 0000000..6941522
6939 --- /dev/null
6940 +++ b/catalyst/modules/grp_target.py
6941 @@ -0,0 +1,118 @@
6942 +"""
6943 +Gentoo Reference Platform (GRP) target
6944 +"""
+# NOTE: The docstring above influences catalyst-spec(5) man page generation.
6946 +
6947 +import os,types,glob
6948 +from catalyst_support import *
6949 +from generic_stage_target import *
6950 +
class grp_target(generic_stage_target):
	"""
	The builder class for GRP (Gentoo Reference Platform) builds.
	"""
	def __init__(self,spec,addlargs):
		self.required_values=["version_stamp","target","subarch",\
			"rel_type","profile","snapshot","source_subpath"]

		self.valid_values=self.required_values[:]
		self.valid_values.extend(["grp/use"])
		if "grp" not in addlargs:
			raise CatalystError,"Required value \"grp\" not specified in spec."

		self.required_values.extend(["grp"])
		# "grp" and "grp/use" may each be given as a single string;
		# normalize both to lists.
		if type(addlargs["grp"])==types.StringType:
			addlargs["grp"]=[addlargs["grp"]]

		if "grp/use" in addlargs:
			if type(addlargs["grp/use"])==types.StringType:
				addlargs["grp/use"]=[addlargs["grp/use"]]

		# Every named pkgset must declare its package list and its type.
		for x in addlargs["grp"]:
			self.required_values.append("grp/"+x+"/packages")
			self.required_values.append("grp/"+x+"/type")

		generic_stage_target.__init__(self,spec,addlargs)

	def set_target_path(self):
		# Build output lands under storedir/builds/<target_subpath>/.
		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
			print "Resume point detected, skipping target path setup operation..."
		else:
			# first clean up any existing target stuff
			#if os.path.isdir(self.settings["target_path"]):
			#cmd("rm -rf "+self.settings["target_path"],
			#"Could not remove existing directory: "+self.settings["target_path"],env=self.env)
			if not os.path.exists(self.settings["target_path"]):
				os.makedirs(self.settings["target_path"])

			touch(self.settings["autoresume_path"]+"setup_target_path")

	def run_local(self):
		# Build each configured pkgset via the controller script.
		for pkgset in self.settings["grp"]:
			# example call: "grp.sh run pkgset cd1 xmms vim sys-apps/gleep"
			mypackages=list_bashify(self.settings["grp/"+pkgset+"/packages"])
			try:
				cmd("/bin/bash "+self.settings["controller_file"]+" run "+self.settings["grp/"+pkgset+"/type"]\
					+" "+pkgset+" "+mypackages,env=self.env)

			except CatalystError:
				self.unbind()
				raise CatalystError,"GRP build aborting due to error."

	def set_use(self):
		# GRP builds distributable binaries, so force USE=bindist
		# when BINDIST is set.
		generic_stage_target.set_use(self)
		if "BINDIST" in self.settings:
			if "use" in self.settings:
				self.settings["use"].append("bindist")
			else:
				self.settings["use"]=["bindist"]

	def set_mounts(self):
		# Expose the target path inside the chroot as /tmp/grp.
		self.mounts.append("/tmp/grp")
		self.mountmap["/tmp/grp"]=self.settings["target_path"]

	def generate_digests(self):
		# Produce .CONTENTS/.DIGESTS for everything the build created.
		for pkgset in self.settings["grp"]:
			if self.settings["grp/"+pkgset+"/type"] == "pkgset":
				# Binary pkgsets keep their artifacts under <pkgset>/All.
				destdir=normpath(self.settings["target_path"]+"/"+pkgset+"/All")
				print "Digesting files in the pkgset....."
				digests=glob.glob(destdir+'/*.DIGESTS')
				for i in digests:
					if os.path.exists(i):
						os.remove(i)

				files=os.listdir(destdir)
				#ignore files starting with '.' using list comprehension
				files=[filename for filename in files if filename[0] != '.']
				for i in files:
					if os.path.isfile(normpath(destdir+"/"+i)):
						self.gen_contents_file(normpath(destdir+"/"+i))
						self.gen_digest_file(normpath(destdir+"/"+i))
			else:
				# srcsets live directly under <pkgset>; only digests are
				# generated (no .CONTENTS).
				destdir=normpath(self.settings["target_path"]+"/"+pkgset)
				print "Digesting files in the srcset....."

				digests=glob.glob(destdir+'/*.DIGESTS')
				for i in digests:
					if os.path.exists(i):
						os.remove(i)

				files=os.listdir(destdir)
				#ignore files starting with '.' using list comprehension
				files=[filename for filename in files if filename[0] != '.']
				for i in files:
					if os.path.isfile(normpath(destdir+"/"+i)):
						#self.gen_contents_file(normpath(destdir+"/"+i))
						self.gen_digest_file(normpath(destdir+"/"+i))

	def set_action_sequence(self):
		# Build steps, executed in order by generic_stage_target.run().
		self.settings["action_sequence"]=["unpack","unpack_snapshot",\
			"config_profile_link","setup_confdir","portage_overlay","bind","chroot_setup",\
			"setup_environment","run_local","unbind",\
			"generate_digests","clear_autoresume"]
7056 +
def register(foo):
	"""Hook called by catalyst's module loader: add this target to the map."""
	foo["grp"] = grp_target
	return foo
7060 diff --git a/catalyst/modules/livecd_stage1_target.py b/catalyst/modules/livecd_stage1_target.py
7061 new file mode 100644
7062 index 0000000..59de9bb
7063 --- /dev/null
7064 +++ b/catalyst/modules/livecd_stage1_target.py
7065 @@ -0,0 +1,75 @@
7066 +"""
7067 +LiveCD stage1 target
7068 +"""
+# NOTE: The docstring above influences catalyst-spec(5) man page generation.
7070 +
7071 +from catalyst_support import *
7072 +from generic_stage_target import *
7073 +
class livecd_stage1_target(generic_stage_target):
	"""
	Builder class for LiveCD stage1.
	"""
	def __init__(self,spec,addlargs):
		# Only livecd/packages is mandatory; livecd/use is optional.
		self.required_values=["livecd/packages"]
		self.valid_values=self.required_values[:]

		self.valid_values.extend(["livecd/use"])
		generic_stage_target.__init__(self,spec,addlargs)

	def set_action_sequence(self):
		# Build steps, executed in order by generic_stage_target.run().
		self.settings["action_sequence"]=["unpack","unpack_snapshot",\
			"config_profile_link","setup_confdir","portage_overlay",\
			"bind","chroot_setup","setup_environment","build_packages",\
			"unbind", "clean","clear_autoresume"]

	def set_target_path(self):
		# Intentionally a no-op: livecd-stage1 output stays in the chroot.
		# NOTE: an earlier duplicate definition of this method (which
		# created/cleaned target_path) was dead code -- the later
		# definition in a class body always wins in Python -- so the
		# duplicate was removed with no change in runtime behavior.
		pass

	def set_spec_prefix(self):
		# All spec keys for this target live under "livecd/".
		self.settings["spec_prefix"]="livecd"

	def set_use(self):
		# Always build with USE=livecd; add bindist when BINDIST is set.
		generic_stage_target.set_use(self)
		if "use" in self.settings:
			self.settings["use"].append("livecd")
			if "BINDIST" in self.settings:
				self.settings["use"].append("bindist")
		else:
			self.settings["use"]=["livecd"]
			if "BINDIST" in self.settings:
				self.settings["use"].append("bindist")

	def set_packages(self):
		# Normalize livecd/packages to a list and always pull in
		# app-misc/livecd-tools.
		generic_stage_target.set_packages(self)
		if self.settings["spec_prefix"]+"/packages" in self.settings:
			if type(self.settings[self.settings["spec_prefix"]+"/packages"]) == types.StringType:
				self.settings[self.settings["spec_prefix"]+"/packages"] = \
					self.settings[self.settings["spec_prefix"]+"/packages"].split()
			self.settings[self.settings["spec_prefix"]+"/packages"].append("app-misc/livecd-tools")

	def set_pkgcache_path(self):
		# A non-string (list) pkgcache_path from the spec parser is joined
		# back into a single normalized path.
		if "pkgcache_path" in self.settings:
			if type(self.settings["pkgcache_path"]) != types.StringType:
				self.settings["pkgcache_path"]=normpath(string.join(self.settings["pkgcache_path"]))
		else:
			generic_stage_target.set_pkgcache_path(self)
7137 +
def register(foo):
	"""Hook called by catalyst's module loader: add this target to the map."""
	foo["livecd-stage1"] = livecd_stage1_target
	return foo
7141 diff --git a/catalyst/modules/livecd_stage2_target.py b/catalyst/modules/livecd_stage2_target.py
7142 new file mode 100644
7143 index 0000000..c74c16d
7144 --- /dev/null
7145 +++ b/catalyst/modules/livecd_stage2_target.py
7146 @@ -0,0 +1,148 @@
7147 +"""
7148 +LiveCD stage2 target, builds upon previous LiveCD stage1 tarball
7149 +"""
+# NOTE: The docstring above influences catalyst-spec(5) man page generation.
7151 +
7152 +import os,string,types,stat,shutil
7153 +from catalyst_support import *
7154 +from generic_stage_target import *
7155 +
class livecd_stage2_target(generic_stage_target):
	"""
	Builder class for a LiveCD stage2 build.
	"""
	def __init__(self,spec,addlargs):
		self.required_values=["boot/kernel"]

		self.valid_values=[]

		self.valid_values.extend(self.required_values)
		self.valid_values.extend(["livecd/cdtar","livecd/empty","livecd/rm",\
			"livecd/unmerge","livecd/iso","livecd/gk_mainargs","livecd/type",\
			"livecd/readme","livecd/motd","livecd/overlay",\
			"livecd/modblacklist","livecd/splash_theme","livecd/rcadd",\
			"livecd/rcdel","livecd/fsscript","livecd/xinitrc",\
			"livecd/root_overlay","livecd/users","portage_overlay",\
			"livecd/fstype","livecd/fsops","livecd/linuxrc","livecd/bootargs",\
			"gamecd/conf","livecd/xdm","livecd/xsession","livecd/volid"])

		generic_stage_target.__init__(self,spec,addlargs)
		if "livecd/type" not in self.settings:
			self.settings["livecd/type"] = "generic-livecd"

		# Both the cdtar and the controller script must exist on disk.
		file_locate(self.settings, ["cdtar","controller_file"])

	def set_source_path(self):
		# Prefer a stage tarball under builds/; fall back to an unpacked
		# tree under tmp/ when no tarball is present.
		self.settings["source_path"] = normpath(self.settings["storedir"] +
			"/builds/" + self.settings["source_subpath"].rstrip("/") +
			".tar.bz2")
		if os.path.isfile(self.settings["source_path"]):
			# Hash is used by unpack() to validate autoresume points.
			self.settings["source_path_hash"]=generate_hash(self.settings["source_path"])
		else:
			self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/")
			if not os.path.exists(self.settings["source_path"]):
				raise CatalystError,"Source Path: "+self.settings["source_path"]+" does not exist."

	def set_spec_prefix(self):
		# All spec keys for this target live under "livecd/".
		self.settings["spec_prefix"]="livecd"

	def set_target_path(self):
		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
			print "Resume point detected, skipping target path setup operation..."
		else:
			# first clean up any existing target stuff
			if os.path.isdir(self.settings["target_path"]):
				cmd("rm -rf "+self.settings["target_path"],
					"Could not remove existing directory: "+self.settings["target_path"],env=self.env)
				touch(self.settings["autoresume_path"]+"setup_target_path")
			if not os.path.exists(self.settings["target_path"]):
				os.makedirs(self.settings["target_path"])

	def run_local(self):
		# what modules do we want to blacklist?
		if "livecd/modblacklist" in self.settings:
			try:
				myf=open(self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf","a")
			except:
				self.unbind()
				raise CatalystError,"Couldn't open "+self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf."

			myf.write("\n#Added by Catalyst:")
			# workaround until config.py is using configparser
			if isinstance(self.settings["livecd/modblacklist"], str):
				self.settings["livecd/modblacklist"] = self.settings["livecd/modblacklist"].split()
			for x in self.settings["livecd/modblacklist"]:
				myf.write("\nblacklist "+x)
			myf.close()

	def unpack(self):
		"""Unpack/rsync the stage1 source into the chroot.

		Honors AUTORESUME: a matching source hash lets the unpack be
		skipped; a stale hash invalidates and clears previous state.
		"""
		unpack=True
		display_msg=None

		# Hash recorded by a previous successful unpack, if any.
		clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+"unpack")

		if os.path.isdir(self.settings["source_path"]):
			unpack_cmd="rsync -a --delete "+self.settings["source_path"]+" "+self.settings["chroot_path"]
			display_msg="\nStarting rsync from "+self.settings["source_path"]+"\nto "+\
				self.settings["chroot_path"]+" (This may take some time) ...\n"
			error_msg="Rsync of "+self.settings["source_path"]+" to "+self.settings["chroot_path"]+" failed."
		invalid_snapshot=False

		if "AUTORESUME" in self.settings:
			if os.path.isdir(self.settings["source_path"]) and \
				os.path.exists(self.settings["autoresume_path"]+"unpack"):
				print "Resume point detected, skipping unpack operation..."
				unpack=False
			elif "source_path_hash" in self.settings:
				if self.settings["source_path_hash"] != clst_unpack_hash:
					invalid_snapshot=True

		if unpack:
			self.mount_safety_check()
			if invalid_snapshot:
				# Source changed since the resume point; start clean.
				print "No Valid Resume point detected, cleaning up ..."
				#os.remove(self.settings["autoresume_path"]+"dir_setup")
				self.clear_autoresume()
				self.clear_chroot()
				#self.dir_setup()

			if not os.path.exists(self.settings["chroot_path"]):
				os.makedirs(self.settings["chroot_path"])

			# NOTE(review): mode 1777 is a decimal literal here (== 0o3361);
			# presumably 01777 (sticky world-writable tmp) was intended --
			# confirm before changing.
			if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
				os.makedirs(self.settings["chroot_path"]+"/tmp",1777)

			if "PKGCACHE" in self.settings:
				if not os.path.exists(self.settings["pkgcache_path"]):
					os.makedirs(self.settings["pkgcache_path"],0755)

			# display_msg is only set when source_path was a directory;
			# a tarball-only source reaching this point is an error.
			if not display_msg:
				raise CatalystError,"Could not find appropriate source. Please check the 'source_subpath' setting in the spec file."

			print display_msg
			cmd(unpack_cmd,error_msg,env=self.env)

			# Record the source hash so a later run can resume safely.
			if "source_path_hash" in self.settings:
				myf=open(self.settings["autoresume_path"]+"unpack","w")
				myf.write(self.settings["source_path_hash"])
				myf.close()
			else:
				touch(self.settings["autoresume_path"]+"unpack")

	def set_action_sequence(self):
		# FETCH-only runs stop after the kernel build; full runs go on to
		# assemble and master the ISO.
		self.settings["action_sequence"]=["unpack","unpack_snapshot",\
			"config_profile_link","setup_confdir","portage_overlay",\
			"bind","chroot_setup","setup_environment","run_local",\
			"build_kernel"]
		if "FETCH" not in self.settings:
			self.settings["action_sequence"] += ["bootloader","preclean",\
				"livecd_update","root_overlay","fsscript","rcupdate","unmerge",\
				"unbind","remove","empty","target_setup",\
				"setup_overlay","create_iso"]
		self.settings["action_sequence"].append("clear_autoresume")
7291 +
def register(foo):
	"""Hook called by catalyst's module loader: add this target to the map."""
	foo["livecd-stage2"] = livecd_stage2_target
	return foo
7295 diff --git a/catalyst/modules/netboot2_target.py b/catalyst/modules/netboot2_target.py
7296 new file mode 100644
7297 index 0000000..1ab7e7d
7298 --- /dev/null
7299 +++ b/catalyst/modules/netboot2_target.py
7300 @@ -0,0 +1,166 @@
7301 +"""
7302 +netboot target, version 2
7303 +"""
+# NOTE: The docstring above influences catalyst-spec(5) man page generation.
7305 +
7306 +import os,string,types
7307 +from catalyst_support import *
7308 +from generic_stage_target import *
7309 +
class netboot2_target(generic_stage_target):
	"""
	Builder class for a netboot build, version 2
	"""
	def __init__(self,spec,addlargs):
		self.required_values=[
			"boot/kernel"
		]
		self.valid_values=self.required_values[:]
		self.valid_values.extend([
			"netboot2/packages",
			"netboot2/use",
			"netboot2/extra_files",
			"netboot2/overlay",
			"netboot2/busybox_config",
			"netboot2/root_overlay",
			"netboot2/linuxrc"
		])

		try:
			# netboot2/packages may be a single string or a list; each named
			# package may also carry a netboot2/packages/<pkg>/files entry.
			if "netboot2/packages" in addlargs:
				if type(addlargs["netboot2/packages"]) == types.StringType:
					loopy=[addlargs["netboot2/packages"]]
				else:
					loopy=addlargs["netboot2/packages"]

				for x in loopy:
					self.valid_values.append("netboot2/packages/"+x+"/files")
		except:
			raise CatalystError,"configuration error in netboot2/packages."

		generic_stage_target.__init__(self,spec,addlargs)
		self.set_build_kernel_vars()
		# All image content is staged under this path inside the chroot.
		self.settings["merge_path"]=normpath("/tmp/image/")

	def set_target_path(self):
		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+\
			self.settings["target_subpath"]+"/")
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
			print "Resume point detected, skipping target path setup operation..."
		else:
			# first clean up any existing target stuff
			if os.path.isfile(self.settings["target_path"]):
				cmd("rm -f "+self.settings["target_path"], \
					"Could not remove existing file: "+self.settings["target_path"],env=self.env)
			touch(self.settings["autoresume_path"]+"setup_target_path")

		if not os.path.exists(self.settings["storedir"]+"/builds/"):
			os.makedirs(self.settings["storedir"]+"/builds/")

	def copy_files_to_image(self):
		# copies specific files from the buildroot to merge_path
		myfiles=[]

		# check for autoresume point
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"copy_files_to_image"):
			print "Resume point detected, skipping target path setup operation..."
		else:
			# Collect per-package file lists plus any extra_files entries;
			# each may be a single string or a list.
			if "netboot2/packages" in self.settings:
				if type(self.settings["netboot2/packages"]) == types.StringType:
					loopy=[self.settings["netboot2/packages"]]
				else:
					loopy=self.settings["netboot2/packages"]

			for x in loopy:
				if "netboot2/packages/"+x+"/files" in self.settings:
					if type(self.settings["netboot2/packages/"+x+"/files"]) == types.ListType:
						myfiles.extend(self.settings["netboot2/packages/"+x+"/files"])
					else:
						myfiles.append(self.settings["netboot2/packages/"+x+"/files"])

			if "netboot2/extra_files" in self.settings:
				if type(self.settings["netboot2/extra_files"]) == types.ListType:
					myfiles.extend(self.settings["netboot2/extra_files"])
				else:
					myfiles.append(self.settings["netboot2/extra_files"])

			try:
				cmd("/bin/bash "+self.settings["controller_file"]+\
					" image " + list_bashify(myfiles),env=self.env)
			except CatalystError:
				self.unbind()
				raise CatalystError,"Failed to copy files to image!"

			touch(self.settings["autoresume_path"]+"copy_files_to_image")

	def setup_overlay(self):
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
			print "Resume point detected, skipping setup_overlay operation..."
		else:
			# rsync each configured overlay dir on top of the staged image.
			if "netboot2/overlay" in self.settings:
				for x in self.settings["netboot2/overlay"]:
					if os.path.exists(x):
						cmd("rsync -a "+x+"/ "+\
							self.settings["chroot_path"] + self.settings["merge_path"], "netboot2/overlay: "+x+" copy failed.",env=self.env)
				touch(self.settings["autoresume_path"]+"setup_overlay")

	def move_kernels(self):
		# we're done, move the kernels to builds/*
		# no auto resume here as we always want the
		# freshest images moved
		try:
			cmd("/bin/bash "+self.settings["controller_file"]+\
				" final",env=self.env)
			print ">>> Netboot Build Finished!"
		except CatalystError:
			self.unbind()
			raise CatalystError,"Failed to move kernel images!"

	def remove(self):
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"remove"):
			print "Resume point detected, skipping remove operation..."
		else:
			if self.settings["spec_prefix"]+"/rm" in self.settings:
				for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
					# we're going to shell out for all these cleaning operations,
					# so we get easy glob handling
					print "netboot2: removing " + x
					os.system("rm -rf " + self.settings["chroot_path"] + self.settings["merge_path"] + x)

	def empty(self):
		"""Empty each directory listed in netboot2/empty inside the image."""
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"empty"):
			print "Resume point detected, skipping empty operation..."
		else:
			if "netboot2/empty" in self.settings:
				if type(self.settings["netboot2/empty"])==types.StringType:
					self.settings["netboot2/empty"]=self.settings["netboot2/empty"].split()
				for x in self.settings["netboot2/empty"]:
					myemp=self.settings["chroot_path"] + self.settings["merge_path"] + x
					if not os.path.isdir(myemp):
						print x,"not a directory or does not exist, skipping 'empty' operation."
						continue
					print "Emptying directory", x
					# stat the dir, delete the dir, recreate the dir and set
					# the proper perms and ownership
					mystat=os.stat(myemp)
					shutil.rmtree(myemp)
					os.makedirs(myemp,0755)
					os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
					os.chmod(myemp,mystat[ST_MODE])
		touch(self.settings["autoresume_path"]+"empty")

	def set_action_sequence(self):
		# Build steps, executed in order by generic_stage_target.run().
		self.settings["action_sequence"]=["unpack","unpack_snapshot","config_profile_link",
			"setup_confdir","portage_overlay","bind","chroot_setup",\
			"setup_environment","build_packages","root_overlay",\
			"copy_files_to_image","setup_overlay","build_kernel","move_kernels",\
			"remove","empty","unbind","clean","clear_autoresume"]
7463 +
def register(foo):
	"""Hook called by catalyst's module loader: add this target to the map."""
	foo["netboot2"] = netboot2_target
	return foo
7467 diff --git a/catalyst/modules/netboot_target.py b/catalyst/modules/netboot_target.py
7468 new file mode 100644
7469 index 0000000..ff2c81f
7470 --- /dev/null
7471 +++ b/catalyst/modules/netboot_target.py
7472 @@ -0,0 +1,128 @@
7473 +"""
7474 +netboot target, version 1
7475 +"""
+# NOTE: The docstring above influences catalyst-spec(5) man page generation.
7477 +
7478 +import os,string,types
7479 +from catalyst_support import *
7480 +from generic_stage_target import *
7481 +
class netboot_target(generic_stage_target):
	"""
	Builder class for a netboot build.
	"""
	def __init__(self,spec,addlargs):
		self.valid_values = [
			"netboot/kernel/sources",
			"netboot/kernel/config",
			"netboot/kernel/prebuilt",

			"netboot/busybox_config",

			"netboot/extra_files",
			"netboot/packages"
		]
		self.required_values=[]

		try:
			# netboot/packages may be a single string or a list; normalize
			# to a list before iterating.
			if "netboot/packages" in addlargs:
				if type(addlargs["netboot/packages"]) == types.StringType:
					loopy=[addlargs["netboot/packages"]]
				else:
					loopy=addlargs["netboot/packages"]

		#	for x in loopy:
		#		self.required_values.append("netboot/packages/"+x+"/files")
		except:
			raise CatalystError,"configuration error in netboot/packages."

		generic_stage_target.__init__(self,spec,addlargs)
		self.set_build_kernel_vars(addlargs)
		if "netboot/busybox_config" in addlargs:
			file_locate(self.settings, ["netboot/busybox_config"])

		# Custom Kernel Tarball --- use that instead ...

		# unless the user wants specific CFLAGS/CXXFLAGS, let's use -Os

		for envvar in "CFLAGS", "CXXFLAGS":
			if envvar not in os.environ and envvar not in addlargs:
				self.settings[envvar] = "-Os -pipe"

	def set_root_path(self):
		# ROOT= variable for emerges
		self.settings["root_path"]=normpath("/tmp/image")
		print "netboot root path is "+self.settings["root_path"]

#	def build_packages(self):
#		# build packages
#		if "netboot/packages" in self.settings:
#			mypack=list_bashify(self.settings["netboot/packages"])
#		try:
#			cmd("/bin/bash "+self.settings["controller_file"]+" packages "+mypack,env=self.env)
#		except CatalystError:
#			self.unbind()
#			raise CatalystError,"netboot build aborting due to error."

	def build_busybox(self):
		# build busybox, optionally with a user-supplied config
		if "netboot/busybox_config" in self.settings:
			mycmd = self.settings["netboot/busybox_config"]
		else:
			mycmd = ""
		try:
			cmd("/bin/bash "+self.settings["controller_file"]+" busybox "+ mycmd,env=self.env)
		except CatalystError:
			self.unbind()
			raise CatalystError,"netboot build aborting due to error."

	def copy_files_to_image(self):
		# create image
		# Collect per-package file lists plus any extra_files entries;
		# each may be a single string or a list.
		myfiles=[]
		if "netboot/packages" in self.settings:
			if type(self.settings["netboot/packages"]) == types.StringType:
				loopy=[self.settings["netboot/packages"]]
			else:
				loopy=self.settings["netboot/packages"]

		for x in loopy:
			if "netboot/packages/"+x+"/files" in self.settings:
				if type(self.settings["netboot/packages/"+x+"/files"]) == types.ListType:
					myfiles.extend(self.settings["netboot/packages/"+x+"/files"])
				else:
					myfiles.append(self.settings["netboot/packages/"+x+"/files"])

		if "netboot/extra_files" in self.settings:
			if type(self.settings["netboot/extra_files"]) == types.ListType:
				myfiles.extend(self.settings["netboot/extra_files"])
			else:
				myfiles.append(self.settings["netboot/extra_files"])

		try:
			cmd("/bin/bash "+self.settings["controller_file"]+\
				" image " + list_bashify(myfiles),env=self.env)
		except CatalystError:
			self.unbind()
			raise CatalystError,"netboot build aborting due to error."

	def create_netboot_files(self):
		# finish it all up
		try:
			cmd("/bin/bash "+self.settings["controller_file"]+" finish",env=self.env)
		except CatalystError:
			self.unbind()
			raise CatalystError,"netboot build aborting due to error."

		# end
		print "netboot: build finished !"

	def set_action_sequence(self):
		# Build steps, executed in order by generic_stage_target.run().
		self.settings["action_sequence"]=["unpack","unpack_snapshot",
			"config_profile_link","setup_confdir","bind","chroot_setup",\
			"setup_environment","build_packages","build_busybox",\
			"build_kernel","copy_files_to_image",\
			"clean","create_netboot_files","unbind","clear_autoresume"]
7597 +
def register(foo):
	"""Hook called by catalyst's module loader: add this target to the map."""
	foo["netboot"] = netboot_target
	return foo
7601 diff --git a/catalyst/modules/snapshot_target.py b/catalyst/modules/snapshot_target.py
7602 new file mode 100644
7603 index 0000000..ba1bab5
7604 --- /dev/null
7605 +++ b/catalyst/modules/snapshot_target.py
7606 @@ -0,0 +1,91 @@
7607 +"""
7608 +Snapshot target
7609 +"""
7610 +
7611 +import os
7612 +from catalyst_support import *
7613 +from generic_stage_target import *
7614 +
class snapshot_target(generic_stage_target):
	"""
	Builder class for snapshots.
	"""
	def __init__(self,myspec,addlargs):
		self.required_values=["version_stamp","target"]
		self.valid_values=["version_stamp","target"]

		# NOTE(review): chains to generic_target.__init__, not
		# generic_stage_target.__init__ -- snapshots skip the full stage
		# setup on purpose; confirm before "fixing" this.
		generic_target.__init__(self,myspec,addlargs)
		self.settings=myspec
		self.settings["target_subpath"]="portage"
		st=self.settings["storedir"]
		# assumes snapshot_name already carries its separator (e.g.
		# "portage-") since none is added here -- TODO confirm.
		self.settings["snapshot_path"] = normpath(st + "/snapshots/"
			+ self.settings["snapshot_name"]
			+ self.settings["version_stamp"] + ".tar.bz2")
		self.settings["tmp_path"]=normpath(st+"/tmp/"+self.settings["target_subpath"])

	def setup(self):
		# Ensure the snapshots output directory exists.
		x=normpath(self.settings["storedir"]+"/snapshots")
		if not os.path.exists(x):
			os.makedirs(x)

	def mount_safety_check(self):
		# Snapshots never bind-mount anything, so the base class check
		# is unnecessary.
		pass

	def run(self):
		"""Entry point: rsync the portage tree into tmp, tar it up,
		then generate contents/digest files."""
		if "PURGEONLY" in self.settings:
			self.purge()
			return

		if "PURGE" in self.settings:
			self.purge()

		self.setup()
		print "Creating Portage tree snapshot "+self.settings["version_stamp"]+\
			" from "+self.settings["portdir"]+"..."

		mytmp=self.settings["tmp_path"]
		if not os.path.exists(mytmp):
			os.makedirs(mytmp)

		# Copy the tree, excluding binaries/distfiles/VCS metadata and
		# Manifest digest files.
		cmd("rsync -a --delete --exclude /packages/ --exclude /distfiles/ " +
			"--exclude /local/ --exclude CVS/ --exclude .svn --filter=H_**/files/digest-* " +
			self.settings["portdir"] + "/ " + mytmp + "/%s/" % self.settings["repo_name"],
			"Snapshot failure", env=self.env)

		print "Compressing Portage snapshot tarball..."
		# lbzip2 is used for parallel bzip2 compression.
		cmd("tar -I lbzip2 -cf " + self.settings["snapshot_path"] + " -C " +
			mytmp + " " + self.settings["repo_name"],
			"Snapshot creation failure",env=self.env)

		self.gen_contents_file(self.settings["snapshot_path"])
		self.gen_digest_file(self.settings["snapshot_path"])

		self.cleanup()
		print "snapshot: complete!"

	def kill_chroot_pids(self):
		# No chroot is ever entered for a snapshot; nothing to kill.
		pass

	def cleanup(self):
		print "Cleaning up..."

	def purge(self):
		myemp=self.settings["tmp_path"]
		if os.path.isdir(myemp):
			print "Emptying directory",myemp
			"""
			stat the dir, delete the dir, recreate the dir and set
			the proper perms and ownership
			"""
			mystat=os.stat(myemp)
			""" There's no easy way to change flags recursively in python """
			if os.uname()[0] == "FreeBSD":
				os.system("chflags -R noschg "+myemp)
			shutil.rmtree(myemp)
			os.makedirs(myemp,0755)
			os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
			os.chmod(myemp,mystat[ST_MODE])
7694 +
7695 +def register(foo):
7696 + foo.update({"snapshot":snapshot_target})
7697 + return foo
7698 diff --git a/catalyst/modules/stage1_target.py b/catalyst/modules/stage1_target.py
7699 new file mode 100644
7700 index 0000000..5f4ffa0
7701 --- /dev/null
7702 +++ b/catalyst/modules/stage1_target.py
7703 @@ -0,0 +1,97 @@
7704 +"""
7705 +stage1 target
7706 +"""
7707 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7708 +
7709 +from catalyst_support import *
7710 +from generic_stage_target import *
7711 +
7712 +class stage1_target(generic_stage_target):
7713 + """
7714 + Builder class for a stage1 installation tarball build.
7715 + """
7716 + def __init__(self,spec,addlargs):
7717 + self.required_values=[]
7718 + self.valid_values=["chost"]
7719 + self.valid_values.extend(["update_seed","update_seed_command"])
7720 + generic_stage_target.__init__(self,spec,addlargs)
7721 +
7722 + def set_stage_path(self):
7723 + self.settings["stage_path"]=normpath(self.settings["chroot_path"]+self.settings["root_path"])
7724 + print "stage1 stage path is "+self.settings["stage_path"]
7725 +
7726 + def set_root_path(self):
7727 + # sets the root path, relative to 'chroot_path', of the stage1 root
7728 + self.settings["root_path"]=normpath("/tmp/stage1root")
7729 + print "stage1 root path is "+self.settings["root_path"]
7730 +
7731 + def set_cleanables(self):
7732 + generic_stage_target.set_cleanables(self)
7733 + self.settings["cleanables"].extend([\
7734 + "/usr/share/zoneinfo", "/etc/portage/package*"])
7735 +
7736 + # XXX: How do these override_foo() functions differ from the ones in generic_stage_target and why aren't they in stage3_target?
7737 +
7738 + def override_chost(self):
7739 + if "chost" in self.settings:
7740 + self.settings["CHOST"]=list_to_string(self.settings["chost"])
7741 +
7742 + def override_cflags(self):
7743 + if "cflags" in self.settings:
7744 + self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
7745 +
7746 + def override_cxxflags(self):
7747 + if "cxxflags" in self.settings:
7748 + self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
7749 +
7750 + def override_ldflags(self):
7751 + if "ldflags" in self.settings:
7752 + self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
7753 +
7754 + def set_portage_overlay(self):
7755 + generic_stage_target.set_portage_overlay(self)
7756 + if "portage_overlay" in self.settings:
7757 + print "\nWARNING !!!!!"
7758 +			print "\tUsing a portage overlay for earlier stages could cause build issues."
7759 +			print "\tIf you break it, you buy it. Don't complain to us about it."
7760 +			print "\tDon't say we did not warn you\n"
7761 +
7762 + def base_dirs(self):
7763 + if os.uname()[0] == "FreeBSD":
7764 + # baselayout no longer creates the .keep files in proc and dev for FreeBSD as it
7765 + # would create them too late...we need them earlier before bind mounting filesystems
7766 + # since proc and dev are not writeable, so...create them here
7767 + if not os.path.exists(self.settings["stage_path"]+"/proc"):
7768 + os.makedirs(self.settings["stage_path"]+"/proc")
7769 + if not os.path.exists(self.settings["stage_path"]+"/dev"):
7770 + os.makedirs(self.settings["stage_path"]+"/dev")
7771 + if not os.path.isfile(self.settings["stage_path"]+"/proc/.keep"):
7772 + try:
7773 + proc_keepfile = open(self.settings["stage_path"]+"/proc/.keep","w")
7774 + proc_keepfile.write('')
7775 + proc_keepfile.close()
7776 + except IOError:
7777 + print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
7778 + if not os.path.isfile(self.settings["stage_path"]+"/dev/.keep"):
7779 + try:
7780 + dev_keepfile = open(self.settings["stage_path"]+"/dev/.keep","w")
7781 + dev_keepfile.write('')
7782 + dev_keepfile.close()
7783 + except IOError:
7784 + print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
7785 + else:
7786 + pass
7787 +
7788 + def set_mounts(self):
7789 + # stage_path/proc probably doesn't exist yet, so create it
7790 + if not os.path.exists(self.settings["stage_path"]+"/proc"):
7791 + os.makedirs(self.settings["stage_path"]+"/proc")
7792 +
7793 + # alter the mount mappings to bind mount proc onto it
7794 + self.mounts.append("stage1root/proc")
7795 + self.target_mounts["stage1root/proc"] = "/tmp/stage1root/proc"
7796 + self.mountmap["stage1root/proc"] = "/proc"
7797 +
7798 +def register(foo):
7799 + foo.update({"stage1":stage1_target})
7800 + return foo
7801 diff --git a/catalyst/modules/stage2_target.py b/catalyst/modules/stage2_target.py
7802 new file mode 100644
7803 index 0000000..803ec59
7804 --- /dev/null
7805 +++ b/catalyst/modules/stage2_target.py
7806 @@ -0,0 +1,66 @@
7807 +"""
7808 +stage2 target, builds upon previous stage1 tarball
7809 +"""
7810 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7811 +
7812 +from catalyst_support import *
7813 +from generic_stage_target import *
7814 +
7815 +class stage2_target(generic_stage_target):
7816 + """
7817 + Builder class for a stage2 installation tarball build.
7818 + """
7819 + def __init__(self,spec,addlargs):
7820 + self.required_values=[]
7821 + self.valid_values=["chost"]
7822 + generic_stage_target.__init__(self,spec,addlargs)
7823 +
7824 + def set_source_path(self):
7825 + if "SEEDCACHE" in self.settings and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
7826 + self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")
7827 + else:
7828 + self.settings["source_path"] = normpath(self.settings["storedir"] +
7829 + "/builds/" + self.settings["source_subpath"].rstrip("/") +
7830 + ".tar.bz2")
7831 + if os.path.isfile(self.settings["source_path"]):
7832 + if os.path.exists(self.settings["source_path"]):
7833 + # XXX: Is this even necessary if the previous check passes?
7834 + self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
7835 + hash_function=self.settings["hash_function"],verbose=False)
7836 + print "Source path set to "+self.settings["source_path"]
7837 + if os.path.isdir(self.settings["source_path"]):
7838 +			print "\tIf this is not desired, remove this directory or turn off seedcache in the options of catalyst.conf"
7839 + print "\tthe source path will then be " + \
7840 + normpath(self.settings["storedir"] + "/builds/" + \
7841 +				self.settings["source_subpath"].rstrip("/") + ".tar.bz2\n")
7842 +
7843 + # XXX: How do these override_foo() functions differ from the ones in
7844 + # generic_stage_target and why aren't they in stage3_target?
7845 +
7846 + def override_chost(self):
7847 + if "chost" in self.settings:
7848 + self.settings["CHOST"]=list_to_string(self.settings["chost"])
7849 +
7850 + def override_cflags(self):
7851 + if "cflags" in self.settings:
7852 + self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
7853 +
7854 + def override_cxxflags(self):
7855 + if "cxxflags" in self.settings:
7856 + self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
7857 +
7858 + def override_ldflags(self):
7859 + if "ldflags" in self.settings:
7860 + self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
7861 +
7862 + def set_portage_overlay(self):
7863 + generic_stage_target.set_portage_overlay(self)
7864 + if "portage_overlay" in self.settings:
7865 + print "\nWARNING !!!!!"
7866 +			print "\tUsing a portage overlay for earlier stages could cause build issues."
7867 +			print "\tIf you break it, you buy it. Don't complain to us about it."
7868 +			print "\tDon't say we did not warn you\n"
7869 +
7870 +def register(foo):
7871 + foo.update({"stage2":stage2_target})
7872 + return foo
7873 diff --git a/catalyst/modules/stage3_target.py b/catalyst/modules/stage3_target.py
7874 new file mode 100644
7875 index 0000000..4d3a008
7876 --- /dev/null
7877 +++ b/catalyst/modules/stage3_target.py
7878 @@ -0,0 +1,31 @@
7879 +"""
7880 +stage3 target, builds upon previous stage2/stage3 tarball
7881 +"""
7882 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7883 +
7884 +from catalyst_support import *
7885 +from generic_stage_target import *
7886 +
7887 +class stage3_target(generic_stage_target):
7888 + """
7889 + Builder class for a stage3 installation tarball build.
7890 + """
7891 + def __init__(self,spec,addlargs):
7892 + self.required_values=[]
7893 + self.valid_values=[]
7894 + generic_stage_target.__init__(self,spec,addlargs)
7895 +
7896 + def set_portage_overlay(self):
7897 + generic_stage_target.set_portage_overlay(self)
7898 + if "portage_overlay" in self.settings:
7899 + print "\nWARNING !!!!!"
7900 + print "\tUsing an overlay for earlier stages could cause build issues."
7901 + print "\tIf you break it, you buy it. Don't complain to us about it."
7902 +			print "\tDon't say we did not warn you\n"
7903 +
7904 + def set_cleanables(self):
7905 + generic_stage_target.set_cleanables(self)
7906 +
7907 +def register(foo):
7908 + foo.update({"stage3":stage3_target})
7909 + return foo
7910 diff --git a/catalyst/modules/stage4_target.py b/catalyst/modules/stage4_target.py
7911 new file mode 100644
7912 index 0000000..ce41b2d
7913 --- /dev/null
7914 +++ b/catalyst/modules/stage4_target.py
7915 @@ -0,0 +1,43 @@
7916 +"""
7917 +stage4 target, builds upon previous stage3/stage4 tarball
7918 +"""
7919 +# NOTE: That^^ docstring influences catalyst-spec(5) man page generation.
7920 +
7921 +from catalyst_support import *
7922 +from generic_stage_target import *
7923 +
7924 +class stage4_target(generic_stage_target):
7925 + """
7926 + Builder class for stage4.
7927 + """
7928 + def __init__(self,spec,addlargs):
7929 + self.required_values=["stage4/packages"]
7930 + self.valid_values=self.required_values[:]
7931 + self.valid_values.extend(["stage4/use","boot/kernel",\
7932 + "stage4/root_overlay","stage4/fsscript",\
7933 + "stage4/gk_mainargs","splash_theme",\
7934 + "portage_overlay","stage4/rcadd","stage4/rcdel",\
7935 + "stage4/linuxrc","stage4/unmerge","stage4/rm","stage4/empty"])
7936 + generic_stage_target.__init__(self,spec,addlargs)
7937 +
7938 + def set_cleanables(self):
7939 + self.settings["cleanables"]=["/var/tmp/*","/tmp/*"]
7940 +
7941 + def set_action_sequence(self):
7942 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
7943 + "config_profile_link","setup_confdir","portage_overlay",\
7944 + "bind","chroot_setup","setup_environment","build_packages",\
7945 + "build_kernel","bootloader","root_overlay","fsscript",\
7946 + "preclean","rcupdate","unmerge","unbind","remove","empty",\
7947 + "clean"]
7948 +
7949 +# if "TARBALL" in self.settings or \
7950 +# "FETCH" not in self.settings:
7951 + if "FETCH" not in self.settings:
7952 + self.settings["action_sequence"].append("capture")
7953 + self.settings["action_sequence"].append("clear_autoresume")
7954 +
7955 +def register(foo):
7956 + foo.update({"stage4":stage4_target})
7957 + return foo
7958 +
7959 diff --git a/catalyst/modules/tinderbox_target.py b/catalyst/modules/tinderbox_target.py
7960 new file mode 100644
7961 index 0000000..ca55610
7962 --- /dev/null
7963 +++ b/catalyst/modules/tinderbox_target.py
7964 @@ -0,0 +1,48 @@
7965 +"""
7966 +Tinderbox target
7967 +"""
7968 +# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
7969 +
7970 +from catalyst_support import *
7971 +from generic_stage_target import *
7972 +
7973 +class tinderbox_target(generic_stage_target):
7974 + """
7975 + Builder class for the tinderbox target
7976 + """
7977 + def __init__(self,spec,addlargs):
7978 + self.required_values=["tinderbox/packages"]
7979 + self.valid_values=self.required_values[:]
7980 + self.valid_values.extend(["tinderbox/use"])
7981 + generic_stage_target.__init__(self,spec,addlargs)
7982 +
7983 + def run_local(self):
7984 + # tinderbox
7985 + # example call: "grp.sh run xmms vim sys-apps/gleep"
7986 + try:
7987 + if os.path.exists(self.settings["controller_file"]):
7988 + cmd("/bin/bash "+self.settings["controller_file"]+" run "+\
7989 + list_bashify(self.settings["tinderbox/packages"]),"run script failed.",env=self.env)
7990 +
7991 + except CatalystError:
7992 + self.unbind()
7993 + raise CatalystError,"Tinderbox aborting due to error."
7994 +
7995 + def set_cleanables(self):
7996 + self.settings['cleanables'] = [
7997 + '/etc/resolv.conf',
7998 + '/var/tmp/*',
7999 + '/root/*',
8000 + self.settings['portdir'],
8001 + ]
8002 +
8003 + def set_action_sequence(self):
8004 + #Default action sequence for run method
8005 + self.settings["action_sequence"]=["unpack","unpack_snapshot",\
8006 + "config_profile_link","setup_confdir","bind","chroot_setup",\
8007 + "setup_environment","run_local","preclean","unbind","clean",\
8008 + "clear_autoresume"]
8009 +
8010 +def register(foo):
8011 + foo.update({"tinderbox":tinderbox_target})
8012 + return foo
8013 diff --git a/catalyst/util.py b/catalyst/util.py
8014 new file mode 100644
8015 index 0000000..ff12086
8016 --- /dev/null
8017 +++ b/catalyst/util.py
8018 @@ -0,0 +1,14 @@
8019 +"""
8020 +Collection of utility functions for catalyst
8021 +"""
8022 +
8023 +import sys, traceback
8024 +
8025 +def capture_traceback():
8026 + etype, value, tb = sys.exc_info()
8027 + s = [x.strip() for x in traceback.format_exception(etype, value, tb)]
8028 + return s
8029 +
8030 +def print_traceback():
8031 + for x in capture_traceback():
8032 + print x
8033 diff --git a/modules/__init__.py b/modules/__init__.py
8034 deleted file mode 100644
8035 index e69de29..0000000
8036 diff --git a/modules/builder.py b/modules/builder.py
8037 deleted file mode 100644
8038 index ad27d78..0000000
8039 --- a/modules/builder.py
8040 +++ /dev/null
8041 @@ -1,20 +0,0 @@
8042 -
8043 -class generic:
8044 - def __init__(self,myspec):
8045 - self.settings=myspec
8046 -
8047 - def mount_safety_check(self):
8048 - """
8049 - Make sure that no bind mounts exist in chrootdir (to use before
8050 - cleaning the directory, to make sure we don't wipe the contents of
8051 - a bind mount
8052 - """
8053 - pass
8054 -
8055 - def mount_all(self):
8056 - """do all bind mounts"""
8057 - pass
8058 -
8059 - def umount_all(self):
8060 - """unmount all bind mounts"""
8061 - pass
8062 diff --git a/modules/catalyst/__init__.py b/modules/catalyst/__init__.py
8063 deleted file mode 100644
8064 index e69de29..0000000
8065 diff --git a/modules/catalyst/config.py b/modules/catalyst/config.py
8066 deleted file mode 100644
8067 index 726bf74..0000000
8068 --- a/modules/catalyst/config.py
8069 +++ /dev/null
8070 @@ -1,122 +0,0 @@
8071 -import re
8072 -from modules.catalyst_support import *
8073 -
8074 -class ParserBase:
8075 -
8076 - filename = ""
8077 - lines = None
8078 - values = None
8079 - key_value_separator = "="
8080 - multiple_values = False
8081 - empty_values = True
8082 -
8083 - def __getitem__(self, key):
8084 - return self.values[key]
8085 -
8086 - def get_values(self):
8087 - return self.values
8088 -
8089 - def dump(self):
8090 - dump = ""
8091 - for x in self.values.keys():
8092 - dump += x + " = " + repr(self.values[x]) + "\n"
8093 - return dump
8094 -
8095 - def parse_file(self, filename):
8096 - try:
8097 - myf = open(filename, "r")
8098 - except:
8099 - raise CatalystError, "Could not open file " + filename
8100 - self.lines = myf.readlines()
8101 - myf.close()
8102 - self.filename = filename
8103 - self.parse()
8104 -
8105 - def parse_lines(self, lines):
8106 - self.lines = lines
8107 - self.parse()
8108 -
8109 - def parse(self):
8110 - values = {}
8111 - cur_array = []
8112 -
8113 - trailing_comment=re.compile('\s*#.*$')
8114 - white_space=re.compile('\s+')
8115 -
8116 - for x, myline in enumerate(self.lines):
8117 - myline = myline.strip()
8118 -
8119 - # Force the line to be clean
8120 - # Remove Comments ( anything following # )
8121 - myline = trailing_comment.sub("", myline)
8122 -
8123 - # Skip any blank lines
8124 - if not myline: continue
8125 -
8126 - # Look for separator
8127 - msearch = myline.find(self.key_value_separator)
8128 -
8129 - # If separator found assume its a new key
8130 - if msearch != -1:
8131 - # Split on the first occurence of the separator creating two strings in the array mobjs
8132 - mobjs = myline.split(self.key_value_separator, 1)
8133 - mobjs[1] = mobjs[1].strip().strip('"')
8134 -
8135 -# # Check that this key doesn't exist already in the spec
8136 -# if mobjs[0] in values:
8137 -# raise Exception("You have a duplicate key (" + mobjs[0] + ") in your spec. Please fix it")
8138 -
8139 - # Start a new array using the first element of mobjs
8140 - cur_array = [mobjs[0]]
8141 - if mobjs[1]:
8142 - if self.multiple_values:
8143 - # split on white space creating additional array elements
8144 -# subarray = white_space.split(mobjs[1])
8145 - subarray = mobjs[1].split()
8146 - cur_array += subarray
8147 - else:
8148 - cur_array += [mobjs[1]]
8149 -
8150 - # Else add on to the last key we were working on
8151 - else:
8152 - if self.multiple_values:
8153 -# mobjs = white_space.split(myline)
8154 -# cur_array += mobjs
8155 - cur_array += myline.split()
8156 - else:
8157 - raise CatalystError, "Syntax error: " + x
8158 -
8159 - # XXX: Do we really still need this "single value is a string" behavior?
8160 - if len(cur_array) == 2:
8161 - values[cur_array[0]] = cur_array[1]
8162 - else:
8163 - values[cur_array[0]] = cur_array[1:]
8164 -
8165 - if not self.empty_values:
8166 - for x in values.keys():
8167 - # Delete empty key pairs
8168 - if not values[x]:
8169 - print "\n\tWARNING: No value set for key " + x + "...deleting"
8170 - del values[x]
8171 -
8172 - self.values = values
8173 -
8174 -class SpecParser(ParserBase):
8175 -
8176 - key_value_separator = ':'
8177 - multiple_values = True
8178 - empty_values = False
8179 -
8180 - def __init__(self, filename=""):
8181 - if filename:
8182 - self.parse_file(filename)
8183 -
8184 -class ConfigParser(ParserBase):
8185 -
8186 - key_value_separator = '='
8187 - multiple_values = False
8188 - empty_values = True
8189 -
8190 - def __init__(self, filename=""):
8191 - if filename:
8192 - self.parse_file(filename)
8193 diff --git a/modules/catalyst/util.py b/modules/catalyst/util.py
8194 deleted file mode 100644
8195 index ff12086..0000000
8196 --- a/modules/catalyst/util.py
8197 +++ /dev/null
8198 @@ -1,14 +0,0 @@
8199 -"""
8200 -Collection of utility functions for catalyst
8201 -"""
8202 -
8203 -import sys, traceback
8204 -
8205 -def capture_traceback():
8206 - etype, value, tb = sys.exc_info()
8207 - s = [x.strip() for x in traceback.format_exception(etype, value, tb)]
8208 - return s
8209 -
8210 -def print_traceback():
8211 - for x in capture_traceback():
8212 - print x
8213 diff --git a/modules/catalyst_lock.py b/modules/catalyst_lock.py
8214 deleted file mode 100644
8215 index 5311cf8..0000000
8216 --- a/modules/catalyst_lock.py
8217 +++ /dev/null
8218 @@ -1,468 +0,0 @@
8219 -#!/usr/bin/python
8220 -import os
8221 -import fcntl
8222 -import errno
8223 -import sys
8224 -import string
8225 -import time
8226 -from catalyst_support import *
8227 -
8228 -def writemsg(mystr):
8229 - sys.stderr.write(mystr)
8230 - sys.stderr.flush()
8231 -
8232 -class LockDir:
8233 - locking_method=fcntl.flock
8234 - lock_dirs_in_use=[]
8235 - die_on_failed_lock=True
8236 - def __del__(self):
8237 - self.clean_my_hardlocks()
8238 - self.delete_lock_from_path_list()
8239 - if self.islocked():
8240 - self.fcntl_unlock()
8241 -
8242 - def __init__(self,lockdir):
8243 - self.locked=False
8244 - self.myfd=None
8245 - self.set_gid(250)
8246 - self.locking_method=LockDir.locking_method
8247 - self.set_lockdir(lockdir)
8248 - self.set_lockfilename(".catalyst_lock")
8249 - self.set_lockfile()
8250 -
8251 - if LockDir.lock_dirs_in_use.count(lockdir)>0:
8252 - raise "This directory already associated with a lock object"
8253 - else:
8254 - LockDir.lock_dirs_in_use.append(lockdir)
8255 -
8256 - self.hardlock_paths={}
8257 -
8258 - def delete_lock_from_path_list(self):
8259 - i=0
8260 - try:
8261 - if LockDir.lock_dirs_in_use:
8262 - for x in LockDir.lock_dirs_in_use:
8263 - if LockDir.lock_dirs_in_use[i] == self.lockdir:
8264 - del LockDir.lock_dirs_in_use[i]
8265 - break
8266 - i=i+1
8267 - except AttributeError:
8268 - pass
8269 -
8270 - def islocked(self):
8271 - if self.locked:
8272 - return True
8273 - else:
8274 - return False
8275 -
8276 - def set_gid(self,gid):
8277 - if not self.islocked():
8278 -# if "DEBUG" in self.settings:
8279 -# print "setting gid to", gid
8280 - self.gid=gid
8281 -
8282 - def set_lockdir(self,lockdir):
8283 - if not os.path.exists(lockdir):
8284 - os.makedirs(lockdir)
8285 - if os.path.isdir(lockdir):
8286 - if not self.islocked():
8287 - if lockdir[-1] == "/":
8288 - lockdir=lockdir[:-1]
8289 - self.lockdir=normpath(lockdir)
8290 -# if "DEBUG" in self.settings:
8291 -# print "setting lockdir to", self.lockdir
8292 - else:
8293 - raise "the lock object needs a path to a dir"
8294 -
8295 - def set_lockfilename(self,lockfilename):
8296 - if not self.islocked():
8297 - self.lockfilename=lockfilename
8298 -# if "DEBUG" in self.settings:
8299 -# print "setting lockfilename to", self.lockfilename
8300 -
8301 - def set_lockfile(self):
8302 - if not self.islocked():
8303 - self.lockfile=normpath(self.lockdir+'/'+self.lockfilename)
8304 -# if "DEBUG" in self.settings:
8305 -# print "setting lockfile to", self.lockfile
8306 -
8307 - def read_lock(self):
8308 - if not self.locking_method == "HARDLOCK":
8309 - self.fcntl_lock("read")
8310 - else:
8311 - print "HARDLOCKING doesnt support shared-read locks"
8312 - print "using exclusive write locks"
8313 - self.hard_lock()
8314 -
8315 - def write_lock(self):
8316 - if not self.locking_method == "HARDLOCK":
8317 - self.fcntl_lock("write")
8318 - else:
8319 - self.hard_lock()
8320 -
8321 - def unlock(self):
8322 - if not self.locking_method == "HARDLOCK":
8323 - self.fcntl_unlock()
8324 - else:
8325 - self.hard_unlock()
8326 -
8327 - def fcntl_lock(self,locktype):
8328 - if self.myfd==None:
8329 - if not os.path.exists(os.path.dirname(self.lockdir)):
8330 - raise DirectoryNotFound, os.path.dirname(self.lockdir)
8331 - if not os.path.exists(self.lockfile):
8332 - old_mask=os.umask(000)
8333 - self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
8334 - try:
8335 - if os.stat(self.lockfile).st_gid != self.gid:
8336 - os.chown(self.lockfile,os.getuid(),self.gid)
8337 - except SystemExit, e:
8338 - raise
8339 - except OSError, e:
8340 - if e[0] == 2: #XXX: No such file or directory
8341 - return self.fcntl_locking(locktype)
8342 - else:
8343 - writemsg("Cannot chown a lockfile. This could cause inconvenience later.\n")
8344 -
8345 - os.umask(old_mask)
8346 - else:
8347 - self.myfd = os.open(self.lockfile, os.O_CREAT|os.O_RDWR,0660)
8348 -
8349 - try:
8350 - if locktype == "read":
8351 - self.locking_method(self.myfd,fcntl.LOCK_SH|fcntl.LOCK_NB)
8352 - else:
8353 - self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
8354 - except IOError, e:
8355 - if "errno" not in dir(e):
8356 - raise
8357 - if e.errno == errno.EAGAIN:
8358 - if not LockDir.die_on_failed_lock:
8359 - # Resource temp unavailable; eg, someone beat us to the lock.
8360 - writemsg("waiting for lock on %s\n" % self.lockfile)
8361 -
8362 - # Try for the exclusive or shared lock again.
8363 - if locktype == "read":
8364 - self.locking_method(self.myfd,fcntl.LOCK_SH)
8365 - else:
8366 - self.locking_method(self.myfd,fcntl.LOCK_EX)
8367 - else:
8368 - raise LockInUse,self.lockfile
8369 - elif e.errno == errno.ENOLCK:
8370 - pass
8371 - else:
8372 - raise
8373 - if not os.path.exists(self.lockfile):
8374 - os.close(self.myfd)
8375 - self.myfd=None
8376 - #writemsg("lockfile recurse\n")
8377 - self.fcntl_lock(locktype)
8378 - else:
8379 - self.locked=True
8380 - #writemsg("Lockfile obtained\n")
8381 -
8382 - def fcntl_unlock(self):
8383 - import fcntl
8384 - unlinkfile = 1
8385 - if not os.path.exists(self.lockfile):
8386 - print "lockfile does not exist '%s'" % self.lockfile
8387 - if (self.myfd != None):
8388 - try:
8389 - os.close(myfd)
8390 - self.myfd=None
8391 - except:
8392 - pass
8393 - return False
8394 -
8395 - try:
8396 - if self.myfd == None:
8397 - self.myfd = os.open(self.lockfile, os.O_WRONLY,0660)
8398 - unlinkfile = 1
8399 - self.locking_method(self.myfd,fcntl.LOCK_UN)
8400 - except SystemExit, e:
8401 - raise
8402 - except Exception, e:
8403 - os.close(self.myfd)
8404 - self.myfd=None
8405 - raise IOError, "Failed to unlock file '%s'\n" % self.lockfile
8406 - try:
8407 - # This sleep call was added to allow other processes that are
8408 - # waiting for a lock to be able to grab it before it is deleted.
8409 - # lockfile() already accounts for this situation, however, and
8410 - # the sleep here adds more time than is saved overall, so am
8411 - # commenting until it is proved necessary.
8412 - #time.sleep(0.0001)
8413 - if unlinkfile:
8414 - InUse=False
8415 - try:
8416 - self.locking_method(self.myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
8417 - except:
8418 - print "Read lock may be in effect. skipping lockfile delete..."
8419 - InUse=True
8420 - # We won the lock, so there isn't competition for it.
8421 - # We can safely delete the file.
8422 - #writemsg("Got the lockfile...\n")
8423 - #writemsg("Unlinking...\n")
8424 - self.locking_method(self.myfd,fcntl.LOCK_UN)
8425 - if not InUse:
8426 - os.unlink(self.lockfile)
8427 - os.close(self.myfd)
8428 - self.myfd=None
8429 -# if "DEBUG" in self.settings:
8430 -# print "Unlinked lockfile..."
8431 - except SystemExit, e:
8432 - raise
8433 - except Exception, e:
8434 - # We really don't care... Someone else has the lock.
8435 - # So it is their problem now.
8436 - print "Failed to get lock... someone took it."
8437 - print str(e)
8438 -
8439 - # Why test lockfilename? Because we may have been handed an
8440 - # fd originally, and the caller might not like having their
8441 - # open fd closed automatically on them.
8442 - #if type(lockfilename) == types.StringType:
8443 - # os.close(myfd)
8444 -
8445 - if (self.myfd != None):
8446 - os.close(self.myfd)
8447 - self.myfd=None
8448 - self.locked=False
8449 - time.sleep(.0001)
8450 -
8451 - def hard_lock(self,max_wait=14400):
8452 - """Does the NFS, hardlink shuffle to ensure locking on the disk.
8453 - We create a PRIVATE lockfile, that is just a placeholder on the disk.
8454 - Then we HARDLINK the real lockfile to that private file.
8455 - If our file can 2 references, then we have the lock. :)
8456 - Otherwise we lather, rise, and repeat.
8457 - We default to a 4 hour timeout.
8458 - """
8459 -
8460 - self.myhardlock = self.hardlock_name(self.lockdir)
8461 -
8462 - start_time = time.time()
8463 - reported_waiting = False
8464 -
8465 - while(time.time() < (start_time + max_wait)):
8466 - # We only need it to exist.
8467 - self.myfd = os.open(self.myhardlock, os.O_CREAT|os.O_RDWR,0660)
8468 - os.close(self.myfd)
8469 -
8470 - self.add_hardlock_file_to_cleanup()
8471 - if not os.path.exists(self.myhardlock):
8472 - raise FileNotFound, "Created lockfile is missing: %(filename)s" % {"filename":self.myhardlock}
8473 - try:
8474 - res = os.link(self.myhardlock, self.lockfile)
8475 - except SystemExit, e:
8476 - raise
8477 - except Exception, e:
8478 -# if "DEBUG" in self.settings:
8479 -# print "lockfile(): Hardlink: Link failed."
8480 -# print "Exception: ",e
8481 - pass
8482 -
8483 - if self.hardlink_is_mine(self.myhardlock, self.lockfile):
8484 - # We have the lock.
8485 - if reported_waiting:
8486 - print
8487 - return True
8488 -
8489 - if reported_waiting:
8490 - writemsg(".")
8491 - else:
8492 - reported_waiting = True
8493 - print
8494 - print "Waiting on (hardlink) lockfile: (one '.' per 3 seconds)"
8495 - print "Lockfile: " + self.lockfile
8496 - time.sleep(3)
8497 -
8498 - os.unlink(self.myhardlock)
8499 - return False
8500 -
8501 - def hard_unlock(self):
8502 - try:
8503 - if os.path.exists(self.myhardlock):
8504 - os.unlink(self.myhardlock)
8505 - if os.path.exists(self.lockfile):
8506 - os.unlink(self.lockfile)
8507 - except SystemExit, e:
8508 - raise
8509 - except:
8510 - writemsg("Something strange happened to our hardlink locks.\n")
8511 -
8512 - def add_hardlock_file_to_cleanup(self):
8513 - #mypath = self.normpath(path)
8514 - if os.path.isdir(self.lockdir) and os.path.isfile(self.myhardlock):
8515 - self.hardlock_paths[self.lockdir]=self.myhardlock
8516 -
8517 - def remove_hardlock_file_from_cleanup(self):
8518 - if self.lockdir in self.hardlock_paths:
8519 - del self.hardlock_paths[self.lockdir]
8520 - print self.hardlock_paths
8521 -
8522 - def hardlock_name(self, path):
8523 - mypath=path+"/.hardlock-"+os.uname()[1]+"-"+str(os.getpid())
8524 - newpath = os.path.normpath(mypath)
8525 - if len(newpath) > 1:
8526 - if newpath[1] == "/":
8527 - newpath = "/"+newpath.lstrip("/")
8528 - return newpath
8529 -
8530 - def hardlink_is_mine(self,link,lock):
8531 - import stat
8532 - try:
8533 - myhls = os.stat(link)
8534 - mylfs = os.stat(lock)
8535 - except SystemExit, e:
8536 - raise
8537 - except:
8538 - myhls = None
8539 - mylfs = None
8540 -
8541 - if myhls:
8542 - if myhls[stat.ST_NLINK] == 2:
8543 - return True
8544 - if mylfs:
8545 - if mylfs[stat.ST_INO] == myhls[stat.ST_INO]:
8546 - return True
8547 - return False
8548 -
8549 - def hardlink_active(lock):
8550 - if not os.path.exists(lock):
8551 - return False
8552 -
8553 - def clean_my_hardlocks(self):
8554 - try:
8555 - for x in self.hardlock_paths.keys():
8556 - self.hardlock_cleanup(x)
8557 - except AttributeError:
8558 - pass
8559 -
8560 - def hardlock_cleanup(self,path):
8561 - mypid = str(os.getpid())
8562 - myhost = os.uname()[1]
8563 - mydl = os.listdir(path)
8564 - results = []
8565 - mycount = 0
8566 -
8567 - mylist = {}
8568 - for x in mydl:
8569 - filepath=path+"/"+x
8570 - if os.path.isfile(filepath):
8571 - parts = filepath.split(".hardlock-")
8572 - if len(parts) == 2:
8573 - filename = parts[0]
8574 - hostpid = parts[1].split("-")
8575 - host = "-".join(hostpid[:-1])
8576 - pid = hostpid[-1]
8577 - if filename not in mylist:
8578 - mylist[filename] = {}
8579 -
8580 - if host not in mylist[filename]:
8581 - mylist[filename][host] = []
8582 - mylist[filename][host].append(pid)
8583 - mycount += 1
8584 - else:
8585 - mylist[filename][host].append(pid)
8586 - mycount += 1
8587 -
8588 -
8589 - results.append("Found %(count)s locks" % {"count":mycount})
8590 - for x in mylist.keys():
8591 - if myhost in mylist[x]:
8592 - mylockname = self.hardlock_name(x)
8593 - if self.hardlink_is_mine(mylockname, self.lockfile) or \
8594 - not os.path.exists(self.lockfile):
8595 - for y in mylist[x].keys():
8596 - for z in mylist[x][y]:
8597 - filename = x+".hardlock-"+y+"-"+z
8598 - if filename == mylockname:
8599 - self.hard_unlock()
8600 - continue
8601 - try:
8602 - # We're sweeping through, unlinking everyone's locks.
8603 - os.unlink(filename)
8604 - results.append("Unlinked: " + filename)
8605 - except SystemExit, e:
8606 - raise
8607 - except Exception,e:
8608 - pass
8609 - try:
8610 - os.unlink(x)
8611 - results.append("Unlinked: " + x)
8612 - os.unlink(mylockname)
8613 - results.append("Unlinked: " + mylockname)
8614 - except SystemExit, e:
8615 - raise
8616 - except Exception,e:
8617 - pass
8618 - else:
8619 - try:
8620 - os.unlink(mylockname)
8621 - results.append("Unlinked: " + mylockname)
8622 - except SystemExit, e:
8623 - raise
8624 - except Exception,e:
8625 - pass
8626 - return results
8627 -
8628 -if __name__ == "__main__":
8629 -
8630 - def lock_work():
8631 - print
8632 - for i in range(1,6):
8633 - print i,time.time()
8634 - time.sleep(1)
8635 - print
8636 - def normpath(mypath):
8637 - newpath = os.path.normpath(mypath)
8638 - if len(newpath) > 1:
8639 - if newpath[1] == "/":
8640 - newpath = "/"+newpath.lstrip("/")
8641 - return newpath
8642 -
8643 - print "Lock 5 starting"
8644 - import time
8645 - Lock1=LockDir("/tmp/lock_path")
8646 - Lock1.write_lock()
8647 - print "Lock1 write lock"
8648 -
8649 - lock_work()
8650 -
8651 - Lock1.unlock()
8652 - print "Lock1 unlock"
8653 -
8654 - Lock1.read_lock()
8655 - print "Lock1 read lock"
8656 -
8657 - lock_work()
8658 -
8659 - Lock1.unlock()
8660 - print "Lock1 unlock"
8661 -
8662 - Lock1.read_lock()
8663 - print "Lock1 read lock"
8664 -
8665 - Lock1.write_lock()
8666 - print "Lock1 write lock"
8667 -
8668 - lock_work()
8669 -
8670 - Lock1.unlock()
8671 - print "Lock1 unlock"
8672 -
8673 - Lock1.read_lock()
8674 - print "Lock1 read lock"
8675 -
8676 - lock_work()
8677 -
8678 - Lock1.unlock()
8679 - print "Lock1 unlock"
8680 -
8681 -#Lock1.write_lock()
8682 -#time.sleep(2)
8683 -#Lock1.unlock()
8684 - ##Lock1.write_lock()
8685 - #time.sleep(2)
8686 - #Lock1.unlock()
8687 diff --git a/modules/catalyst_support.py b/modules/catalyst_support.py
8688 deleted file mode 100644
8689 index 316dfa3..0000000
8690 --- a/modules/catalyst_support.py
8691 +++ /dev/null
8692 @@ -1,718 +0,0 @@
8693 -
8694 -import sys,string,os,types,re,signal,traceback,time
8695 -#import md5,sha
8696 -selinux_capable = False
8697 -#userpriv_capable = (os.getuid() == 0)
8698 -#fakeroot_capable = False
8699 -BASH_BINARY = "/bin/bash"
8700 -
8701 -try:
8702 - import resource
8703 - max_fd_limit=resource.getrlimit(RLIMIT_NOFILE)
8704 -except SystemExit, e:
8705 - raise
8706 -except:
8707 - # hokay, no resource module.
8708 - max_fd_limit=256
8709 -
8710 -# pids this process knows of.
8711 -spawned_pids = []
8712 -
8713 -try:
8714 - import urllib
8715 -except SystemExit, e:
8716 - raise
8717 -
8718 -def cleanup(pids,block_exceptions=True):
8719 - """function to go through and reap the list of pids passed to it"""
8720 - global spawned_pids
8721 - if type(pids) == int:
8722 - pids = [pids]
8723 - for x in pids:
8724 - try:
8725 - os.kill(x,signal.SIGTERM)
8726 - if os.waitpid(x,os.WNOHANG)[1] == 0:
8727 - # feisty bugger, still alive.
8728 - os.kill(x,signal.SIGKILL)
8729 - os.waitpid(x,0)
8730 -
8731 - except OSError, oe:
8732 - if block_exceptions:
8733 - pass
8734 - if oe.errno not in (10,3):
8735 - raise oe
8736 - except SystemExit:
8737 - raise
8738 - except Exception:
8739 - if block_exceptions:
8740 - pass
8741 - try: spawned_pids.remove(x)
8742 - except IndexError: pass
8743 -
8744 -
8745 -
8746 -# a function to turn a string of non-printable characters into a string of
8747 -# hex characters
8748 -def hexify(str):
8749 - hexStr = string.hexdigits
8750 - r = ''
8751 - for ch in str:
8752 - i = ord(ch)
8753 - r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
8754 - return r
8755 -# hexify()
8756 -
8757 -def generate_contents(file,contents_function="auto",verbose=False):
8758 - try:
8759 - _ = contents_function
8760 - if _ == 'auto' and file.endswith('.iso'):
8761 - _ = 'isoinfo-l'
8762 - if (_ in ['tar-tv','auto']):
8763 - if file.endswith('.tgz') or file.endswith('.tar.gz'):
8764 - _ = 'tar-tvz'
8765 - elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
8766 - _ = 'tar-tvj'
8767 - elif file.endswith('.tar'):
8768 - _ = 'tar-tv'
8769 -
8770 - if _ == 'auto':
8771 - warn('File %r has unknown type for automatic detection.' % (file, ))
8772 - return None
8773 - else:
8774 - contents_function = _
8775 - _ = contents_map[contents_function]
8776 - return _[0](file,_[1],verbose)
8777 - except:
8778 - raise CatalystError,\
8779 - "Error generating contents, is appropriate utility (%s) installed on your system?" \
8780 - % (contents_function, )
8781 -
8782 -def calc_contents(file,cmd,verbose):
8783 - args={ 'file': file }
8784 - cmd=cmd % dict(args)
8785 - a=os.popen(cmd)
8786 - mylines=a.readlines()
8787 - a.close()
8788 - result="".join(mylines)
8789 - if verbose:
8790 - print result
8791 - return result
8792 -
8793 -# This has map must be defined after the function calc_content
8794 -# It is possible to call different functions from this but they must be defined
8795 -# before hash_map
8796 -# Key,function,cmd
8797 -contents_map={
8798 - # 'find' is disabled because it requires the source path, which is not
8799 - # always available
8800 - #"find" :[calc_contents,"find %(path)s"],
8801 - "tar-tv":[calc_contents,"tar tvf %(file)s"],
8802 - "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
8803 - "tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
8804 - "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
8805 - # isoinfo-f should be a last resort only
8806 - "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
8807 -}
8808 -
8809 -def generate_hash(file,hash_function="crc32",verbose=False):
8810 - try:
8811 - return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
8812 - hash_map[hash_function][3],verbose)
8813 - except:
8814 - raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
8815 -
8816 -def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
8817 - a=os.popen(cmd+" "+cmd_args+" "+file)
8818 - mylines=a.readlines()
8819 - a.close()
8820 - mylines=mylines[0].split()
8821 - result=mylines[0]
8822 - if verbose:
8823 - print id_string+" (%s) = %s" % (file, result)
8824 - return result
8825 -
8826 -def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
8827 - a=os.popen(cmd+" "+cmd_args+" "+file)
8828 - header=a.readline()
8829 - mylines=a.readline().split()
8830 - hash=mylines[0]
8831 - short_file=os.path.split(mylines[1])[1]
8832 - a.close()
8833 - result=header+hash+" "+short_file+"\n"
8834 - if verbose:
8835 - print header+" (%s) = %s" % (short_file, result)
8836 - return result
8837 -
8838 -# This has map must be defined after the function calc_hash
8839 -# It is possible to call different functions from this but they must be defined
8840 -# before hash_map
8841 -# Key,function,cmd,cmd_args,Print string
8842 -hash_map={
8843 - "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
8844 - "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
8845 - "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
8846 - "gost":[calc_hash2,"shash","-a GOST","GOST"],\
8847 - "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
8848 - "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
8849 - "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
8850 - "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
8851 - "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
8852 - "md2":[calc_hash2,"shash","-a MD2","MD2"],\
8853 - "md4":[calc_hash2,"shash","-a MD4","MD4"],\
8854 - "md5":[calc_hash2,"shash","-a MD5","MD5"],\
8855 - "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
8856 - "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
8857 - "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
8858 - "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
8859 - "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
8860 - "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
8861 - "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
8862 - "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
8863 - "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
8864 - "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
8865 - "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
8866 - "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
8867 - "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
8868 - "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
8869 - "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
8870 - }
8871 -
8872 -def read_from_clst(file):
8873 - line = ''
8874 - myline = ''
8875 - try:
8876 - myf=open(file,"r")
8877 - except:
8878 - return -1
8879 - #raise CatalystError, "Could not open file "+file
8880 - for line in myf.readlines():
8881 - #line = string.replace(line, "\n", "") # drop newline
8882 - myline = myline + line
8883 - myf.close()
8884 - return myline
8885 -# read_from_clst
8886 -
8887 -# these should never be touched
8888 -required_build_targets=["generic_target","generic_stage_target"]
8889 -
8890 -# new build types should be added here
8891 -valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
8892 - "livecd_stage1_target","livecd_stage2_target","embedded_target",
8893 - "tinderbox_target","snapshot_target","netboot_target","netboot2_target"]
8894 -
8895 -required_config_file_values=["storedir","sharedir","distdir","portdir"]
8896 -valid_config_file_values=required_config_file_values[:]
8897 -valid_config_file_values.append("PKGCACHE")
8898 -valid_config_file_values.append("KERNCACHE")
8899 -valid_config_file_values.append("CCACHE")
8900 -valid_config_file_values.append("DISTCC")
8901 -valid_config_file_values.append("ICECREAM")
8902 -valid_config_file_values.append("ENVSCRIPT")
8903 -valid_config_file_values.append("AUTORESUME")
8904 -valid_config_file_values.append("FETCH")
8905 -valid_config_file_values.append("CLEAR_AUTORESUME")
8906 -valid_config_file_values.append("options")
8907 -valid_config_file_values.append("DEBUG")
8908 -valid_config_file_values.append("VERBOSE")
8909 -valid_config_file_values.append("PURGE")
8910 -valid_config_file_values.append("PURGEONLY")
8911 -valid_config_file_values.append("SNAPCACHE")
8912 -valid_config_file_values.append("snapshot_cache")
8913 -valid_config_file_values.append("hash_function")
8914 -valid_config_file_values.append("digests")
8915 -valid_config_file_values.append("contents")
8916 -valid_config_file_values.append("SEEDCACHE")
8917 -
8918 -verbosity=1
8919 -
8920 -def list_bashify(mylist):
8921 - if type(mylist)==types.StringType:
8922 - mypack=[mylist]
8923 - else:
8924 - mypack=mylist[:]
8925 - for x in range(0,len(mypack)):
8926 - # surround args with quotes for passing to bash,
8927 - # allows things like "<" to remain intact
8928 - mypack[x]="'"+mypack[x]+"'"
8929 - mypack=string.join(mypack)
8930 - return mypack
8931 -
8932 -def list_to_string(mylist):
8933 - if type(mylist)==types.StringType:
8934 - mypack=[mylist]
8935 - else:
8936 - mypack=mylist[:]
8937 - for x in range(0,len(mypack)):
8938 - # surround args with quotes for passing to bash,
8939 - # allows things like "<" to remain intact
8940 - mypack[x]=mypack[x]
8941 - mypack=string.join(mypack)
8942 - return mypack
8943 -
8944 -class CatalystError(Exception):
8945 - def __init__(self, message):
8946 - if message:
8947 - (type,value)=sys.exc_info()[:2]
8948 - if value!=None:
8949 - print
8950 - print traceback.print_exc(file=sys.stdout)
8951 - print
8952 - print "!!! catalyst: "+message
8953 - print
8954 -
8955 -class LockInUse(Exception):
8956 - def __init__(self, message):
8957 - if message:
8958 - #(type,value)=sys.exc_info()[:2]
8959 - #if value!=None:
8960 - #print
8961 - #kprint traceback.print_exc(file=sys.stdout)
8962 - print
8963 - print "!!! catalyst lock file in use: "+message
8964 - print
8965 -
8966 -def die(msg=None):
8967 - warn(msg)
8968 - sys.exit(1)
8969 -
8970 -def warn(msg):
8971 - print "!!! catalyst: "+msg
8972 -
8973 -def find_binary(myc):
8974 - """look through the environmental path for an executable file named whatever myc is"""
8975 - # this sucks. badly.
8976 - p=os.getenv("PATH")
8977 - if p == None:
8978 - return None
8979 - for x in p.split(":"):
8980 - #if it exists, and is executable
8981 - if os.path.exists("%s/%s" % (x,myc)) and os.stat("%s/%s" % (x,myc))[0] & 0x0248:
8982 - return "%s/%s" % (x,myc)
8983 - return None
8984 -
8985 -def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
8986 - """spawn mycommand as an arguement to bash"""
8987 - args=[BASH_BINARY]
8988 - if not opt_name:
8989 - opt_name=mycommand.split()[0]
8990 - if "BASH_ENV" not in env:
8991 - env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
8992 - if debug:
8993 - args.append("-x")
8994 - args.append("-c")
8995 - args.append(mycommand)
8996 - return spawn(args,env=env,opt_name=opt_name,**keywords)
8997 -
8998 -#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
8999 -# collect_fds=[1],fd_pipes=None,**keywords):
9000 -
9001 -def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
9002 - collect_fds=[1],fd_pipes=None,**keywords):
9003 - """call spawn, collecting the output to fd's specified in collect_fds list
9004 - emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
9005 - requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
9006 - 'lets let log only stdin and let stderr slide by'.
9007 -
9008 - emulate_gso was deprecated from the day it was added, so convert your code over.
9009 - spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
9010 - global selinux_capable
9011 - pr,pw=os.pipe()
9012 -
9013 - #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
9014 - # s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
9015 - # raise Exception,s
9016 -
9017 - if fd_pipes==None:
9018 - fd_pipes={}
9019 - fd_pipes[0] = 0
9020 -
9021 - for x in collect_fds:
9022 - fd_pipes[x] = pw
9023 - keywords["returnpid"]=True
9024 -
9025 - mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
9026 - os.close(pw)
9027 - if type(mypid) != types.ListType:
9028 - os.close(pr)
9029 - return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
9030 -
9031 - fd=os.fdopen(pr,"r")
9032 - mydata=fd.readlines()
9033 - fd.close()
9034 - if emulate_gso:
9035 - mydata=string.join(mydata)
9036 - if len(mydata) and mydata[-1] == "\n":
9037 - mydata=mydata[:-1]
9038 - retval=os.waitpid(mypid[0],0)[1]
9039 - cleanup(mypid)
9040 - if raw_exit_code:
9041 - return [retval,mydata]
9042 - retval=process_exit_code(retval)
9043 - return [retval, mydata]
9044 -
9045 -# base spawn function
9046 -def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
9047 - uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
9048 - selinux_context=None, raise_signals=False, func_call=False):
9049 - """base fork/execve function.
9050 - mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
9051 - environment, use the appropriate spawn call. This is a straight fork/exec code path.
9052 - Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
9053 - the forked process to said value. If path_lookup is on, a non-absolute command will be converted
9054 - to an absolute command, otherwise it returns None.
9055 -
9056 - selinux_context is the desired context, dependant on selinux being available.
9057 - opt_name controls the name the processor goes by.
9058 - fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
9059 - current fd's raw fd #, desired #.
9060 -
9061 - func_call is a boolean for specifying to execute a python function- use spawn_func instead.
9062 - raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
9063 - if raw_input is on.
9064 -
9065 - logfile overloads the specified fd's to write to a tee process which logs to logfile
9066 - returnpid returns the relevant pids (a list, including the logging process if logfile is on).
9067 -
9068 - non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
9069 - raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""
9070 -
9071 - myc=''
9072 - if not func_call:
9073 - if type(mycommand)==types.StringType:
9074 - mycommand=mycommand.split()
9075 - myc = mycommand[0]
9076 - if not os.access(myc, os.X_OK):
9077 - if not path_lookup:
9078 - return None
9079 - myc = find_binary(myc)
9080 - if myc == None:
9081 - return None
9082 - mypid=[]
9083 - if logfile:
9084 - pr,pw=os.pipe()
9085 - mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
9086 - retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
9087 - if retval != 0:
9088 - # he's dead jim.
9089 - if raw_exit_code:
9090 - return retval
9091 - return process_exit_code(retval)
9092 -
9093 - if fd_pipes == None:
9094 - fd_pipes={}
9095 - fd_pipes[0] = 0
9096 - fd_pipes[1]=pw
9097 - fd_pipes[2]=pw
9098 -
9099 - if not opt_name:
9100 - opt_name = mycommand[0]
9101 - myargs=[opt_name]
9102 - myargs.extend(mycommand[1:])
9103 - global spawned_pids
9104 - mypid.append(os.fork())
9105 - if mypid[-1] != 0:
9106 - #log the bugger.
9107 - spawned_pids.extend(mypid)
9108 -
9109 - if mypid[-1] == 0:
9110 - if func_call:
9111 - spawned_pids = []
9112 -
9113 - # this may look ugly, but basically it moves file descriptors around to ensure no
9114 - # handles that are needed are accidentally closed during the final dup2 calls.
9115 - trg_fd=[]
9116 - if type(fd_pipes)==types.DictType:
9117 - src_fd=[]
9118 - k=fd_pipes.keys()
9119 - k.sort()
9120 -
9121 - #build list of which fds will be where, and where they are at currently
9122 - for x in k:
9123 - trg_fd.append(x)
9124 - src_fd.append(fd_pipes[x])
9125 -
9126 - # run through said list dup'ing descriptors so that they won't be waxed
9127 - # by other dup calls.
9128 - for x in range(0,len(trg_fd)):
9129 - if trg_fd[x] == src_fd[x]:
9130 - continue
9131 - if trg_fd[x] in src_fd[x+1:]:
9132 - new=os.dup2(trg_fd[x],max(src_fd) + 1)
9133 - os.close(trg_fd[x])
9134 - try:
9135 - while True:
9136 - src_fd[s.index(trg_fd[x])]=new
9137 - except SystemExit, e:
9138 - raise
9139 - except:
9140 - pass
9141 -
9142 - # transfer the fds to their final pre-exec position.
9143 - for x in range(0,len(trg_fd)):
9144 - if trg_fd[x] != src_fd[x]:
9145 - os.dup2(src_fd[x], trg_fd[x])
9146 - else:
9147 - trg_fd=[0,1,2]
9148 -
9149 - # wax all open descriptors that weren't requested be left open.
9150 - for x in range(0,max_fd_limit):
9151 - if x not in trg_fd:
9152 - try:
9153 - os.close(x)
9154 - except SystemExit, e:
9155 - raise
9156 - except:
9157 - pass
9158 -
9159 - # note this order must be preserved- can't change gid/groups if you change uid first.
9160 - if selinux_capable and selinux_context:
9161 - import selinux
9162 - selinux.setexec(selinux_context)
9163 - if gid:
9164 - os.setgid(gid)
9165 - if groups:
9166 - os.setgroups(groups)
9167 - if uid:
9168 - os.setuid(uid)
9169 - if umask:
9170 - os.umask(umask)
9171 - else:
9172 - os.umask(022)
9173 -
9174 - try:
9175 - #print "execing", myc, myargs
9176 - if func_call:
9177 - # either use a passed in func for interpretting the results, or return if no exception.
9178 - # note the passed in list, and dict are expanded.
9179 - if len(mycommand) == 4:
9180 - os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
9181 - try:
9182 - mycommand[0](*mycommand[1],**mycommand[2])
9183 - except Exception,e:
9184 - print "caught exception",e," in forked func",mycommand[0]
9185 - sys.exit(0)
9186 -
9187 - #os.execvp(myc,myargs)
9188 - os.execve(myc,myargs,env)
9189 - except SystemExit, e:
9190 - raise
9191 - except Exception, e:
9192 - if not func_call:
9193 - raise str(e)+":\n "+myc+" "+string.join(myargs)
9194 - print "func call failed"
9195 -
9196 - # If the execve fails, we need to report it, and exit
9197 - # *carefully* --- report error here
9198 - os._exit(1)
9199 - sys.exit(1)
9200 - return # should never get reached
9201 -
9202 - # if we were logging, kill the pipes.
9203 - if logfile:
9204 - os.close(pr)
9205 - os.close(pw)
9206 -
9207 - if returnpid:
9208 - return mypid
9209 -
9210 - # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
9211 - # if the main pid (mycommand) returned badly.
9212 - while len(mypid):
9213 - retval=os.waitpid(mypid[-1],0)[1]
9214 - if retval != 0:
9215 - cleanup(mypid[0:-1],block_exceptions=False)
9216 - # at this point we've killed all other kid pids generated via this call.
9217 - # return now.
9218 - if raw_exit_code:
9219 - return retval
9220 - return process_exit_code(retval,throw_signals=raise_signals)
9221 - else:
9222 - mypid.pop(-1)
9223 - cleanup(mypid)
9224 - return 0
9225 -
9226 -def cmd(mycmd,myexc="",env={}):
9227 - try:
9228 - sys.stdout.flush()
9229 - retval=spawn_bash(mycmd,env)
9230 - if retval != 0:
9231 - raise CatalystError,myexc
9232 - except:
9233 - raise
9234 -
9235 -def process_exit_code(retval,throw_signals=False):
9236 - """process a waitpid returned exit code, returning exit code if it exit'd, or the
9237 - signal if it died from signalling
9238 - if throw_signals is on, it raises a SystemExit if the process was signaled.
9239 - This is intended for usage with threads, although at the moment you can't signal individual
9240 - threads in python, only the master thread, so it's a questionable option."""
9241 - if (retval & 0xff)==0:
9242 - return retval >> 8 # return exit code
9243 - else:
9244 - if throw_signals:
9245 - #use systemexit, since portage is stupid about exception catching.
9246 - raise SystemExit()
9247 - return (retval & 0xff) << 8 # interrupted by signal
9248 -
9249 -def file_locate(settings,filelist,expand=1):
9250 - #if expand=1, non-absolute paths will be accepted and
9251 - # expanded to os.getcwd()+"/"+localpath if file exists
9252 - for myfile in filelist:
9253 - if myfile not in settings:
9254 - #filenames such as cdtar are optional, so we don't assume the variable is defined.
9255 - pass
9256 - else:
9257 - if len(settings[myfile])==0:
9258 - raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
9259 - if settings[myfile][0]=="/":
9260 - if not os.path.exists(settings[myfile]):
9261 - raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
9262 - elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
9263 - settings[myfile]=os.getcwd()+"/"+settings[myfile]
9264 - else:
9265 - raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
9266 -"""
9267 -Spec file format:
9268 -
9269 -The spec file format is a very simple and easy-to-use format for storing data. Here's an example
9270 -file:
9271 -
9272 -item1: value1
9273 -item2: foo bar oni
9274 -item3:
9275 - meep
9276 - bark
9277 - gleep moop
9278 -
9279 -This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
9280 -the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
9281 -would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
9282 -that the order of multiple-value items is preserved, but the order that the items themselves are
9283 -defined are not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
9284 -"item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
9285 -"""
9286 -
9287 -def parse_makeconf(mylines):
9288 - mymakeconf={}
9289 - pos=0
9290 - pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
9291 - while pos<len(mylines):
9292 - if len(mylines[pos])<=1:
9293 - #skip blanks
9294 - pos += 1
9295 - continue
9296 - if mylines[pos][0] in ["#"," ","\t"]:
9297 - #skip indented lines, comments
9298 - pos += 1
9299 - continue
9300 - else:
9301 - myline=mylines[pos]
9302 - mobj=pat.match(myline)
9303 - pos += 1
9304 - if mobj.group(2):
9305 - clean_string = re.sub(r"\"",r"",mobj.group(2))
9306 - mymakeconf[mobj.group(1)]=clean_string
9307 - return mymakeconf
9308 -
9309 -def read_makeconf(mymakeconffile):
9310 - if os.path.exists(mymakeconffile):
9311 - try:
9312 - try:
9313 - import snakeoil.fileutils
9314 - return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
9315 - except ImportError:
9316 - try:
9317 - import portage.util
9318 - return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
9319 - except:
9320 - try:
9321 - import portage_util
9322 - return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
9323 - except ImportError:
9324 - myf=open(mymakeconffile,"r")
9325 - mylines=myf.readlines()
9326 - myf.close()
9327 - return parse_makeconf(mylines)
9328 - except:
9329 - raise CatalystError, "Could not parse make.conf file "+mymakeconffile
9330 - else:
9331 - makeconf={}
9332 - return makeconf
9333 -
9334 -def msg(mymsg,verblevel=1):
9335 - if verbosity>=verblevel:
9336 - print mymsg
9337 -
9338 -def pathcompare(path1,path2):
9339 - # Change double slashes to slash
9340 - path1 = re.sub(r"//",r"/",path1)
9341 - path2 = re.sub(r"//",r"/",path2)
9342 - # Removing ending slash
9343 - path1 = re.sub("/$","",path1)
9344 - path2 = re.sub("/$","",path2)
9345 -
9346 - if path1 == path2:
9347 - return 1
9348 - return 0
9349 -
9350 -def ismount(path):
9351 - "enhanced to handle bind mounts"
9352 - if os.path.ismount(path):
9353 - return 1
9354 - a=os.popen("mount")
9355 - mylines=a.readlines()
9356 - a.close()
9357 - for line in mylines:
9358 - mysplit=line.split()
9359 - if pathcompare(path,mysplit[2]):
9360 - return 1
9361 - return 0
9362 -
9363 -def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
9364 - "helper function to help targets parse additional arguments"
9365 - global valid_config_file_values
9366 -
9367 - messages = []
9368 - for x in addlargs.keys():
9369 - if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
9370 - messages.append("Argument \""+x+"\" not recognized.")
9371 - else:
9372 - myspec[x]=addlargs[x]
9373 -
9374 - for x in requiredspec:
9375 - if x not in myspec:
9376 - messages.append("Required argument \""+x+"\" not specified.")
9377 -
9378 - if messages:
9379 - raise CatalystError, '\n\tAlso: '.join(messages)
9380 -
9381 -def touch(myfile):
9382 - try:
9383 - myf=open(myfile,"w")
9384 - myf.close()
9385 - except IOError:
9386 - raise CatalystError, "Could not touch "+myfile+"."
9387 -
9388 -def countdown(secs=5, doing="Starting"):
9389 - if secs:
9390 - print ">>> Waiting",secs,"seconds before starting..."
9391 - print ">>> (Control-C to abort)...\n"+doing+" in: ",
9392 - ticks=range(secs)
9393 - ticks.reverse()
9394 - for sec in ticks:
9395 - sys.stdout.write(str(sec+1)+" ")
9396 - sys.stdout.flush()
9397 - time.sleep(1)
9398 - print
9399 -
9400 -def normpath(mypath):
9401 - TrailingSlash=False
9402 - if mypath[-1] == "/":
9403 - TrailingSlash=True
9404 - newpath = os.path.normpath(mypath)
9405 - if len(newpath) > 1:
9406 - if newpath[:2] == "//":
9407 - newpath = newpath[1:]
9408 - if TrailingSlash:
9409 - newpath=newpath+'/'
9410 - return newpath
9411 diff --git a/modules/embedded_target.py b/modules/embedded_target.py
9412 deleted file mode 100644
9413 index f38ea00..0000000
9414 --- a/modules/embedded_target.py
9415 +++ /dev/null
9416 @@ -1,51 +0,0 @@
9417 -"""
9418 -Enbedded target, similar to the stage2 target, builds upon a stage2 tarball.
9419 -
9420 -A stage2 tarball is unpacked, but instead
9421 -of building a stage3, it emerges @system into another directory
9422 -inside the stage2 system. This way, we do not have to emerge GCC/portage
9423 -into the staged system.
9424 -It may sound complicated but basically it runs
9425 -ROOT=/tmp/submerge emerge --something foo bar .
9426 -"""
9427 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
9428 -
9429 -import os,string,imp,types,shutil
9430 -from catalyst_support import *
9431 -from generic_stage_target import *
9432 -from stat import *
9433 -
9434 -class embedded_target(generic_stage_target):
9435 - """
9436 - Builder class for embedded target
9437 - """
9438 - def __init__(self,spec,addlargs):
9439 - self.required_values=[]
9440 - self.valid_values=[]
9441 - self.valid_values.extend(["embedded/empty","embedded/rm","embedded/unmerge","embedded/fs-prepare","embedded/fs-finish","embedded/mergeroot","embedded/packages","embedded/fs-type","embedded/runscript","boot/kernel","embedded/linuxrc"])
9442 - self.valid_values.extend(["embedded/use"])
9443 - if "embedded/fs-type" in addlargs:
9444 - self.valid_values.append("embedded/fs-ops")
9445 -
9446 - generic_stage_target.__init__(self,spec,addlargs)
9447 - self.set_build_kernel_vars(addlargs)
9448 -
9449 - def set_action_sequence(self):
9450 - self.settings["action_sequence"]=["dir_setup","unpack","unpack_snapshot",\
9451 - "config_profile_link","setup_confdir",\
9452 - "portage_overlay","bind","chroot_setup",\
9453 - "setup_environment","build_kernel","build_packages",\
9454 - "bootloader","root_overlay","fsscript","unmerge",\
9455 - "unbind","remove","empty","clean","capture","clear_autoresume"]
9456 -
9457 - def set_stage_path(self):
9458 - self.settings["stage_path"]=normpath(self.settings["chroot_path"]+"/tmp/mergeroot")
9459 - print "embedded stage path is "+self.settings["stage_path"]
9460 -
9461 - def set_root_path(self):
9462 - self.settings["root_path"]=normpath("/tmp/mergeroot")
9463 - print "embedded root path is "+self.settings["root_path"]
9464 -
9465 -def register(foo):
9466 - foo.update({"embedded":embedded_target})
9467 - return foo
9468 diff --git a/modules/generic_stage_target.py b/modules/generic_stage_target.py
9469 deleted file mode 100644
9470 index a5b52b0..0000000
9471 --- a/modules/generic_stage_target.py
9472 +++ /dev/null
9473 @@ -1,1740 +0,0 @@
9474 -import os,string,imp,types,shutil
9475 -from catalyst_support import *
9476 -from generic_target import *
9477 -from stat import *
9478 -import catalyst_lock
9479 -
9480 -
9481 -PORT_LOGDIR_CLEAN = \
9482 - 'find "${PORT_LOGDIR}" -type f ! -name "summary.log*" -mtime +30 -delete'
9483 -
9484 -TARGET_MOUNTS_DEFAULTS = {
9485 - "ccache": "/var/tmp/ccache",
9486 - "dev": "/dev",
9487 - "devpts": "/dev/pts",
9488 - "distdir": "/usr/portage/distfiles",
9489 - "icecream": "/usr/lib/icecc/bin",
9490 - "kerncache": "/tmp/kerncache",
9491 - "packagedir": "/usr/portage/packages",
9492 - "portdir": "/usr/portage",
9493 - "port_tmpdir": "/var/tmp/portage",
9494 - "port_logdir": "/var/log/portage",
9495 - "proc": "/proc",
9496 - "shm": "/dev/shm",
9497 - }
9498 -
9499 -SOURCE_MOUNTS_DEFAULTS = {
9500 - "dev": "/dev",
9501 - "devpts": "/dev/pts",
9502 - "distdir": "/usr/portage/distfiles",
9503 - "portdir": "/usr/portage",
9504 - "port_tmpdir": "tmpfs",
9505 - "proc": "/proc",
9506 - "shm": "shmfs",
9507 - }
9508 -
9509 -
9510 -class generic_stage_target(generic_target):
9511 - """
9512 - This class does all of the chroot setup, copying of files, etc. It is
9513 - the driver class for pretty much everything that Catalyst does.
9514 - """
9515 - def __init__(self,myspec,addlargs):
9516 - self.required_values.extend(["version_stamp","target","subarch",\
9517 - "rel_type","profile","snapshot","source_subpath"])
9518 -
9519 - self.valid_values.extend(["version_stamp","target","subarch",\
9520 - "rel_type","profile","snapshot","source_subpath","portage_confdir",\
9521 - "cflags","cxxflags","ldflags","cbuild","hostuse","portage_overlay",\
9522 - "distcc_hosts","makeopts","pkgcache_path","kerncache_path"])
9523 -
9524 - self.set_valid_build_kernel_vars(addlargs)
9525 - generic_target.__init__(self,myspec,addlargs)
9526 -
9527 - """
9528 - The semantics of subarchmap and machinemap changed a bit in 2.0.3 to
9529 - work better with vapier's CBUILD stuff. I've removed the "monolithic"
9530 - machinemap from this file and split up its contents amongst the
9531 - various arch/foo.py files.
9532 -
9533 - When register() is called on each module in the arch/ dir, it now
9534 - returns a tuple instead of acting on the subarchmap dict that is
9535 - passed to it. The tuple contains the values that were previously
9536 - added to subarchmap as well as a new list of CHOSTs that go along
9537 - with that arch. This allows us to build machinemap on the fly based
9538 - on the keys in subarchmap and the values of the 2nd list returned
9539 - (tmpmachinemap).
9540 -
9541 - Also, after talking with vapier. I have a slightly better idea of what
9542 - certain variables are used for and what they should be set to. Neither
9543 - 'buildarch' or 'hostarch' are used directly, so their value doesn't
9544 - really matter. They are just compared to determine if we are
9545 - cross-compiling. Because of this, they are just set to the name of the
9546 - module in arch/ that the subarch is part of to make things simpler.
9547 - The entire build process is still based off of 'subarch' like it was
9548 - previously. -agaffney
9549 - """
9550 -
9551 - self.archmap = {}
9552 - self.subarchmap = {}
9553 - machinemap = {}
9554 - for x in [x[:-3] for x in os.listdir(self.settings["sharedir"]+\
9555 - "/arch/") if x.endswith(".py")]:
9556 - try:
9557 - fh=open(self.settings["sharedir"]+"/arch/"+x+".py")
9558 - """
9559 - This next line loads the plugin as a module and assigns it to
9560 - archmap[x]
9561 - """
9562 - self.archmap[x]=imp.load_module(x,fh,"arch/"+x+\
9563 - ".py",(".py","r",imp.PY_SOURCE))
9564 - """
9565 - This next line registers all the subarches supported in the
9566 - plugin
9567 - """
9568 - tmpsubarchmap, tmpmachinemap = self.archmap[x].register()
9569 - self.subarchmap.update(tmpsubarchmap)
9570 - for machine in tmpmachinemap:
9571 - machinemap[machine] = x
9572 - for subarch in tmpsubarchmap:
9573 - machinemap[subarch] = x
9574 - fh.close()
9575 - except IOError:
9576 - """
9577 - This message should probably change a bit, since everything in
9578 - the dir should load just fine. If it doesn't, it's probably a
9579 - syntax error in the module
9580 - """
9581 - msg("Can't find/load "+x+".py plugin in "+\
9582 - self.settings["sharedir"]+"/arch/")
9583 -
9584 - if "chost" in self.settings:
9585 - hostmachine = self.settings["chost"].split("-")[0]
9586 - if hostmachine not in machinemap:
9587 - raise CatalystError, "Unknown host machine type "+hostmachine
9588 - self.settings["hostarch"]=machinemap[hostmachine]
9589 - else:
9590 - hostmachine = self.settings["subarch"]
9591 - if hostmachine in machinemap:
9592 - hostmachine = machinemap[hostmachine]
9593 - self.settings["hostarch"]=hostmachine
9594 - if "cbuild" in self.settings:
9595 - buildmachine = self.settings["cbuild"].split("-")[0]
9596 - else:
9597 - buildmachine = os.uname()[4]
9598 - if buildmachine not in machinemap:
9599 - raise CatalystError, "Unknown build machine type "+buildmachine
9600 - self.settings["buildarch"]=machinemap[buildmachine]
9601 - self.settings["crosscompile"]=(self.settings["hostarch"]!=\
9602 - self.settings["buildarch"])
9603 -
9604 - """ Call arch constructor, pass our settings """
9605 - try:
9606 - self.arch=self.subarchmap[self.settings["subarch"]](self.settings)
9607 - except KeyError:
9608 - print "Invalid subarch: "+self.settings["subarch"]
9609 - print "Choose one of the following:",
9610 - for x in self.subarchmap:
9611 - print x,
9612 - print
9613 - sys.exit(2)
9614 -
9615 - print "Using target:",self.settings["target"]
9616 - """ Print a nice informational message """
9617 - if self.settings["buildarch"]==self.settings["hostarch"]:
9618 - print "Building natively for",self.settings["hostarch"]
9619 - elif self.settings["crosscompile"]:
9620 - print "Cross-compiling on",self.settings["buildarch"],\
9621 - "for different machine type",self.settings["hostarch"]
9622 - else:
9623 - print "Building on",self.settings["buildarch"],\
9624 - "for alternate personality type",self.settings["hostarch"]
9625 -
9626 - """ This must be set first as other set_ options depend on this """
9627 - self.set_spec_prefix()
9628 -
9629 - """ Define all of our core variables """
9630 - self.set_target_profile()
9631 - self.set_target_subpath()
9632 - self.set_source_subpath()
9633 -
9634 - """ Set paths """
9635 - self.set_snapshot_path()
9636 - self.set_root_path()
9637 - self.set_source_path()
9638 - self.set_snapcache_path()
9639 - self.set_chroot_path()
9640 - self.set_autoresume_path()
9641 - self.set_dest_path()
9642 - self.set_stage_path()
9643 - self.set_target_path()
9644 -
9645 - self.set_controller_file()
9646 - self.set_action_sequence()
9647 - self.set_use()
9648 - self.set_cleanables()
9649 - self.set_iso_volume_id()
9650 - self.set_build_kernel_vars()
9651 - self.set_fsscript()
9652 - self.set_install_mask()
9653 - self.set_rcadd()
9654 - self.set_rcdel()
9655 - self.set_cdtar()
9656 - self.set_fstype()
9657 - self.set_fsops()
9658 - self.set_iso()
9659 - self.set_packages()
9660 - self.set_rm()
9661 - self.set_linuxrc()
9662 - self.set_busybox_config()
9663 - self.set_overlay()
9664 - self.set_portage_overlay()
9665 - self.set_root_overlay()
9666 -
9667 - """
9668 - This next line checks to make sure that the specified variables exist
9669 - on disk.
9670 - """
9671 - #pdb.set_trace()
9672 - file_locate(self.settings,["source_path","snapshot_path","distdir"],\
9673 - expand=0)
9674 - """ If we are using portage_confdir, check that as well. """
9675 - if "portage_confdir" in self.settings:
9676 - file_locate(self.settings,["portage_confdir"],expand=0)
9677 -
9678 - """ Setup our mount points """
9679 - # initialize our target mounts.
9680 - self.target_mounts = TARGET_MOUNTS_DEFAULTS.copy()
9681 -
9682 - self.mounts = ["proc", "dev", "portdir", "distdir", "port_tmpdir"]
9683 - # initialize our source mounts
9684 - self.mountmap = SOURCE_MOUNTS_DEFAULTS.copy()
9685 - # update them from settings
9686 - self.mountmap["distdir"] = self.settings["distdir"]
9687 - self.mountmap["portdir"] = normpath("/".join([
9688 - self.settings["snapshot_cache_path"],
9689 - self.settings["repo_name"],
9690 - ]))
9691 - if "SNAPCACHE" not in self.settings:
9692 - self.mounts.remove("portdir")
9693 - #self.mountmap["portdir"] = None
9694 - if os.uname()[0] == "Linux":
9695 - self.mounts.append("devpts")
9696 - self.mounts.append("shm")
9697 -
9698 - self.set_mounts()
9699 -
9700 - """
9701 - Configure any user specified options (either in catalyst.conf or on
9702 - the command line).
9703 - """
9704 - if "PKGCACHE" in self.settings:
9705 - self.set_pkgcache_path()
9706 - print "Location of the package cache is "+\
9707 - self.settings["pkgcache_path"]
9708 - self.mounts.append("packagedir")
9709 - self.mountmap["packagedir"] = self.settings["pkgcache_path"]
9710 -
9711 - if "KERNCACHE" in self.settings:
9712 - self.set_kerncache_path()
9713 - print "Location of the kerncache is "+\
9714 - self.settings["kerncache_path"]
9715 - self.mounts.append("kerncache")
9716 - self.mountmap["kerncache"] = self.settings["kerncache_path"]
9717 -
9718 - if "CCACHE" in self.settings:
9719 - if "CCACHE_DIR" in os.environ:
9720 - ccdir=os.environ["CCACHE_DIR"]
9721 - del os.environ["CCACHE_DIR"]
9722 - else:
9723 - ccdir="/root/.ccache"
9724 - if not os.path.isdir(ccdir):
9725 - raise CatalystError,\
9726 - "Compiler cache support can't be enabled (can't find "+\
9727 - ccdir+")"
9728 - self.mounts.append("ccache")
9729 - self.mountmap["ccache"] = ccdir
9730 - """ for the chroot: """
9731 - self.env["CCACHE_DIR"] = self.target_mounts["ccache"]
9732 -
9733 - if "ICECREAM" in self.settings:
9734 - self.mounts.append("icecream")
9735 - self.mountmap["icecream"] = self.settings["icecream"]
9736 - self.env["PATH"] = self.target_mounts["icecream"] + ":" + \
9737 - self.env["PATH"]
9738 -
9739 - if "port_logdir" in self.settings:
9740 - self.mounts.append("port_logdir")
9741 - self.mountmap["port_logdir"] = self.settings["port_logdir"]
9742 - self.env["PORT_LOGDIR"] = self.settings["port_logdir"]
9743 - self.env["PORT_LOGDIR_CLEAN"] = PORT_LOGDIR_CLEAN
9744 -
9745 - def override_cbuild(self):
9746 - if "CBUILD" in self.makeconf:
9747 - self.settings["CBUILD"]=self.makeconf["CBUILD"]
9748 -
9749 - def override_chost(self):
9750 - if "CHOST" in self.makeconf:
9751 - self.settings["CHOST"]=self.makeconf["CHOST"]
9752 -
9753 - def override_cflags(self):
9754 - if "CFLAGS" in self.makeconf:
9755 - self.settings["CFLAGS"]=self.makeconf["CFLAGS"]
9756 -
9757 - def override_cxxflags(self):
9758 - if "CXXFLAGS" in self.makeconf:
9759 - self.settings["CXXFLAGS"]=self.makeconf["CXXFLAGS"]
9760 -
9761 - def override_ldflags(self):
9762 - if "LDFLAGS" in self.makeconf:
9763 - self.settings["LDFLAGS"]=self.makeconf["LDFLAGS"]
9764 -
9765 - def set_install_mask(self):
9766 - if "install_mask" in self.settings:
9767 - if type(self.settings["install_mask"])!=types.StringType:
9768 - self.settings["install_mask"]=\
9769 - string.join(self.settings["install_mask"])
9770 -
9771 - def set_spec_prefix(self):
9772 - self.settings["spec_prefix"]=self.settings["target"]
9773 -
9774 - def set_target_profile(self):
9775 - self.settings["target_profile"]=self.settings["profile"]
9776 -
9777 - def set_target_subpath(self):
9778 - self.settings["target_subpath"]=self.settings["rel_type"]+"/"+\
9779 - self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
9780 - self.settings["version_stamp"]
9781 -
9782 - def set_source_subpath(self):
9783 - if type(self.settings["source_subpath"])!=types.StringType:
9784 - raise CatalystError,\
9785 - "source_subpath should have been a string. Perhaps you have something wrong in your spec file?"
9786 -
9787 - def set_pkgcache_path(self):
9788 - if "pkgcache_path" in self.settings:
9789 - if type(self.settings["pkgcache_path"])!=types.StringType:
9790 - self.settings["pkgcache_path"]=\
9791 - normpath(string.join(self.settings["pkgcache_path"]))
9792 - else:
9793 - self.settings["pkgcache_path"]=\
9794 - normpath(self.settings["storedir"]+"/packages/"+\
9795 - self.settings["target_subpath"]+"/")
9796 -
9797 - def set_kerncache_path(self):
9798 - if "kerncache_path" in self.settings:
9799 - if type(self.settings["kerncache_path"])!=types.StringType:
9800 - self.settings["kerncache_path"]=\
9801 - normpath(string.join(self.settings["kerncache_path"]))
9802 - else:
9803 - self.settings["kerncache_path"]=normpath(self.settings["storedir"]+\
9804 - "/kerncache/"+self.settings["target_subpath"]+"/")
9805 -
9806 - def set_target_path(self):
9807 - self.settings["target_path"] = normpath(self.settings["storedir"] +
9808 - "/builds/" + self.settings["target_subpath"].rstrip('/') +
9809 - ".tar.bz2")
9810 - if "AUTORESUME" in self.settings\
9811 - and os.path.exists(self.settings["autoresume_path"]+\
9812 - "setup_target_path"):
9813 - print \
9814 - "Resume point detected, skipping target path setup operation..."
9815 - else:
9816 - """ First clean up any existing target stuff """
9817 - # XXX WTF are we removing the old tarball before we start building the
9818 - # XXX new one? If the build fails, you don't want to be left with
9819 - # XXX nothing at all
9820 -# if os.path.isfile(self.settings["target_path"]):
9821 -# cmd("rm -f "+self.settings["target_path"],\
9822 -# "Could not remove existing file: "\
9823 -# +self.settings["target_path"],env=self.env)
9824 - touch(self.settings["autoresume_path"]+"setup_target_path")
9825 -
9826 - if not os.path.exists(self.settings["storedir"]+"/builds/"):
9827 - os.makedirs(self.settings["storedir"]+"/builds/")
9828 -
9829 - def set_fsscript(self):
9830 - if self.settings["spec_prefix"]+"/fsscript" in self.settings:
9831 - self.settings["fsscript"]=\
9832 - self.settings[self.settings["spec_prefix"]+"/fsscript"]
9833 - del self.settings[self.settings["spec_prefix"]+"/fsscript"]
9834 -
9835 - def set_rcadd(self):
9836 - if self.settings["spec_prefix"]+"/rcadd" in self.settings:
9837 - self.settings["rcadd"]=\
9838 - self.settings[self.settings["spec_prefix"]+"/rcadd"]
9839 - del self.settings[self.settings["spec_prefix"]+"/rcadd"]
9840 -
9841 - def set_rcdel(self):
9842 - if self.settings["spec_prefix"]+"/rcdel" in self.settings:
9843 - self.settings["rcdel"]=\
9844 - self.settings[self.settings["spec_prefix"]+"/rcdel"]
9845 - del self.settings[self.settings["spec_prefix"]+"/rcdel"]
9846 -
9847 - def set_cdtar(self):
9848 - if self.settings["spec_prefix"]+"/cdtar" in self.settings:
9849 - self.settings["cdtar"]=\
9850 - normpath(self.settings[self.settings["spec_prefix"]+"/cdtar"])
9851 - del self.settings[self.settings["spec_prefix"]+"/cdtar"]
9852 -
9853 - def set_iso(self):
9854 - if self.settings["spec_prefix"]+"/iso" in self.settings:
9855 - if self.settings[self.settings["spec_prefix"]+"/iso"].startswith('/'):
9856 - self.settings["iso"]=\
9857 - normpath(self.settings[self.settings["spec_prefix"]+"/iso"])
9858 - else:
9859 - # This automatically prepends the build dir to the ISO output path
9860 - # if it doesn't start with a /
9861 - self.settings["iso"] = normpath(self.settings["storedir"] + \
9862 - "/builds/" + self.settings["rel_type"] + "/" + \
9863 - self.settings[self.settings["spec_prefix"]+"/iso"])
9864 - del self.settings[self.settings["spec_prefix"]+"/iso"]
9865 -
9866 - def set_fstype(self):
9867 - if self.settings["spec_prefix"]+"/fstype" in self.settings:
9868 - self.settings["fstype"]=\
9869 - self.settings[self.settings["spec_prefix"]+"/fstype"]
9870 - del self.settings[self.settings["spec_prefix"]+"/fstype"]
9871 -
9872 - if "fstype" not in self.settings:
9873 - self.settings["fstype"]="normal"
9874 - for x in self.valid_values:
9875 - if x == self.settings["spec_prefix"]+"/fstype":
9876 - print "\n"+self.settings["spec_prefix"]+\
9877 - "/fstype is being set to the default of \"normal\"\n"
9878 -
9879 - def set_fsops(self):
9880 - if "fstype" in self.settings:
9881 - self.valid_values.append("fsops")
9882 - if self.settings["spec_prefix"]+"/fsops" in self.settings:
9883 - self.settings["fsops"]=\
9884 - self.settings[self.settings["spec_prefix"]+"/fsops"]
9885 - del self.settings[self.settings["spec_prefix"]+"/fsops"]
9886 -
9887 - def set_source_path(self):
9888 - if "SEEDCACHE" in self.settings\
9889 - and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+\
9890 - self.settings["source_subpath"]+"/")):
9891 - self.settings["source_path"]=normpath(self.settings["storedir"]+\
9892 - "/tmp/"+self.settings["source_subpath"]+"/")
9893 - else:
9894 - self.settings["source_path"] = normpath(self.settings["storedir"] +
9895 - "/builds/" + self.settings["source_subpath"].rstrip("/") +
9896 - ".tar.bz2")
9897 - if os.path.isfile(self.settings["source_path"]):
9898 - # XXX: Is this even necessary if the previous check passes?
9899 - if os.path.exists(self.settings["source_path"]):
9900 - self.settings["source_path_hash"]=\
9901 - generate_hash(self.settings["source_path"],\
9902 - hash_function=self.settings["hash_function"],\
9903 - verbose=False)
9904 - print "Source path set to "+self.settings["source_path"]
9905 - if os.path.isdir(self.settings["source_path"]):
9906 - print "\tIf this is not desired, remove this directory or turn off"
9907 - print "\tseedcache in the options of catalyst.conf the source path"
9908 - print "\twill then be "+\
9909 - normpath(self.settings["storedir"] + "/builds/" +
9910 - self.settings["source_subpath"].rstrip("/") + ".tar.bz2\n")
9911 -
9912 - def set_dest_path(self):
9913 - if "root_path" in self.settings:
9914 - self.settings["destpath"]=normpath(self.settings["chroot_path"]+\
9915 - self.settings["root_path"])
9916 - else:
9917 - self.settings["destpath"]=normpath(self.settings["chroot_path"])
9918 -
9919 - def set_cleanables(self):
9920 - self.settings["cleanables"]=["/etc/resolv.conf","/var/tmp/*","/tmp/*",\
9921 - "/root/*", self.settings["portdir"]]
9922 -
9923 - def set_snapshot_path(self):
9924 - self.settings["snapshot_path"] = normpath(self.settings["storedir"] +
9925 - "/snapshots/" + self.settings["snapshot_name"] +
9926 - self.settings["snapshot"].rstrip("/") + ".tar.xz")
9927 -
9928 - if os.path.exists(self.settings["snapshot_path"]):
9929 - self.settings["snapshot_path_hash"]=\
9930 - generate_hash(self.settings["snapshot_path"],\
9931 - hash_function=self.settings["hash_function"],verbose=False)
9932 - else:
9933 - self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
9934 - "/snapshots/" + self.settings["snapshot_name"] +
9935 - self.settings["snapshot"].rstrip("/") + ".tar.bz2")
9936 -
9937 - if os.path.exists(self.settings["snapshot_path"]):
9938 - self.settings["snapshot_path_hash"]=\
9939 - generate_hash(self.settings["snapshot_path"],\
9940 - hash_function=self.settings["hash_function"],verbose=False)
9941 -
9942 - def set_snapcache_path(self):
9943 - if "SNAPCACHE" in self.settings:
9944 - self.settings["snapshot_cache_path"]=\
9945 - normpath(self.settings["snapshot_cache"]+"/"+\
9946 - self.settings["snapshot"])
9947 - self.snapcache_lock=\
9948 - catalyst_lock.LockDir(self.settings["snapshot_cache_path"])
9949 - print "Caching snapshot to "+self.settings["snapshot_cache_path"]
9950 -
9951 - def set_chroot_path(self):
9952 - """
9953 - NOTE: the trailing slash has been removed
9954 - Things *could* break if you don't use a proper join()
9955 - """
9956 - self.settings["chroot_path"]=normpath(self.settings["storedir"]+\
9957 - "/tmp/"+self.settings["target_subpath"])
9958 - self.chroot_lock=catalyst_lock.LockDir(self.settings["chroot_path"])
9959 -
9960 - def set_autoresume_path(self):
9961 - self.settings["autoresume_path"]=normpath(self.settings["storedir"]+\
9962 - "/tmp/"+self.settings["rel_type"]+"/"+".autoresume-"+\
9963 - self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
9964 - self.settings["version_stamp"]+"/")
9965 - if "AUTORESUME" in self.settings:
9966 - print "The autoresume path is " + self.settings["autoresume_path"]
9967 - if not os.path.exists(self.settings["autoresume_path"]):
9968 - os.makedirs(self.settings["autoresume_path"],0755)
9969 -
9970 - def set_controller_file(self):
9971 - self.settings["controller_file"]=normpath(self.settings["sharedir"]+\
9972 - "/targets/"+self.settings["target"]+"/"+self.settings["target"]+\
9973 - "-controller.sh")
9974 -
9975 - def set_iso_volume_id(self):
9976 - if self.settings["spec_prefix"]+"/volid" in self.settings:
9977 - self.settings["iso_volume_id"]=\
9978 - self.settings[self.settings["spec_prefix"]+"/volid"]
9979 - if len(self.settings["iso_volume_id"])>32:
9980 - raise CatalystError,\
9981 - "ISO volume ID must not exceed 32 characters."
9982 - else:
9983 - self.settings["iso_volume_id"]="catalyst "+self.settings["snapshot"]
9984 -
9985 - def set_action_sequence(self):
9986 - """ Default action sequence for run method """
9987 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
9988 - "setup_confdir","portage_overlay",\
9989 - "base_dirs","bind","chroot_setup","setup_environment",\
9990 - "run_local","preclean","unbind","clean"]
9991 -# if "TARBALL" in self.settings or \
9992 -# "FETCH" not in self.settings:
9993 - if "FETCH" not in self.settings:
9994 - self.settings["action_sequence"].append("capture")
9995 - self.settings["action_sequence"].append("clear_autoresume")
9996 -
9997 - def set_use(self):
9998 - if self.settings["spec_prefix"]+"/use" in self.settings:
9999 - self.settings["use"]=\
10000 - self.settings[self.settings["spec_prefix"]+"/use"]
10001 - del self.settings[self.settings["spec_prefix"]+"/use"]
10002 - if "use" not in self.settings:
10003 - self.settings["use"]=""
10004 - if type(self.settings["use"])==types.StringType:
10005 - self.settings["use"]=self.settings["use"].split()
10006 -
10007 - # Force bindist when options ask for it
10008 - if "BINDIST" in self.settings:
10009 - self.settings["use"].append("bindist")
10010 -
10011 - def set_stage_path(self):
10012 - self.settings["stage_path"]=normpath(self.settings["chroot_path"])
10013 -
10014 - def set_mounts(self):
10015 - pass
10016 -
10017 - def set_packages(self):
10018 - pass
10019 -
10020 - def set_rm(self):
10021 - if self.settings["spec_prefix"]+"/rm" in self.settings:
10022 - if type(self.settings[self.settings["spec_prefix"]+\
10023 - "/rm"])==types.StringType:
10024 - self.settings[self.settings["spec_prefix"]+"/rm"]=\
10025 - self.settings[self.settings["spec_prefix"]+"/rm"].split()
10026 -
10027 - def set_linuxrc(self):
10028 - if self.settings["spec_prefix"]+"/linuxrc" in self.settings:
10029 - if type(self.settings[self.settings["spec_prefix"]+\
10030 - "/linuxrc"])==types.StringType:
10031 - self.settings["linuxrc"]=\
10032 - self.settings[self.settings["spec_prefix"]+"/linuxrc"]
10033 - del self.settings[self.settings["spec_prefix"]+"/linuxrc"]
10034 -
10035 - def set_busybox_config(self):
10036 - if self.settings["spec_prefix"]+"/busybox_config" in self.settings:
10037 - if type(self.settings[self.settings["spec_prefix"]+\
10038 - "/busybox_config"])==types.StringType:
10039 - self.settings["busybox_config"]=\
10040 - self.settings[self.settings["spec_prefix"]+"/busybox_config"]
10041 - del self.settings[self.settings["spec_prefix"]+"/busybox_config"]
10042 -
10043 - def set_portage_overlay(self):
10044 - if "portage_overlay" in self.settings:
10045 - if type(self.settings["portage_overlay"])==types.StringType:
10046 - self.settings["portage_overlay"]=\
10047 - self.settings["portage_overlay"].split()
10048 - print "portage_overlay directories are set to: \""+\
10049 - string.join(self.settings["portage_overlay"])+"\""
10050 -
10051 - def set_overlay(self):
10052 - if self.settings["spec_prefix"]+"/overlay" in self.settings:
10053 - if type(self.settings[self.settings["spec_prefix"]+\
10054 - "/overlay"])==types.StringType:
10055 - self.settings[self.settings["spec_prefix"]+"/overlay"]=\
10056 - self.settings[self.settings["spec_prefix"]+\
10057 - "/overlay"].split()
10058 -
10059 - def set_root_overlay(self):
10060 - if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
10061 - if type(self.settings[self.settings["spec_prefix"]+\
10062 - "/root_overlay"])==types.StringType:
10063 - self.settings[self.settings["spec_prefix"]+"/root_overlay"]=\
10064 - self.settings[self.settings["spec_prefix"]+\
10065 - "/root_overlay"].split()
10066 -
10067 - def set_root_path(self):
10068 - """ ROOT= variable for emerges """
10069 - self.settings["root_path"]="/"
10070 -
10071 - def set_valid_build_kernel_vars(self,addlargs):
10072 - if "boot/kernel" in addlargs:
10073 - if type(addlargs["boot/kernel"])==types.StringType:
10074 - loopy=[addlargs["boot/kernel"]]
10075 - else:
10076 - loopy=addlargs["boot/kernel"]
10077 -
10078 - for x in loopy:
10079 - self.valid_values.append("boot/kernel/"+x+"/aliases")
10080 - self.valid_values.append("boot/kernel/"+x+"/config")
10081 - self.valid_values.append("boot/kernel/"+x+"/console")
10082 - self.valid_values.append("boot/kernel/"+x+"/extraversion")
10083 - self.valid_values.append("boot/kernel/"+x+"/gk_action")
10084 - self.valid_values.append("boot/kernel/"+x+"/gk_kernargs")
10085 - self.valid_values.append("boot/kernel/"+x+"/initramfs_overlay")
10086 - self.valid_values.append("boot/kernel/"+x+"/machine_type")
10087 - self.valid_values.append("boot/kernel/"+x+"/sources")
10088 - self.valid_values.append("boot/kernel/"+x+"/softlevel")
10089 - self.valid_values.append("boot/kernel/"+x+"/use")
10090 - self.valid_values.append("boot/kernel/"+x+"/packages")
10091 - if "boot/kernel/"+x+"/packages" in addlargs:
10092 - if type(addlargs["boot/kernel/"+x+\
10093 - "/packages"])==types.StringType:
10094 - addlargs["boot/kernel/"+x+"/packages"]=\
10095 - [addlargs["boot/kernel/"+x+"/packages"]]
10096 -
10097 - def set_build_kernel_vars(self):
10098 - if self.settings["spec_prefix"]+"/gk_mainargs" in self.settings:
10099 - self.settings["gk_mainargs"]=\
10100 - self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
10101 - del self.settings[self.settings["spec_prefix"]+"/gk_mainargs"]
10102 -
10103 - def kill_chroot_pids(self):
10104 - print "Checking for processes running in chroot and killing them."
10105 -
10106 - """
10107 - Force environment variables to be exported so script can see them
10108 - """
10109 - self.setup_environment()
10110 -
10111 - if os.path.exists(self.settings["sharedir"]+\
10112 - "/targets/support/kill-chroot-pids.sh"):
10113 - cmd("/bin/bash "+self.settings["sharedir"]+\
10114 - "/targets/support/kill-chroot-pids.sh",\
10115 - "kill-chroot-pids script failed.",env=self.env)
10116 -
10117 - def mount_safety_check(self):
10118 - """
10119 - Check and verify that none of our paths in mypath are mounted. We don't
10120 - want to clean up with things still mounted, and this allows us to check.
10121 - Returns 1 on ok, 0 on "something is still mounted" case.
10122 - """
10123 -
10124 - if not os.path.exists(self.settings["chroot_path"]):
10125 - return
10126 -
10127 - print "self.mounts =", self.mounts
10128 - for x in self.mounts:
10129 - target = normpath(self.settings["chroot_path"] + self.target_mounts[x])
10130 - print "mount_safety_check() x =", x, target
10131 - if not os.path.exists(target):
10132 - continue
10133 -
10134 - if ismount(target):
10135 - """ Something is still mounted "" """
10136 - try:
10137 - print target + " is still mounted; performing auto-bind-umount...",
10138 - """ Try to umount stuff ourselves """
10139 - self.unbind()
10140 - if ismount(target):
10141 - raise CatalystError, "Auto-unbind failed for " + target
10142 - else:
10143 - print "Auto-unbind successful..."
10144 - except CatalystError:
10145 - raise CatalystError, "Unable to auto-unbind " + target
10146 -
10147 - def unpack(self):
10148 - unpack=True
10149 -
10150 - clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+\
10151 - "unpack")
10152 -
10153 - if "SEEDCACHE" in self.settings:
10154 - if os.path.isdir(self.settings["source_path"]):
10155 - """ SEEDCACHE Is a directory, use rsync """
10156 - unpack_cmd="rsync -a --delete "+self.settings["source_path"]+\
10157 - " "+self.settings["chroot_path"]
10158 - display_msg="\nStarting rsync from "+\
10159 - self.settings["source_path"]+"\nto "+\
10160 - self.settings["chroot_path"]+\
10161 - " (This may take some time) ...\n"
10162 - error_msg="Rsync of "+self.settings["source_path"]+" to "+\
10163 - self.settings["chroot_path"]+" failed."
10164 - else:
10165 - """ SEEDCACHE is a not a directory, try untar'ing """
10166 - print "Referenced SEEDCACHE does not appear to be a directory, trying to untar..."
10167 - display_msg="\nStarting tar extract from "+\
10168 - self.settings["source_path"]+"\nto "+\
10169 - self.settings["chroot_path"]+\
10170 - " (This may take some time) ...\n"
10171 - if "bz2" == self.settings["chroot_path"][-3:]:
10172 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10173 - self.settings["chroot_path"]
10174 - else:
10175 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10176 - self.settings["chroot_path"]
10177 - error_msg="Tarball extraction of "+\
10178 - self.settings["source_path"]+" to "+\
10179 - self.settings["chroot_path"]+" failed."
10180 - else:
10181 - """ No SEEDCACHE, use tar """
10182 - display_msg="\nStarting tar extract from "+\
10183 - self.settings["source_path"]+"\nto "+\
10184 - self.settings["chroot_path"]+\
10185 - " (This may take some time) ...\n"
10186 - if "bz2" == self.settings["chroot_path"][-3:]:
10187 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10188 - self.settings["chroot_path"]
10189 - else:
10190 - unpack_cmd="tar -I lbzip2 -xpf "+self.settings["source_path"]+" -C "+\
10191 - self.settings["chroot_path"]
10192 - error_msg="Tarball extraction of "+self.settings["source_path"]+\
10193 - " to "+self.settings["chroot_path"]+" failed."
10194 -
10195 - if "AUTORESUME" in self.settings:
10196 - if os.path.isdir(self.settings["source_path"]) \
10197 - and os.path.exists(self.settings["autoresume_path"]+"unpack"):
10198 - """ Autoresume is valid, SEEDCACHE is valid """
10199 - unpack=False
10200 - invalid_snapshot=False
10201 -
10202 - elif os.path.isfile(self.settings["source_path"]) \
10203 - and self.settings["source_path_hash"]==clst_unpack_hash:
10204 - """ Autoresume is valid, tarball is valid """
10205 - unpack=False
10206 - invalid_snapshot=True
10207 -
10208 - elif os.path.isdir(self.settings["source_path"]) \
10209 - and not os.path.exists(self.settings["autoresume_path"]+\
10210 - "unpack"):
10211 - """ Autoresume is invalid, SEEDCACHE """
10212 - unpack=True
10213 - invalid_snapshot=False
10214 -
10215 - elif os.path.isfile(self.settings["source_path"]) \
10216 - and self.settings["source_path_hash"]!=clst_unpack_hash:
10217 - """ Autoresume is invalid, tarball """
10218 - unpack=True
10219 - invalid_snapshot=True
10220 - else:
10221 - """ No autoresume, SEEDCACHE """
10222 - if "SEEDCACHE" in self.settings:
10223 - """ SEEDCACHE so let's run rsync and let it clean up """
10224 - if os.path.isdir(self.settings["source_path"]):
10225 - unpack=True
10226 - invalid_snapshot=False
10227 - elif os.path.isfile(self.settings["source_path"]):
10228 - """ Tarball so unpack and remove anything already there """
10229 - unpack=True
10230 - invalid_snapshot=True
10231 - """ No autoresume, no SEEDCACHE """
10232 - else:
10233 - """ Tarball so unpack and remove anything already there """
10234 - if os.path.isfile(self.settings["source_path"]):
10235 - unpack=True
10236 - invalid_snapshot=True
10237 - elif os.path.isdir(self.settings["source_path"]):
10238 - """ We should never reach this, so something is very wrong """
10239 - raise CatalystError,\
10240 - "source path is a dir but seedcache is not enabled"
10241 -
10242 - if unpack:
10243 - self.mount_safety_check()
10244 -
10245 - if invalid_snapshot:
10246 - if "AUTORESUME" in self.settings:
10247 - print "No Valid Resume point detected, cleaning up..."
10248 -
10249 - self.clear_autoresume()
10250 - self.clear_chroot()
10251 -
10252 - if not os.path.exists(self.settings["chroot_path"]):
10253 - os.makedirs(self.settings["chroot_path"])
10254 -
10255 - if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
10256 - os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
10257 -
10258 - if "PKGCACHE" in self.settings:
10259 - if not os.path.exists(self.settings["pkgcache_path"]):
10260 - os.makedirs(self.settings["pkgcache_path"],0755)
10261 -
10262 - if "KERNCACHE" in self.settings:
10263 - if not os.path.exists(self.settings["kerncache_path"]):
10264 - os.makedirs(self.settings["kerncache_path"],0755)
10265 -
10266 - print display_msg
10267 - cmd(unpack_cmd,error_msg,env=self.env)
10268 -
10269 - if "source_path_hash" in self.settings:
10270 - myf=open(self.settings["autoresume_path"]+"unpack","w")
10271 - myf.write(self.settings["source_path_hash"])
10272 - myf.close()
10273 - else:
10274 - touch(self.settings["autoresume_path"]+"unpack")
10275 - else:
10276 - print "Resume point detected, skipping unpack operation..."
10277 -
	def unpack_snapshot(self):
		# Unpack the portage snapshot tarball into the chroot (or into the
		# shared snapshot cache when SNAPCACHE is enabled), honoring any
		# existing autoresume point.
		unpack=True
		# Hash recorded by a previous successful unpack; used to decide resume.
		snapshot_hash=read_from_clst(self.settings["autoresume_path"]+\
			"unpack_portage")

		if "SNAPCACHE" in self.settings:
			snapshot_cache_hash=\
				read_from_clst(self.settings["snapshot_cache_path"]+\
				"catalyst-hash")
			destdir=self.settings["snapshot_cache_path"]
			# NOTE(review): this inspects chroot_path's suffix, not the
			# snapshot tarball's — presumably it should look at
			# snapshot_path; confirm before relying on the lbzip2 branch.
			if "bz2" == self.settings["chroot_path"][-3:]:
				unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+destdir
			else:
				unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+destdir
			unpack_errmsg="Error unpacking snapshot"
			cleanup_msg="Cleaning up invalid snapshot cache at \n\t"+\
				self.settings["snapshot_cache_path"]+\
				" (This can take a long time)..."
			cleanup_errmsg="Error removing existing snapshot cache directory."
			self.snapshot_lock_object=self.snapcache_lock

			# A matching hash means the cache already holds this snapshot.
			if self.settings["snapshot_path_hash"]==snapshot_cache_hash:
				print "Valid snapshot cache, skipping unpack of portage tree..."
				unpack=False
		else:
			destdir = normpath(self.settings["chroot_path"] + self.settings["portdir"])
			cleanup_errmsg="Error removing existing snapshot directory."
			cleanup_msg=\
				"Cleaning up existing portage tree (This can take a long time)..."
			if "bz2" == self.settings["chroot_path"][-3:]:
				unpack_cmd="tar -I lbzip2 -xpf "+self.settings["snapshot_path"]+" -C "+\
					self.settings["chroot_path"]+"/usr"
			else:
				unpack_cmd="tar xpf "+self.settings["snapshot_path"]+" -C "+\
					self.settings["chroot_path"]+"/usr"
			unpack_errmsg="Error unpacking snapshot"

			# Resume only when the tree exists, the resume point file exists,
			# and the snapshot hash is unchanged since it was recorded.
			if "AUTORESUME" in self.settings \
				and os.path.exists(self.settings["chroot_path"]+\
					self.settings["portdir"]) \
				and os.path.exists(self.settings["autoresume_path"]\
					+"unpack_portage") \
				and self.settings["snapshot_path_hash"] == snapshot_hash:
				print \
					"Valid Resume point detected, skipping unpack of portage tree..."
				unpack=False

		if unpack:
			# Hold the write lock while the shared cache is being replaced.
			if "SNAPCACHE" in self.settings:
				self.snapshot_lock_object.write_lock()
			if os.path.exists(destdir):
				print cleanup_msg
				cleanup_cmd="rm -rf "+destdir
				cmd(cleanup_cmd,cleanup_errmsg,env=self.env)
			if not os.path.exists(destdir):
				os.makedirs(destdir,0755)

			print "Unpacking portage tree (This can take a long time) ..."
			cmd(unpack_cmd,unpack_errmsg,env=self.env)

			# Record the hash of what was unpacked so a later run can resume.
			if "SNAPCACHE" in self.settings:
				myf=open(self.settings["snapshot_cache_path"]+"catalyst-hash","w")
				myf.write(self.settings["snapshot_path_hash"])
				myf.close()
			else:
				print "Setting snapshot autoresume point"
				myf=open(self.settings["autoresume_path"]+"unpack_portage","w")
				myf.write(self.settings["snapshot_path_hash"])
				myf.close()

			if "SNAPCACHE" in self.settings:
				self.snapshot_lock_object.unlock()
10351 - def config_profile_link(self):
10352 - if "AUTORESUME" in self.settings \
10353 - and os.path.exists(self.settings["autoresume_path"]+\
10354 - "config_profile_link"):
10355 - print \
10356 - "Resume point detected, skipping config_profile_link operation..."
10357 - else:
10358 - # TODO: zmedico and I discussed making this a directory and pushing
10359 - # in a parent file, as well as other user-specified configuration.
10360 - print "Configuring profile link..."
10361 - cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.profile",\
10362 - "Error zapping profile link",env=self.env)
10363 - cmd("mkdir -p "+self.settings["chroot_path"]+"/etc/portage/")
10364 - cmd("ln -sf ../.." + self.settings["portdir"] + "/profiles/" + \
10365 - self.settings["target_profile"]+" "+\
10366 - self.settings["chroot_path"]+"/etc/portage/make.profile",\
10367 - "Error creating profile link",env=self.env)
10368 - touch(self.settings["autoresume_path"]+"config_profile_link")
10369 -
10370 - def setup_confdir(self):
10371 - if "AUTORESUME" in self.settings \
10372 - and os.path.exists(self.settings["autoresume_path"]+\
10373 - "setup_confdir"):
10374 - print "Resume point detected, skipping setup_confdir operation..."
10375 - else:
10376 - if "portage_confdir" in self.settings:
10377 - print "Configuring /etc/portage..."
10378 - cmd("rsync -a "+self.settings["portage_confdir"]+"/ "+\
10379 - self.settings["chroot_path"]+"/etc/portage/",\
10380 - "Error copying /etc/portage",env=self.env)
10381 - touch(self.settings["autoresume_path"]+"setup_confdir")
10382 -
10383 - def portage_overlay(self):
10384 - """ We copy the contents of our overlays to /usr/local/portage """
10385 - if "portage_overlay" in self.settings:
10386 - for x in self.settings["portage_overlay"]:
10387 - if os.path.exists(x):
10388 - print "Copying overlay dir " +x
10389 - cmd("mkdir -p "+self.settings["chroot_path"]+\
10390 - self.settings["local_overlay"],\
10391 - "Could not make portage_overlay dir",env=self.env)
10392 - cmd("cp -R "+x+"/* "+self.settings["chroot_path"]+\
10393 - self.settings["local_overlay"],\
10394 - "Could not copy portage_overlay",env=self.env)
10395 -
10396 - def root_overlay(self):
10397 - """ Copy over the root_overlay """
10398 - if self.settings["spec_prefix"]+"/root_overlay" in self.settings:
10399 - for x in self.settings[self.settings["spec_prefix"]+\
10400 - "/root_overlay"]:
10401 - if os.path.exists(x):
10402 - print "Copying root_overlay: "+x
10403 - cmd("rsync -a "+x+"/ "+\
10404 - self.settings["chroot_path"],\
10405 - self.settings["spec_prefix"]+"/root_overlay: "+x+\
10406 - " copy failed.",env=self.env)
10407 -
10408 - def base_dirs(self):
10409 - pass
10410 -
10411 - def bind(self):
10412 - for x in self.mounts:
10413 - #print "bind(); x =", x
10414 - target = normpath(self.settings["chroot_path"] + self.target_mounts[x])
10415 - if not os.path.exists(target):
10416 - os.makedirs(target, 0755)
10417 -
10418 - if not os.path.exists(self.mountmap[x]):
10419 - if self.mountmap[x] not in ["tmpfs", "shmfs"]:
10420 - os.makedirs(self.mountmap[x], 0755)
10421 -
10422 - src=self.mountmap[x]
10423 - #print "bind(); src =", src
10424 - if "SNAPCACHE" in self.settings and x == "portdir":
10425 - self.snapshot_lock_object.read_lock()
10426 - if os.uname()[0] == "FreeBSD":
10427 - if src == "/dev":
10428 - cmd = "mount -t devfs none " + target
10429 - retval=os.system(cmd)
10430 - else:
10431 - cmd = "mount_nullfs " + src + " " + target
10432 - retval=os.system(cmd)
10433 - else:
10434 - if src == "tmpfs":
10435 - if "var_tmpfs_portage" in self.settings:
10436 - cmd = "mount -t tmpfs -o size=" + \
10437 - self.settings["var_tmpfs_portage"] + "G " + \
10438 - src + " " + target
10439 - retval=os.system(cmd)
10440 - elif src == "shmfs":
10441 - cmd = "mount -t tmpfs -o noexec,nosuid,nodev shm " + target
10442 - retval=os.system(cmd)
10443 - else:
10444 - cmd = "mount --bind " + src + " " + target
10445 - #print "bind(); cmd =", cmd
10446 - retval=os.system(cmd)
10447 - if retval!=0:
10448 - self.unbind()
10449 - raise CatalystError,"Couldn't bind mount " + src
10450 -
10451 - def unbind(self):
10452 - ouch=0
10453 - mypath=self.settings["chroot_path"]
10454 - myrevmounts=self.mounts[:]
10455 - myrevmounts.reverse()
10456 - """ Unmount in reverse order for nested bind-mounts """
10457 - for x in myrevmounts:
10458 - target = normpath(mypath + self.target_mounts[x])
10459 - if not os.path.exists(target):
10460 - continue
10461 -
10462 - if not ismount(target):
10463 - continue
10464 -
10465 - retval=os.system("umount " + target)
10466 -
10467 - if retval!=0:
10468 - warn("First attempt to unmount: " + target + " failed.")
10469 - warn("Killing any pids still running in the chroot")
10470 -
10471 - self.kill_chroot_pids()
10472 -
10473 - retval2 = os.system("umount " + target)
10474 - if retval2!=0:
10475 - ouch=1
10476 - warn("Couldn't umount bind mount: " + target)
10477 -
10478 - if "SNAPCACHE" in self.settings and x == "/usr/portage":
10479 - try:
10480 - """
10481 - It's possible the snapshot lock object isn't created yet.
10482 - This is because mount safety check calls unbind before the
10483 - target is fully initialized
10484 - """
10485 - self.snapshot_lock_object.unlock()
10486 - except:
10487 - pass
10488 - if ouch:
10489 - """
10490 - if any bind mounts really failed, then we need to raise
10491 - this to potentially prevent an upcoming bash stage cleanup script
10492 - from wiping our bind mounts.
10493 - """
10494 - raise CatalystError,\
10495 - "Couldn't umount one or more bind-mounts; aborting for safety."
10496 -
	def chroot_setup(self):
		# Prepare the chroot for building: copy host config files
		# (resolv.conf, hosts, optional envscript) and generate
		# /etc/portage/make.conf from the spec/settings.
		self.makeconf=read_makeconf(self.settings["chroot_path"]+\
			"/etc/portage/make.conf")
		# Let spec/host values override what was read from make.conf.
		self.override_cbuild()
		self.override_chost()
		self.override_cflags()
		self.override_cxxflags()
		self.override_ldflags()
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"chroot_setup"):
			print "Resume point detected, skipping chroot_setup operation..."
		else:
			print "Setting up chroot..."

			#self.makeconf=read_makeconf(self.settings["chroot_path"]+"/etc/portage/make.conf")

			cmd("cp /etc/resolv.conf "+self.settings["chroot_path"]+"/etc",\
				"Could not copy resolv.conf into place.",env=self.env)

			""" Copy over the envscript, if applicable """
			if "ENVSCRIPT" in self.settings:
				if not os.path.exists(self.settings["ENVSCRIPT"]):
					raise CatalystError,\
						"Can't find envscript "+self.settings["ENVSCRIPT"]

				print "\nWarning!!!!"
				print "\tOverriding certain env variables may cause catastrophic failure."
				print "\tIf your build fails look here first as the possible problem."
				print "\tCatalyst assumes you know what you are doing when setting"
				print "\t\tthese variables."
				print "\tCatalyst Maintainers use VERY minimal envscripts if used at all"
				print "\tYou have been warned\n"

				cmd("cp "+self.settings["ENVSCRIPT"]+" "+\
					self.settings["chroot_path"]+"/tmp/envscript",\
					"Could not copy envscript into place.",env=self.env)

			"""
			Copy over /etc/hosts from the host in case there are any
			specialties in there
			"""
			# The chroot's own hosts file is kept as hosts.catalyst so it can
			# be restored later (see clean()).
			if os.path.exists(self.settings["chroot_path"]+"/etc/hosts"):
				cmd("mv "+self.settings["chroot_path"]+"/etc/hosts "+\
					self.settings["chroot_path"]+"/etc/hosts.catalyst",\
					"Could not backup /etc/hosts",env=self.env)
			cmd("cp /etc/hosts "+self.settings["chroot_path"]+"/etc/hosts",\
				"Could not copy /etc/hosts",env=self.env)

			""" Modify and write out make.conf (for the chroot) """
			cmd("rm -f "+self.settings["chroot_path"]+"/etc/portage/make.conf",\
				"Could not remove "+self.settings["chroot_path"]+\
				"/etc/portage/make.conf",env=self.env)
			myf=open(self.settings["chroot_path"]+"/etc/portage/make.conf","w")
			myf.write("# These settings were set by the catalyst build script that automatically\n# built this stage.\n")
			myf.write("# Please consult /usr/share/portage/config/make.conf.example for a more\n# detailed example.\n")
			if "CFLAGS" in self.settings:
				myf.write('CFLAGS="'+self.settings["CFLAGS"]+'"\n')
			if "CXXFLAGS" in self.settings:
				# Only spell out CXXFLAGS when it differs from CFLAGS.
				if self.settings["CXXFLAGS"]!=self.settings["CFLAGS"]:
					myf.write('CXXFLAGS="'+self.settings["CXXFLAGS"]+'"\n')
				else:
					myf.write('CXXFLAGS="${CFLAGS}"\n')
			else:
				myf.write('CXXFLAGS="${CFLAGS}"\n')

			if "LDFLAGS" in self.settings:
				myf.write("# LDFLAGS is unsupported. USE AT YOUR OWN RISK!\n")
				myf.write('LDFLAGS="'+self.settings["LDFLAGS"]+'"\n')
			if "CBUILD" in self.settings:
				myf.write("# This should not be changed unless you know exactly what you are doing. You\n# should probably be using a different stage, instead.\n")
				myf.write('CBUILD="'+self.settings["CBUILD"]+'"\n')

			myf.write("# WARNING: Changing your CHOST is not something that should be done lightly.\n# Please consult http://www.gentoo.org/doc/en/change-chost.xml before changing.\n")
			myf.write('CHOST="'+self.settings["CHOST"]+'"\n')

			""" Figure out what our USE vars are for building """
			myusevars=[]
			if "HOSTUSE" in self.settings:
				myusevars.extend(self.settings["HOSTUSE"])

			if "use" in self.settings:
				myusevars.extend(self.settings["use"])

			if myusevars:
				myf.write("# These are the USE flags that were used in addition to what is provided by the\n# profile used for building.\n")
				# De-duplicate and sort for a stable make.conf.
				myusevars = sorted(set(myusevars))
				myf.write('USE="'+string.join(myusevars)+'"\n')
				if '-*' in myusevars:
					print "\nWarning!!! "
					print "\tThe use of -* in "+self.settings["spec_prefix"]+\
						"/use will cause portage to ignore"
					print "\tpackage.use in the profile and portage_confdir. You've been warned!"

			myf.write('PORTDIR="%s"\n' % self.settings['portdir'])
			myf.write('DISTDIR="%s"\n' % self.settings['distdir'])
			myf.write('PKGDIR="%s"\n' % self.settings['packagedir'])

			""" Setup the portage overlay """
			if "portage_overlay" in self.settings:
				myf.write('PORTDIR_OVERLAY="/usr/local/portage"\n')

			myf.close()
			# Keep a pristine backup of the generated make.conf.
			cmd("cp "+self.settings["chroot_path"]+"/etc/portage/make.conf "+\
				self.settings["chroot_path"]+"/etc/portage/make.conf.catalyst",\
				"Could not backup /etc/portage/make.conf",env=self.env)
			touch(self.settings["autoresume_path"]+"chroot_setup")
10604 - def fsscript(self):
10605 - if "AUTORESUME" in self.settings \
10606 - and os.path.exists(self.settings["autoresume_path"]+"fsscript"):
10607 - print "Resume point detected, skipping fsscript operation..."
10608 - else:
10609 - if "fsscript" in self.settings:
10610 - if os.path.exists(self.settings["controller_file"]):
10611 - cmd("/bin/bash "+self.settings["controller_file"]+\
10612 - " fsscript","fsscript script failed.",env=self.env)
10613 - touch(self.settings["autoresume_path"]+"fsscript")
10614 -
10615 - def rcupdate(self):
10616 - if "AUTORESUME" in self.settings \
10617 - and os.path.exists(self.settings["autoresume_path"]+"rcupdate"):
10618 - print "Resume point detected, skipping rcupdate operation..."
10619 - else:
10620 - if os.path.exists(self.settings["controller_file"]):
10621 - cmd("/bin/bash "+self.settings["controller_file"]+" rc-update",\
10622 - "rc-update script failed.",env=self.env)
10623 - touch(self.settings["autoresume_path"]+"rcupdate")
10624 -
	def clean(self):
		# Remove build-time files listed in cleanables, restore /etc/hosts,
		# drop the temporary overlay, and run the controller's clean stage.
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"clean"):
			print "Resume point detected, skipping clean operation..."
		else:
			for x in self.settings["cleanables"]:
				print "Cleaning chroot: "+x+"... "
				cmd("rm -rf "+self.settings["destpath"]+x,"Couldn't clean "+\
					x,env=self.env)

			""" Put /etc/hosts back into place """
			# hosts.catalyst is the backup made in chroot_setup().
			if os.path.exists(self.settings["chroot_path"]+"/etc/hosts.catalyst"):
				cmd("mv -f "+self.settings["chroot_path"]+"/etc/hosts.catalyst "+\
					self.settings["chroot_path"]+"/etc/hosts",\
					"Could not replace /etc/hosts",env=self.env)

			""" Remove our overlay """
			if os.path.exists(self.settings["chroot_path"] + self.settings["local_overlay"]):
				cmd("rm -rf " + self.settings["chroot_path"] + self.settings["local_overlay"],
					"Could not remove " + self.settings["local_overlay"], env=self.env)
				# Also strip the PORTDIR_OVERLAY line chroot_setup() wrote.
				cmd("sed -i '/^PORTDIR_OVERLAY/d' "+self.settings["chroot_path"]+\
					"/etc/portage/make.conf",\
					"Could not remove PORTDIR_OVERLAY from make.conf",env=self.env)

			""" Clean up old and obsoleted files in /etc """
			if os.path.exists(self.settings["stage_path"]+"/etc"):
				cmd("find "+self.settings["stage_path"]+\
					"/etc -maxdepth 1 -name \"*-\" | xargs rm -f",\
					"Could not remove stray files in /etc",env=self.env)

			if os.path.exists(self.settings["controller_file"]):
				cmd("/bin/bash "+self.settings["controller_file"]+" clean",\
					"clean script failed.",env=self.env)
				touch(self.settings["autoresume_path"]+"clean")
	def empty(self):
		# Recreate each spec-listed directory as empty while preserving the
		# original ownership and permission bits.
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"empty"):
			print "Resume point detected, skipping empty operation..."
		else:
			if self.settings["spec_prefix"]+"/empty" in self.settings:
				# A spec value may arrive as one space-separated string;
				# normalize it to a list in place.
				if type(self.settings[self.settings["spec_prefix"]+\
					"/empty"])==types.StringType:
					self.settings[self.settings["spec_prefix"]+"/empty"]=\
						self.settings[self.settings["spec_prefix"]+\
						"/empty"].split()
				for x in self.settings[self.settings["spec_prefix"]+"/empty"]:
					myemp=self.settings["destpath"]+x
					# Symlinks are skipped too, so a link is never replaced
					# by a real directory.
					if not os.path.isdir(myemp) or os.path.islink(myemp):
						print x,"not a directory or does not exist, skipping 'empty' operation."
						continue
					print "Emptying directory",x
					"""
					stat the dir, delete the dir, recreate the dir and set
					the proper perms and ownership
					"""
					mystat=os.stat(myemp)
					shutil.rmtree(myemp)
					os.makedirs(myemp,0755)
					os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
					os.chmod(myemp,mystat[ST_MODE])
			touch(self.settings["autoresume_path"]+"empty")
	def remove(self):
		# Delete spec-listed paths from the chroot, then run the
		# controller's clean stage.
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"remove"):
			print "Resume point detected, skipping remove operation..."
		else:
			if self.settings["spec_prefix"]+"/rm" in self.settings:
				for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
					"""
					We're going to shell out for all these cleaning
					operations, so we get easy glob handling.
					"""
					print "livecd: removing "+x
					os.system("rm -rf "+self.settings["chroot_path"]+x)
			try:
				if os.path.exists(self.settings["controller_file"]):
					cmd("/bin/bash "+self.settings["controller_file"]+\
						" clean","Clean failed.",env=self.env)
					touch(self.settings["autoresume_path"]+"remove")
			except:
				# Leave no bind mounts behind before propagating the failure.
				self.unbind()
				raise
10710 - def preclean(self):
10711 - if "AUTORESUME" in self.settings \
10712 - and os.path.exists(self.settings["autoresume_path"]+"preclean"):
10713 - print "Resume point detected, skipping preclean operation..."
10714 - else:
10715 - try:
10716 - if os.path.exists(self.settings["controller_file"]):
10717 - cmd("/bin/bash "+self.settings["controller_file"]+\
10718 - " preclean","preclean script failed.",env=self.env)
10719 - touch(self.settings["autoresume_path"]+"preclean")
10720 -
10721 - except:
10722 - self.unbind()
10723 - raise CatalystError, "Build failed, could not execute preclean"
10724 -
10725 - def capture(self):
10726 - if "AUTORESUME" in self.settings \
10727 - and os.path.exists(self.settings["autoresume_path"]+"capture"):
10728 - print "Resume point detected, skipping capture operation..."
10729 - else:
10730 - """ Capture target in a tarball """
10731 - mypath=self.settings["target_path"].split("/")
10732 - """ Remove filename from path """
10733 - mypath=string.join(mypath[:-1],"/")
10734 -
10735 - """ Now make sure path exists """
10736 - if not os.path.exists(mypath):
10737 - os.makedirs(mypath)
10738 -
10739 - print "Creating stage tarball..."
10740 -
10741 - cmd("tar -I lbzip2 -cpf "+self.settings["target_path"]+" -C "+\
10742 - self.settings["stage_path"]+" .",\
10743 - "Couldn't create stage tarball",env=self.env)
10744 -
10745 - self.gen_contents_file(self.settings["target_path"])
10746 - self.gen_digest_file(self.settings["target_path"])
10747 -
10748 - touch(self.settings["autoresume_path"]+"capture")
10749 -
10750 - def run_local(self):
10751 - if "AUTORESUME" in self.settings \
10752 - and os.path.exists(self.settings["autoresume_path"]+"run_local"):
10753 - print "Resume point detected, skipping run_local operation..."
10754 - else:
10755 - try:
10756 - if os.path.exists(self.settings["controller_file"]):
10757 - cmd("/bin/bash "+self.settings["controller_file"]+" run",\
10758 - "run script failed.",env=self.env)
10759 - touch(self.settings["autoresume_path"]+"run_local")
10760 -
10761 - except CatalystError:
10762 - self.unbind()
10763 - raise CatalystError,"Stage build aborting due to error."
10764 -
10765 - def setup_environment(self):
10766 - """
10767 - Modify the current environment. This is an ugly hack that should be
10768 - fixed. We need this to use the os.system() call since we can't
10769 - specify our own environ
10770 - """
10771 - for x in self.settings.keys():
10772 - """ Sanitize var names by doing "s|/-.|_|g" """
10773 - varname="clst_"+string.replace(x,"/","_")
10774 - varname=string.replace(varname,"-","_")
10775 - varname=string.replace(varname,".","_")
10776 - if type(self.settings[x])==types.StringType:
10777 - """ Prefix to prevent namespace clashes """
10778 - #os.environ[varname]=self.settings[x]
10779 - self.env[varname]=self.settings[x]
10780 - elif type(self.settings[x])==types.ListType:
10781 - #os.environ[varname]=string.join(self.settings[x])
10782 - self.env[varname]=string.join(self.settings[x])
10783 - elif type(self.settings[x])==types.BooleanType:
10784 - if self.settings[x]:
10785 - self.env[varname]="true"
10786 - else:
10787 - self.env[varname]="false"
10788 - if "makeopts" in self.settings:
10789 - self.env["MAKEOPTS"]=self.settings["makeopts"]
10790 -
10791 - def run(self):
10792 - self.chroot_lock.write_lock()
10793 -
10794 - """ Kill any pids in the chroot "" """
10795 - self.kill_chroot_pids()
10796 -
10797 - """ Check for mounts right away and abort if we cannot unmount them """
10798 - self.mount_safety_check()
10799 -
10800 - if "CLEAR_AUTORESUME" in self.settings:
10801 - self.clear_autoresume()
10802 -
10803 - if "PURGETMPONLY" in self.settings:
10804 - self.purge()
10805 - return
10806 -
10807 - if "PURGEONLY" in self.settings:
10808 - self.purge()
10809 - return
10810 -
10811 - if "PURGE" in self.settings:
10812 - self.purge()
10813 -
10814 - for x in self.settings["action_sequence"]:
10815 - print "--- Running action sequence: "+x
10816 - sys.stdout.flush()
10817 - try:
10818 - apply(getattr(self,x))
10819 - except:
10820 - self.mount_safety_check()
10821 - raise
10822 -
10823 - self.chroot_lock.unlock()
10824 -
	def unmerge(self):
		# Unmerge spec-listed packages inside the chroot via the controller
		# script.
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"unmerge"):
			print "Resume point detected, skipping unmerge operation..."
		else:
			if self.settings["spec_prefix"]+"/unmerge" in self.settings:
				# Normalize a single string value to a one-element list.
				if type(self.settings[self.settings["spec_prefix"]+\
					"/unmerge"])==types.StringType:
					self.settings[self.settings["spec_prefix"]+"/unmerge"]=\
						[self.settings[self.settings["spec_prefix"]+"/unmerge"]]
				# Work on a copy so the settings list itself is not quoted.
				myunmerge=\
					self.settings[self.settings["spec_prefix"]+"/unmerge"][:]

				for x in range(0,len(myunmerge)):
					"""
					Surround args with quotes for passing to bash, allows
					things like "<" to remain intact
					"""
					myunmerge[x]="'"+myunmerge[x]+"'"
				myunmerge=string.join(myunmerge)

				""" Before cleaning, unmerge stuff """
				try:
					cmd("/bin/bash "+self.settings["controller_file"]+\
						" unmerge "+ myunmerge,"Unmerge script failed.",\
						env=self.env)
					print "unmerge shell script"
				except CatalystError:
					self.unbind()
					raise
				touch(self.settings["autoresume_path"]+"unmerge")
10857 - def target_setup(self):
10858 - if "AUTORESUME" in self.settings \
10859 - and os.path.exists(self.settings["autoresume_path"]+"target_setup"):
10860 - print "Resume point detected, skipping target_setup operation..."
10861 - else:
10862 - print "Setting up filesystems per filesystem type"
10863 - cmd("/bin/bash "+self.settings["controller_file"]+\
10864 - " target_image_setup "+ self.settings["target_path"],\
10865 - "target_image_setup script failed.",env=self.env)
10866 - touch(self.settings["autoresume_path"]+"target_setup")
10867 -
10868 - def setup_overlay(self):
10869 - if "AUTORESUME" in self.settings \
10870 - and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
10871 - print "Resume point detected, skipping setup_overlay operation..."
10872 - else:
10873 - if self.settings["spec_prefix"]+"/overlay" in self.settings:
10874 - for x in self.settings[self.settings["spec_prefix"]+"/overlay"]:
10875 - if os.path.exists(x):
10876 - cmd("rsync -a "+x+"/ "+\
10877 - self.settings["target_path"],\
10878 - self.settings["spec_prefix"]+"overlay: "+x+\
10879 - " copy failed.",env=self.env)
10880 - touch(self.settings["autoresume_path"]+"setup_overlay")
10881 -
10882 - def create_iso(self):
10883 - if "AUTORESUME" in self.settings \
10884 - and os.path.exists(self.settings["autoresume_path"]+"create_iso"):
10885 - print "Resume point detected, skipping create_iso operation..."
10886 - else:
10887 - """ Create the ISO """
10888 - if "iso" in self.settings:
10889 - cmd("/bin/bash "+self.settings["controller_file"]+" iso "+\
10890 - self.settings["iso"],"ISO creation script failed.",\
10891 - env=self.env)
10892 - self.gen_contents_file(self.settings["iso"])
10893 - self.gen_digest_file(self.settings["iso"])
10894 - touch(self.settings["autoresume_path"]+"create_iso")
10895 - else:
10896 - print "WARNING: livecd/iso was not defined."
10897 - print "An ISO Image will not be created."
10898 -
10899 - def build_packages(self):
10900 - if "AUTORESUME" in self.settings \
10901 - and os.path.exists(self.settings["autoresume_path"]+\
10902 - "build_packages"):
10903 - print "Resume point detected, skipping build_packages operation..."
10904 - else:
10905 - if self.settings["spec_prefix"]+"/packages" in self.settings:
10906 - if "AUTORESUME" in self.settings \
10907 - and os.path.exists(self.settings["autoresume_path"]+\
10908 - "build_packages"):
10909 - print "Resume point detected, skipping build_packages operation..."
10910 - else:
10911 - mypack=\
10912 - list_bashify(self.settings[self.settings["spec_prefix"]\
10913 - +"/packages"])
10914 - try:
10915 - cmd("/bin/bash "+self.settings["controller_file"]+\
10916 - " build_packages "+mypack,\
10917 - "Error in attempt to build packages",env=self.env)
10918 - touch(self.settings["autoresume_path"]+"build_packages")
10919 - except CatalystError:
10920 - self.unbind()
10921 - raise CatalystError,self.settings["spec_prefix"]+\
10922 - "build aborting due to error."
10923 -
	def build_kernel(self):
		"Build all configured kernels"
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]+"build_kernel"):
			print "Resume point detected, skipping build_kernel operation..."
		else:
			if "boot/kernel" in self.settings:
				try:
					mynames=self.settings["boot/kernel"]
					# A single kernel name may arrive as a plain string.
					if type(mynames)==types.StringType:
						mynames=[mynames]
					"""
					Execute the script that sets up the kernel build environment
					"""
					cmd("/bin/bash "+self.settings["controller_file"]+\
						" pre-kmerge ","Runscript pre-kmerge failed",\
						env=self.env)
					# Each kernel keeps its own per-name resume point.
					for kname in mynames:
						self._build_kernel(kname=kname)
					touch(self.settings["autoresume_path"]+"build_kernel")
				except CatalystError:
					self.unbind()
					raise CatalystError,\
						"build aborting due to kernel build error."
	def _build_kernel(self, kname):
		"Build a single configured kernel by name"
		if "AUTORESUME" in self.settings \
			and os.path.exists(self.settings["autoresume_path"]\
				+"build_kernel_"+kname):
			print "Resume point detected, skipping build_kernel for "+kname+" operation..."
			return
		self._copy_kernel_config(kname=kname)

		"""
		If we need to pass special options to the bootloader
		for this kernel put them into the environment
		"""
		if "boot/kernel/"+kname+"/kernelopts" in self.settings:
			myopts=self.settings["boot/kernel/"+kname+\
				"/kernelopts"]

			# A list of options is flattened into one space-separated string.
			if type(myopts) != types.StringType:
				myopts = string.join(myopts)
			self.env[kname+"_kernelopts"]=myopts

		else:
			self.env[kname+"_kernelopts"]=""

		# Default extraversion to empty so clst_kextraversion is always set.
		if "boot/kernel/"+kname+"/extraversion" not in self.settings:
			self.settings["boot/kernel/"+kname+\
				"/extraversion"]=""

		self.env["clst_kextraversion"]=\
			self.settings["boot/kernel/"+kname+\
			"/extraversion"]

		self._copy_initramfs_overlay(kname=kname)

		""" Execute the script that builds the kernel """
		cmd("/bin/bash "+self.settings["controller_file"]+\
			" kernel "+kname,\
			"Runscript kernel build failed",env=self.env)

		# Remove the staging area _copy_initramfs_overlay() populated.
		if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
			if os.path.exists(self.settings["chroot_path"]+\
				"/tmp/initramfs_overlay/"):
				print "Cleaning up temporary overlay dir"
				cmd("rm -R "+self.settings["chroot_path"]+\
					"/tmp/initramfs_overlay/",env=self.env)

		touch(self.settings["autoresume_path"]+\
			"build_kernel_"+kname)

		"""
		Execute the script that cleans up the kernel build
		environment
		"""
		cmd("/bin/bash "+self.settings["controller_file"]+\
			" post-kmerge ",
			"Runscript post-kmerge failed",env=self.env)
11006 - def _copy_kernel_config(self, kname):
11007 - if "boot/kernel/"+kname+"/config" in self.settings:
11008 - if not os.path.exists(self.settings["boot/kernel/"+kname+"/config"]):
11009 - self.unbind()
11010 - raise CatalystError,\
11011 - "Can't find kernel config: "+\
11012 - self.settings["boot/kernel/"+kname+\
11013 - "/config"]
11014 -
11015 - try:
11016 - cmd("cp "+self.settings["boot/kernel/"+kname+\
11017 - "/config"]+" "+\
11018 - self.settings["chroot_path"]+"/var/tmp/"+\
11019 - kname+".config",\
11020 - "Couldn't copy kernel config: "+\
11021 - self.settings["boot/kernel/"+kname+\
11022 - "/config"],env=self.env)
11023 -
11024 - except CatalystError:
11025 - self.unbind()
11026 -
11027 - def _copy_initramfs_overlay(self, kname):
11028 - if "boot/kernel/"+kname+"/initramfs_overlay" in self.settings:
11029 - if os.path.exists(self.settings["boot/kernel/"+\
11030 - kname+"/initramfs_overlay"]):
11031 - print "Copying initramfs_overlay dir "+\
11032 - self.settings["boot/kernel/"+kname+\
11033 - "/initramfs_overlay"]
11034 -
11035 - cmd("mkdir -p "+\
11036 - self.settings["chroot_path"]+\
11037 - "/tmp/initramfs_overlay/"+\
11038 - self.settings["boot/kernel/"+kname+\
11039 - "/initramfs_overlay"],env=self.env)
11040 -
11041 - cmd("cp -R "+self.settings["boot/kernel/"+\
11042 - kname+"/initramfs_overlay"]+"/* "+\
11043 - self.settings["chroot_path"]+\
11044 - "/tmp/initramfs_overlay/"+\
11045 - self.settings["boot/kernel/"+kname+\
11046 - "/initramfs_overlay"],env=self.env)
11047 -
11048 - def bootloader(self):
11049 - if "AUTORESUME" in self.settings \
11050 - and os.path.exists(self.settings["autoresume_path"]+"bootloader"):
11051 - print "Resume point detected, skipping bootloader operation..."
11052 - else:
11053 - try:
11054 - cmd("/bin/bash "+self.settings["controller_file"]+\
11055 - " bootloader " + self.settings["target_path"],\
11056 - "Bootloader script failed.",env=self.env)
11057 - touch(self.settings["autoresume_path"]+"bootloader")
11058 - except CatalystError:
11059 - self.unbind()
11060 - raise CatalystError,"Script aborting due to error."
11061 -
11062 - def livecd_update(self):
11063 - if "AUTORESUME" in self.settings \
11064 - and os.path.exists(self.settings["autoresume_path"]+\
11065 - "livecd_update"):
11066 - print "Resume point detected, skipping build_packages operation..."
11067 - else:
11068 - try:
11069 - cmd("/bin/bash "+self.settings["controller_file"]+\
11070 - " livecd-update","livecd-update failed.",env=self.env)
11071 - touch(self.settings["autoresume_path"]+"livecd_update")
11072 -
11073 - except CatalystError:
11074 - self.unbind()
11075 - raise CatalystError,"build aborting due to livecd_update error."
11076 -
11077 - def clear_chroot(self):
11078 - myemp=self.settings["chroot_path"]
11079 - if os.path.isdir(myemp):
11080 - print "Emptying directory",myemp
11081 - """
11082 - stat the dir, delete the dir, recreate the dir and set
11083 - the proper perms and ownership
11084 - """
11085 - mystat=os.stat(myemp)
11086 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
11087 - """ There's no easy way to change flags recursively in python """
11088 - if os.uname()[0] == "FreeBSD":
11089 - os.system("chflags -R noschg "+myemp)
11090 - shutil.rmtree(myemp)
11091 - os.makedirs(myemp,0755)
11092 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11093 - os.chmod(myemp,mystat[ST_MODE])
11094 -
11095 - def clear_packages(self):
11096 - if "PKGCACHE" in self.settings:
11097 - print "purging the pkgcache ..."
11098 -
11099 - myemp=self.settings["pkgcache_path"]
11100 - if os.path.isdir(myemp):
11101 - print "Emptying directory",myemp
11102 - """
11103 - stat the dir, delete the dir, recreate the dir and set
11104 - the proper perms and ownership
11105 - """
11106 - mystat=os.stat(myemp)
11107 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
11108 - shutil.rmtree(myemp)
11109 - os.makedirs(myemp,0755)
11110 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11111 - os.chmod(myemp,mystat[ST_MODE])
11112 -
11113 - def clear_kerncache(self):
11114 - if "KERNCACHE" in self.settings:
11115 - print "purging the kerncache ..."
11116 -
11117 - myemp=self.settings["kerncache_path"]
11118 - if os.path.isdir(myemp):
11119 - print "Emptying directory",myemp
11120 - """
11121 - stat the dir, delete the dir, recreate the dir and set
11122 - the proper perms and ownership
11123 - """
11124 - mystat=os.stat(myemp)
11125 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env=self.env)
11126 - shutil.rmtree(myemp)
11127 - os.makedirs(myemp,0755)
11128 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11129 - os.chmod(myemp,mystat[ST_MODE])
11130 -
11131 - def clear_autoresume(self):
11132 - """ Clean resume points since they are no longer needed """
11133 - if "AUTORESUME" in self.settings:
11134 - print "Removing AutoResume Points: ..."
11135 - myemp=self.settings["autoresume_path"]
11136 - if os.path.isdir(myemp):
11137 - if "AUTORESUME" in self.settings:
11138 - print "Emptying directory",myemp
11139 - """
11140 - stat the dir, delete the dir, recreate the dir and set
11141 - the proper perms and ownership
11142 - """
11143 - mystat=os.stat(myemp)
11144 - if os.uname()[0] == "FreeBSD":
11145 - cmd("chflags -R noschg "+myemp,\
11146 - "Could not remove immutable flag for file "\
11147 - +myemp)
11148 - #cmd("rm -rf "+myemp, "Could not remove existing file: "+myemp,env-self.env)
11149 - shutil.rmtree(myemp)
11150 - os.makedirs(myemp,0755)
11151 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11152 - os.chmod(myemp,mystat[ST_MODE])
11153 -
11154 - def gen_contents_file(self,file):
11155 - if os.path.exists(file+".CONTENTS"):
11156 - os.remove(file+".CONTENTS")
11157 - if "contents" in self.settings:
11158 - if os.path.exists(file):
11159 - myf=open(file+".CONTENTS","w")
11160 - keys={}
11161 - for i in self.settings["contents"].split():
11162 - keys[i]=1
11163 - array=keys.keys()
11164 - array.sort()
11165 - for j in array:
11166 - contents=generate_contents(file,contents_function=j,\
11167 - verbose="VERBOSE" in self.settings)
11168 - if contents:
11169 - myf.write(contents)
11170 - myf.close()
11171 -
11172 - def gen_digest_file(self,file):
11173 - if os.path.exists(file+".DIGESTS"):
11174 - os.remove(file+".DIGESTS")
11175 - if "digests" in self.settings:
11176 - if os.path.exists(file):
11177 - myf=open(file+".DIGESTS","w")
11178 - keys={}
11179 - for i in self.settings["digests"].split():
11180 - keys[i]=1
11181 - array=keys.keys()
11182 - array.sort()
11183 - for f in [file, file+'.CONTENTS']:
11184 - if os.path.exists(f):
11185 - if "all" in array:
11186 - for k in hash_map.keys():
11187 - hash=generate_hash(f,hash_function=k,verbose=\
11188 - "VERBOSE" in self.settings)
11189 - myf.write(hash)
11190 - else:
11191 - for j in array:
11192 - hash=generate_hash(f,hash_function=j,verbose=\
11193 - "VERBOSE" in self.settings)
11194 - myf.write(hash)
11195 - myf.close()
11196 -
11197 - def purge(self):
11198 - countdown(10,"Purging Caches ...")
11199 - if any(k in self.settings for k in ("PURGE","PURGEONLY","PURGETMPONLY")):
11200 - print "clearing autoresume ..."
11201 - self.clear_autoresume()
11202 -
11203 - print "clearing chroot ..."
11204 - self.clear_chroot()
11205 -
11206 - if "PURGETMPONLY" not in self.settings:
11207 - print "clearing package cache ..."
11208 - self.clear_packages()
11209 -
11210 - print "clearing kerncache ..."
11211 - self.clear_kerncache()
11212 -
11213 -# vim: ts=4 sw=4 sta et sts=4 ai
11214 diff --git a/modules/generic_target.py b/modules/generic_target.py
11215 deleted file mode 100644
11216 index fe96bd7..0000000
11217 --- a/modules/generic_target.py
11218 +++ /dev/null
11219 @@ -1,11 +0,0 @@
11220 -from catalyst_support import *
11221 -
11222 -class generic_target:
11223 - """
11224 - The toplevel class for generic_stage_target. This is about as generic as we get.
11225 - """
11226 - def __init__(self,myspec,addlargs):
11227 - addl_arg_parse(myspec,addlargs,self.required_values,self.valid_values)
11228 - self.settings=myspec
11229 - self.env={}
11230 - self.env["PATH"]="/bin:/sbin:/usr/bin:/usr/sbin"
11231 diff --git a/modules/grp_target.py b/modules/grp_target.py
11232 deleted file mode 100644
11233 index 6941522..0000000
11234 --- a/modules/grp_target.py
11235 +++ /dev/null
11236 @@ -1,118 +0,0 @@
11237 -"""
11238 -Gentoo Reference Platform (GRP) target
11239 -"""
11240 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11241 -
11242 -import os,types,glob
11243 -from catalyst_support import *
11244 -from generic_stage_target import *
11245 -
11246 -class grp_target(generic_stage_target):
11247 - """
11248 - The builder class for GRP (Gentoo Reference Platform) builds.
11249 - """
11250 - def __init__(self,spec,addlargs):
11251 - self.required_values=["version_stamp","target","subarch",\
11252 - "rel_type","profile","snapshot","source_subpath"]
11253 -
11254 - self.valid_values=self.required_values[:]
11255 - self.valid_values.extend(["grp/use"])
11256 - if "grp" not in addlargs:
11257 - raise CatalystError,"Required value \"grp\" not specified in spec."
11258 -
11259 - self.required_values.extend(["grp"])
11260 - if type(addlargs["grp"])==types.StringType:
11261 - addlargs["grp"]=[addlargs["grp"]]
11262 -
11263 - if "grp/use" in addlargs:
11264 - if type(addlargs["grp/use"])==types.StringType:
11265 - addlargs["grp/use"]=[addlargs["grp/use"]]
11266 -
11267 - for x in addlargs["grp"]:
11268 - self.required_values.append("grp/"+x+"/packages")
11269 - self.required_values.append("grp/"+x+"/type")
11270 -
11271 - generic_stage_target.__init__(self,spec,addlargs)
11272 -
11273 - def set_target_path(self):
11274 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
11275 - if "AUTORESUME" in self.settings \
11276 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11277 - print "Resume point detected, skipping target path setup operation..."
11278 - else:
11279 - # first clean up any existing target stuff
11280 - #if os.path.isdir(self.settings["target_path"]):
11281 - #cmd("rm -rf "+self.settings["target_path"],
11282 - #"Could not remove existing directory: "+self.settings["target_path"],env=self.env)
11283 - if not os.path.exists(self.settings["target_path"]):
11284 - os.makedirs(self.settings["target_path"])
11285 -
11286 - touch(self.settings["autoresume_path"]+"setup_target_path")
11287 -
11288 - def run_local(self):
11289 - for pkgset in self.settings["grp"]:
11290 - # example call: "grp.sh run pkgset cd1 xmms vim sys-apps/gleep"
11291 - mypackages=list_bashify(self.settings["grp/"+pkgset+"/packages"])
11292 - try:
11293 - cmd("/bin/bash "+self.settings["controller_file"]+" run "+self.settings["grp/"+pkgset+"/type"]\
11294 - +" "+pkgset+" "+mypackages,env=self.env)
11295 -
11296 - except CatalystError:
11297 - self.unbind()
11298 - raise CatalystError,"GRP build aborting due to error."
11299 -
11300 - def set_use(self):
11301 - generic_stage_target.set_use(self)
11302 - if "BINDIST" in self.settings:
11303 - if "use" in self.settings:
11304 - self.settings["use"].append("bindist")
11305 - else:
11306 - self.settings["use"]=["bindist"]
11307 -
11308 - def set_mounts(self):
11309 - self.mounts.append("/tmp/grp")
11310 - self.mountmap["/tmp/grp"]=self.settings["target_path"]
11311 -
11312 - def generate_digests(self):
11313 - for pkgset in self.settings["grp"]:
11314 - if self.settings["grp/"+pkgset+"/type"] == "pkgset":
11315 - destdir=normpath(self.settings["target_path"]+"/"+pkgset+"/All")
11316 - print "Digesting files in the pkgset....."
11317 - digests=glob.glob(destdir+'/*.DIGESTS')
11318 - for i in digests:
11319 - if os.path.exists(i):
11320 - os.remove(i)
11321 -
11322 - files=os.listdir(destdir)
11323 - #ignore files starting with '.' using list comprehension
11324 - files=[filename for filename in files if filename[0] != '.']
11325 - for i in files:
11326 - if os.path.isfile(normpath(destdir+"/"+i)):
11327 - self.gen_contents_file(normpath(destdir+"/"+i))
11328 - self.gen_digest_file(normpath(destdir+"/"+i))
11329 - else:
11330 - destdir=normpath(self.settings["target_path"]+"/"+pkgset)
11331 - print "Digesting files in the srcset....."
11332 -
11333 - digests=glob.glob(destdir+'/*.DIGESTS')
11334 - for i in digests:
11335 - if os.path.exists(i):
11336 - os.remove(i)
11337 -
11338 - files=os.listdir(destdir)
11339 - #ignore files starting with '.' using list comprehension
11340 - files=[filename for filename in files if filename[0] != '.']
11341 - for i in files:
11342 - if os.path.isfile(normpath(destdir+"/"+i)):
11343 - #self.gen_contents_file(normpath(destdir+"/"+i))
11344 - self.gen_digest_file(normpath(destdir+"/"+i))
11345 -
11346 - def set_action_sequence(self):
11347 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
11348 - "config_profile_link","setup_confdir","portage_overlay","bind","chroot_setup",\
11349 - "setup_environment","run_local","unbind",\
11350 - "generate_digests","clear_autoresume"]
11351 -
11352 -def register(foo):
11353 - foo.update({"grp":grp_target})
11354 - return foo
11355 diff --git a/modules/livecd_stage1_target.py b/modules/livecd_stage1_target.py
11356 deleted file mode 100644
11357 index 59de9bb..0000000
11358 --- a/modules/livecd_stage1_target.py
11359 +++ /dev/null
11360 @@ -1,75 +0,0 @@
11361 -"""
11362 -LiveCD stage1 target
11363 -"""
11364 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11365 -
11366 -from catalyst_support import *
11367 -from generic_stage_target import *
11368 -
11369 -class livecd_stage1_target(generic_stage_target):
11370 - """
11371 - Builder class for LiveCD stage1.
11372 - """
11373 - def __init__(self,spec,addlargs):
11374 - self.required_values=["livecd/packages"]
11375 - self.valid_values=self.required_values[:]
11376 -
11377 - self.valid_values.extend(["livecd/use"])
11378 - generic_stage_target.__init__(self,spec,addlargs)
11379 -
11380 - def set_action_sequence(self):
11381 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
11382 - "config_profile_link","setup_confdir","portage_overlay",\
11383 - "bind","chroot_setup","setup_environment","build_packages",\
11384 - "unbind", "clean","clear_autoresume"]
11385 -
11386 - def set_target_path(self):
11387 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"])
11388 - if "AUTORESUME" in self.settings \
11389 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11390 - print "Resume point detected, skipping target path setup operation..."
11391 - else:
11392 - # first clean up any existing target stuff
11393 - if os.path.exists(self.settings["target_path"]):
11394 - cmd("rm -rf "+self.settings["target_path"],\
11395 - "Could not remove existing directory: "+self.settings["target_path"],env=self.env)
11396 - touch(self.settings["autoresume_path"]+"setup_target_path")
11397 -
11398 - if not os.path.exists(self.settings["target_path"]):
11399 - os.makedirs(self.settings["target_path"])
11400 -
11401 - def set_target_path(self):
11402 - pass
11403 -
11404 - def set_spec_prefix(self):
11405 - self.settings["spec_prefix"]="livecd"
11406 -
11407 - def set_use(self):
11408 - generic_stage_target.set_use(self)
11409 - if "use" in self.settings:
11410 - self.settings["use"].append("livecd")
11411 - if "BINDIST" in self.settings:
11412 - self.settings["use"].append("bindist")
11413 - else:
11414 - self.settings["use"]=["livecd"]
11415 - if "BINDIST" in self.settings:
11416 - self.settings["use"].append("bindist")
11417 -
11418 - def set_packages(self):
11419 - generic_stage_target.set_packages(self)
11420 - if self.settings["spec_prefix"]+"/packages" in self.settings:
11421 - if type(self.settings[self.settings["spec_prefix"]+"/packages"]) == types.StringType:
11422 - self.settings[self.settings["spec_prefix"]+"/packages"] = \
11423 - self.settings[self.settings["spec_prefix"]+"/packages"].split()
11424 - self.settings[self.settings["spec_prefix"]+"/packages"].append("app-misc/livecd-tools")
11425 -
11426 - def set_pkgcache_path(self):
11427 - if "pkgcache_path" in self.settings:
11428 - if type(self.settings["pkgcache_path"]) != types.StringType:
11429 - self.settings["pkgcache_path"]=normpath(string.join(self.settings["pkgcache_path"]))
11430 - else:
11431 - generic_stage_target.set_pkgcache_path(self)
11432 -
11433 -def register(foo):
11434 - foo.update({"livecd-stage1":livecd_stage1_target})
11435 - return foo
11436 diff --git a/modules/livecd_stage2_target.py b/modules/livecd_stage2_target.py
11437 deleted file mode 100644
11438 index c74c16d..0000000
11439 --- a/modules/livecd_stage2_target.py
11440 +++ /dev/null
11441 @@ -1,148 +0,0 @@
11442 -"""
11443 -LiveCD stage2 target, builds upon previous LiveCD stage1 tarball
11444 -"""
11445 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11446 -
11447 -import os,string,types,stat,shutil
11448 -from catalyst_support import *
11449 -from generic_stage_target import *
11450 -
11451 -class livecd_stage2_target(generic_stage_target):
11452 - """
11453 - Builder class for a LiveCD stage2 build.
11454 - """
11455 - def __init__(self,spec,addlargs):
11456 - self.required_values=["boot/kernel"]
11457 -
11458 - self.valid_values=[]
11459 -
11460 - self.valid_values.extend(self.required_values)
11461 - self.valid_values.extend(["livecd/cdtar","livecd/empty","livecd/rm",\
11462 - "livecd/unmerge","livecd/iso","livecd/gk_mainargs","livecd/type",\
11463 - "livecd/readme","livecd/motd","livecd/overlay",\
11464 - "livecd/modblacklist","livecd/splash_theme","livecd/rcadd",\
11465 - "livecd/rcdel","livecd/fsscript","livecd/xinitrc",\
11466 - "livecd/root_overlay","livecd/users","portage_overlay",\
11467 - "livecd/fstype","livecd/fsops","livecd/linuxrc","livecd/bootargs",\
11468 - "gamecd/conf","livecd/xdm","livecd/xsession","livecd/volid"])
11469 -
11470 - generic_stage_target.__init__(self,spec,addlargs)
11471 - if "livecd/type" not in self.settings:
11472 - self.settings["livecd/type"] = "generic-livecd"
11473 -
11474 - file_locate(self.settings, ["cdtar","controller_file"])
11475 -
11476 - def set_source_path(self):
11477 - self.settings["source_path"] = normpath(self.settings["storedir"] +
11478 - "/builds/" + self.settings["source_subpath"].rstrip("/") +
11479 - ".tar.bz2")
11480 - if os.path.isfile(self.settings["source_path"]):
11481 - self.settings["source_path_hash"]=generate_hash(self.settings["source_path"])
11482 - else:
11483 - self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/")
11484 - if not os.path.exists(self.settings["source_path"]):
11485 - raise CatalystError,"Source Path: "+self.settings["source_path"]+" does not exist."
11486 -
11487 - def set_spec_prefix(self):
11488 - self.settings["spec_prefix"]="livecd"
11489 -
11490 - def set_target_path(self):
11491 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
11492 - if "AUTORESUME" in self.settings \
11493 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11494 - print "Resume point detected, skipping target path setup operation..."
11495 - else:
11496 - # first clean up any existing target stuff
11497 - if os.path.isdir(self.settings["target_path"]):
11498 - cmd("rm -rf "+self.settings["target_path"],
11499 - "Could not remove existing directory: "+self.settings["target_path"],env=self.env)
11500 - touch(self.settings["autoresume_path"]+"setup_target_path")
11501 - if not os.path.exists(self.settings["target_path"]):
11502 - os.makedirs(self.settings["target_path"])
11503 -
11504 - def run_local(self):
11505 - # what modules do we want to blacklist?
11506 - if "livecd/modblacklist" in self.settings:
11507 - try:
11508 - myf=open(self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf","a")
11509 - except:
11510 - self.unbind()
11511 - raise CatalystError,"Couldn't open "+self.settings["chroot_path"]+"/etc/modprobe.d/blacklist.conf."
11512 -
11513 - myf.write("\n#Added by Catalyst:")
11514 - # workaround until config.py is using configparser
11515 - if isinstance(self.settings["livecd/modblacklist"], str):
11516 - self.settings["livecd/modblacklist"] = self.settings["livecd/modblacklist"].split()
11517 - for x in self.settings["livecd/modblacklist"]:
11518 - myf.write("\nblacklist "+x)
11519 - myf.close()
11520 -
11521 - def unpack(self):
11522 - unpack=True
11523 - display_msg=None
11524 -
11525 - clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+"unpack")
11526 -
11527 - if os.path.isdir(self.settings["source_path"]):
11528 - unpack_cmd="rsync -a --delete "+self.settings["source_path"]+" "+self.settings["chroot_path"]
11529 - display_msg="\nStarting rsync from "+self.settings["source_path"]+"\nto "+\
11530 - self.settings["chroot_path"]+" (This may take some time) ...\n"
11531 - error_msg="Rsync of "+self.settings["source_path"]+" to "+self.settings["chroot_path"]+" failed."
11532 - invalid_snapshot=False
11533 -
11534 - if "AUTORESUME" in self.settings:
11535 - if os.path.isdir(self.settings["source_path"]) and \
11536 - os.path.exists(self.settings["autoresume_path"]+"unpack"):
11537 - print "Resume point detected, skipping unpack operation..."
11538 - unpack=False
11539 - elif "source_path_hash" in self.settings:
11540 - if self.settings["source_path_hash"] != clst_unpack_hash:
11541 - invalid_snapshot=True
11542 -
11543 - if unpack:
11544 - self.mount_safety_check()
11545 - if invalid_snapshot:
11546 - print "No Valid Resume point detected, cleaning up ..."
11547 - #os.remove(self.settings["autoresume_path"]+"dir_setup")
11548 - self.clear_autoresume()
11549 - self.clear_chroot()
11550 - #self.dir_setup()
11551 -
11552 - if not os.path.exists(self.settings["chroot_path"]):
11553 - os.makedirs(self.settings["chroot_path"])
11554 -
11555 - if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
11556 - os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
11557 -
11558 - if "PKGCACHE" in self.settings:
11559 - if not os.path.exists(self.settings["pkgcache_path"]):
11560 - os.makedirs(self.settings["pkgcache_path"],0755)
11561 -
11562 - if not display_msg:
11563 - raise CatalystError,"Could not find appropriate source. Please check the 'source_subpath' setting in the spec file."
11564 -
11565 - print display_msg
11566 - cmd(unpack_cmd,error_msg,env=self.env)
11567 -
11568 - if "source_path_hash" in self.settings:
11569 - myf=open(self.settings["autoresume_path"]+"unpack","w")
11570 - myf.write(self.settings["source_path_hash"])
11571 - myf.close()
11572 - else:
11573 - touch(self.settings["autoresume_path"]+"unpack")
11574 -
11575 - def set_action_sequence(self):
11576 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
11577 - "config_profile_link","setup_confdir","portage_overlay",\
11578 - "bind","chroot_setup","setup_environment","run_local",\
11579 - "build_kernel"]
11580 - if "FETCH" not in self.settings:
11581 - self.settings["action_sequence"] += ["bootloader","preclean",\
11582 - "livecd_update","root_overlay","fsscript","rcupdate","unmerge",\
11583 - "unbind","remove","empty","target_setup",\
11584 - "setup_overlay","create_iso"]
11585 - self.settings["action_sequence"].append("clear_autoresume")
11586 -
11587 -def register(foo):
11588 - foo.update({"livecd-stage2":livecd_stage2_target})
11589 - return foo
11590 diff --git a/modules/netboot2_target.py b/modules/netboot2_target.py
11591 deleted file mode 100644
11592 index 1ab7e7d..0000000
11593 --- a/modules/netboot2_target.py
11594 +++ /dev/null
11595 @@ -1,166 +0,0 @@
11596 -"""
11597 -netboot target, version 2
11598 -"""
11599 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11600 -
11601 -import os,string,types
11602 -from catalyst_support import *
11603 -from generic_stage_target import *
11604 -
11605 -class netboot2_target(generic_stage_target):
11606 - """
11607 - Builder class for a netboot build, version 2
11608 - """
11609 - def __init__(self,spec,addlargs):
11610 - self.required_values=[
11611 - "boot/kernel"
11612 - ]
11613 - self.valid_values=self.required_values[:]
11614 - self.valid_values.extend([
11615 - "netboot2/packages",
11616 - "netboot2/use",
11617 - "netboot2/extra_files",
11618 - "netboot2/overlay",
11619 - "netboot2/busybox_config",
11620 - "netboot2/root_overlay",
11621 - "netboot2/linuxrc"
11622 - ])
11623 -
11624 - try:
11625 - if "netboot2/packages" in addlargs:
11626 - if type(addlargs["netboot2/packages"]) == types.StringType:
11627 - loopy=[addlargs["netboot2/packages"]]
11628 - else:
11629 - loopy=addlargs["netboot2/packages"]
11630 -
11631 - for x in loopy:
11632 - self.valid_values.append("netboot2/packages/"+x+"/files")
11633 - except:
11634 - raise CatalystError,"configuration error in netboot2/packages."
11635 -
11636 - generic_stage_target.__init__(self,spec,addlargs)
11637 - self.set_build_kernel_vars()
11638 - self.settings["merge_path"]=normpath("/tmp/image/")
11639 -
11640 - def set_target_path(self):
11641 - self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+\
11642 - self.settings["target_subpath"]+"/")
11643 - if "AUTORESUME" in self.settings \
11644 - and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
11645 - print "Resume point detected, skipping target path setup operation..."
11646 - else:
11647 - # first clean up any existing target stuff
11648 - if os.path.isfile(self.settings["target_path"]):
11649 - cmd("rm -f "+self.settings["target_path"], \
11650 - "Could not remove existing file: "+self.settings["target_path"],env=self.env)
11651 - touch(self.settings["autoresume_path"]+"setup_target_path")
11652 -
11653 - if not os.path.exists(self.settings["storedir"]+"/builds/"):
11654 - os.makedirs(self.settings["storedir"]+"/builds/")
11655 -
11656 - def copy_files_to_image(self):
11657 - # copies specific files from the buildroot to merge_path
11658 - myfiles=[]
11659 -
11660 - # check for autoresume point
11661 - if "AUTORESUME" in self.settings \
11662 - and os.path.exists(self.settings["autoresume_path"]+"copy_files_to_image"):
11663 - print "Resume point detected, skipping target path setup operation..."
11664 - else:
11665 - if "netboot2/packages" in self.settings:
11666 - if type(self.settings["netboot2/packages"]) == types.StringType:
11667 - loopy=[self.settings["netboot2/packages"]]
11668 - else:
11669 - loopy=self.settings["netboot2/packages"]
11670 -
11671 - for x in loopy:
11672 - if "netboot2/packages/"+x+"/files" in self.settings:
11673 - if type(self.settings["netboot2/packages/"+x+"/files"]) == types.ListType:
11674 - myfiles.extend(self.settings["netboot2/packages/"+x+"/files"])
11675 - else:
11676 - myfiles.append(self.settings["netboot2/packages/"+x+"/files"])
11677 -
11678 - if "netboot2/extra_files" in self.settings:
11679 - if type(self.settings["netboot2/extra_files"]) == types.ListType:
11680 - myfiles.extend(self.settings["netboot2/extra_files"])
11681 - else:
11682 - myfiles.append(self.settings["netboot2/extra_files"])
11683 -
11684 - try:
11685 - cmd("/bin/bash "+self.settings["controller_file"]+\
11686 - " image " + list_bashify(myfiles),env=self.env)
11687 - except CatalystError:
11688 - self.unbind()
11689 - raise CatalystError,"Failed to copy files to image!"
11690 -
11691 - touch(self.settings["autoresume_path"]+"copy_files_to_image")
11692 -
11693 - def setup_overlay(self):
11694 - if "AUTORESUME" in self.settings \
11695 - and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
11696 - print "Resume point detected, skipping setup_overlay operation..."
11697 - else:
11698 - if "netboot2/overlay" in self.settings:
11699 - for x in self.settings["netboot2/overlay"]:
11700 - if os.path.exists(x):
11701 - cmd("rsync -a "+x+"/ "+\
11702 - self.settings["chroot_path"] + self.settings["merge_path"], "netboot2/overlay: "+x+" copy failed.",env=self.env)
11703 - touch(self.settings["autoresume_path"]+"setup_overlay")
11704 -
11705 - def move_kernels(self):
11706 - # we're done, move the kernels to builds/*
11707 - # no auto resume here as we always want the
11708 - # freshest images moved
11709 - try:
11710 - cmd("/bin/bash "+self.settings["controller_file"]+\
11711 - " final",env=self.env)
11712 - print ">>> Netboot Build Finished!"
11713 - except CatalystError:
11714 - self.unbind()
11715 - raise CatalystError,"Failed to move kernel images!"
11716 -
11717 - def remove(self):
11718 - if "AUTORESUME" in self.settings \
11719 - and os.path.exists(self.settings["autoresume_path"]+"remove"):
11720 - print "Resume point detected, skipping remove operation..."
11721 - else:
11722 - if self.settings["spec_prefix"]+"/rm" in self.settings:
11723 - for x in self.settings[self.settings["spec_prefix"]+"/rm"]:
11724 - # we're going to shell out for all these cleaning operations,
11725 - # so we get easy glob handling
11726 - print "netboot2: removing " + x
11727 - os.system("rm -rf " + self.settings["chroot_path"] + self.settings["merge_path"] + x)
11728 -
11729 - def empty(self):
11730 - if "AUTORESUME" in self.settings \
11731 - and os.path.exists(self.settings["autoresume_path"]+"empty"):
11732 - print "Resume point detected, skipping empty operation..."
11733 - else:
11734 - if "netboot2/empty" in self.settings:
11735 - if type(self.settings["netboot2/empty"])==types.StringType:
11736 - self.settings["netboot2/empty"]=self.settings["netboot2/empty"].split()
11737 - for x in self.settings["netboot2/empty"]:
11738 - myemp=self.settings["chroot_path"] + self.settings["merge_path"] + x
11739 - if not os.path.isdir(myemp):
11740 - print x,"not a directory or does not exist, skipping 'empty' operation."
11741 - continue
11742 - print "Emptying directory", x
11743 - # stat the dir, delete the dir, recreate the dir and set
11744 - # the proper perms and ownership
11745 - mystat=os.stat(myemp)
11746 - shutil.rmtree(myemp)
11747 - os.makedirs(myemp,0755)
11748 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11749 - os.chmod(myemp,mystat[ST_MODE])
11750 - touch(self.settings["autoresume_path"]+"empty")
11751 -
11752 - def set_action_sequence(self):
11753 - self.settings["action_sequence"]=["unpack","unpack_snapshot","config_profile_link",
11754 - "setup_confdir","portage_overlay","bind","chroot_setup",\
11755 - "setup_environment","build_packages","root_overlay",\
11756 - "copy_files_to_image","setup_overlay","build_kernel","move_kernels",\
11757 - "remove","empty","unbind","clean","clear_autoresume"]
11758 -
11759 -def register(foo):
11760 - foo.update({"netboot2":netboot2_target})
11761 - return foo
11762 diff --git a/modules/netboot_target.py b/modules/netboot_target.py
11763 deleted file mode 100644
11764 index ff2c81f..0000000
11765 --- a/modules/netboot_target.py
11766 +++ /dev/null
11767 @@ -1,128 +0,0 @@
11768 -"""
11769 -netboot target, version 1
11770 -"""
11771 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
11772 -
11773 -import os,string,types
11774 -from catalyst_support import *
11775 -from generic_stage_target import *
11776 -
11777 -class netboot_target(generic_stage_target):
11778 - """
11779 - Builder class for a netboot build.
11780 - """
11781 - def __init__(self,spec,addlargs):
11782 - self.valid_values = [
11783 - "netboot/kernel/sources",
11784 - "netboot/kernel/config",
11785 - "netboot/kernel/prebuilt",
11786 -
11787 - "netboot/busybox_config",
11788 -
11789 - "netboot/extra_files",
11790 - "netboot/packages"
11791 - ]
11792 - self.required_values=[]
11793 -
11794 - try:
11795 - if "netboot/packages" in addlargs:
11796 - if type(addlargs["netboot/packages"]) == types.StringType:
11797 - loopy=[addlargs["netboot/packages"]]
11798 - else:
11799 - loopy=addlargs["netboot/packages"]
11800 -
11801 - # for x in loopy:
11802 - # self.required_values.append("netboot/packages/"+x+"/files")
11803 - except:
11804 - raise CatalystError,"configuration error in netboot/packages."
11805 -
11806 - generic_stage_target.__init__(self,spec,addlargs)
11807 - self.set_build_kernel_vars(addlargs)
11808 - if "netboot/busybox_config" in addlargs:
11809 - file_locate(self.settings, ["netboot/busybox_config"])
11810 -
11811 - # Custom Kernel Tarball --- use that instead ...
11812 -
11813 - # unless the user wants specific CFLAGS/CXXFLAGS, let's use -Os
11814 -
11815 - for envvar in "CFLAGS", "CXXFLAGS":
11816 - if envvar not in os.environ and envvar not in addlargs:
11817 - self.settings[envvar] = "-Os -pipe"
11818 -
11819 - def set_root_path(self):
11820 - # ROOT= variable for emerges
11821 - self.settings["root_path"]=normpath("/tmp/image")
11822 - print "netboot root path is "+self.settings["root_path"]
11823 -
11824 -# def build_packages(self):
11825 -# # build packages
11826 -# if "netboot/packages" in self.settings:
11827 -# mypack=list_bashify(self.settings["netboot/packages"])
11828 -# try:
11829 -# cmd("/bin/bash "+self.settings["controller_file"]+" packages "+mypack,env=self.env)
11830 -# except CatalystError:
11831 -# self.unbind()
11832 -# raise CatalystError,"netboot build aborting due to error."
11833 -
11834 - def build_busybox(self):
11835 - # build busybox
11836 - if "netboot/busybox_config" in self.settings:
11837 - mycmd = self.settings["netboot/busybox_config"]
11838 - else:
11839 - mycmd = ""
11840 - try:
11841 - cmd("/bin/bash "+self.settings["controller_file"]+" busybox "+ mycmd,env=self.env)
11842 - except CatalystError:
11843 - self.unbind()
11844 - raise CatalystError,"netboot build aborting due to error."
11845 -
11846 - def copy_files_to_image(self):
11847 - # create image
11848 - myfiles=[]
11849 - if "netboot/packages" in self.settings:
11850 - if type(self.settings["netboot/packages"]) == types.StringType:
11851 - loopy=[self.settings["netboot/packages"]]
11852 - else:
11853 - loopy=self.settings["netboot/packages"]
11854 -
11855 - for x in loopy:
11856 - if "netboot/packages/"+x+"/files" in self.settings:
11857 - if type(self.settings["netboot/packages/"+x+"/files"]) == types.ListType:
11858 - myfiles.extend(self.settings["netboot/packages/"+x+"/files"])
11859 - else:
11860 - myfiles.append(self.settings["netboot/packages/"+x+"/files"])
11861 -
11862 - if "netboot/extra_files" in self.settings:
11863 - if type(self.settings["netboot/extra_files"]) == types.ListType:
11864 - myfiles.extend(self.settings["netboot/extra_files"])
11865 - else:
11866 - myfiles.append(self.settings["netboot/extra_files"])
11867 -
11868 - try:
11869 - cmd("/bin/bash "+self.settings["controller_file"]+\
11870 - " image " + list_bashify(myfiles),env=self.env)
11871 - except CatalystError:
11872 - self.unbind()
11873 - raise CatalystError,"netboot build aborting due to error."
11874 -
11875 - def create_netboot_files(self):
11876 - # finish it all up
11877 - try:
11878 - cmd("/bin/bash "+self.settings["controller_file"]+" finish",env=self.env)
11879 - except CatalystError:
11880 - self.unbind()
11881 - raise CatalystError,"netboot build aborting due to error."
11882 -
11883 - # end
11884 - print "netboot: build finished !"
11885 -
11886 - def set_action_sequence(self):
11887 - self.settings["action_sequence"]=["unpack","unpack_snapshot",
11888 - "config_profile_link","setup_confdir","bind","chroot_setup",\
11889 - "setup_environment","build_packages","build_busybox",\
11890 - "build_kernel","copy_files_to_image",\
11891 - "clean","create_netboot_files","unbind","clear_autoresume"]
11892 -
11893 -def register(foo):
11894 - foo.update({"netboot":netboot_target})
11895 - return foo
11896 diff --git a/modules/snapshot_target.py b/modules/snapshot_target.py
11897 deleted file mode 100644
11898 index ba1bab5..0000000
11899 --- a/modules/snapshot_target.py
11900 +++ /dev/null
11901 @@ -1,91 +0,0 @@
11902 -"""
11903 -Snapshot target
11904 -"""
11905 -
11906 -import os
11907 -from catalyst_support import *
11908 -from generic_stage_target import *
11909 -
11910 -class snapshot_target(generic_stage_target):
11911 - """
11912 - Builder class for snapshots.
11913 - """
11914 - def __init__(self,myspec,addlargs):
11915 - self.required_values=["version_stamp","target"]
11916 - self.valid_values=["version_stamp","target"]
11917 -
11918 - generic_target.__init__(self,myspec,addlargs)
11919 - self.settings=myspec
11920 - self.settings["target_subpath"]="portage"
11921 - st=self.settings["storedir"]
11922 - self.settings["snapshot_path"] = normpath(st + "/snapshots/"
11923 - + self.settings["snapshot_name"]
11924 - + self.settings["version_stamp"] + ".tar.bz2")
11925 - self.settings["tmp_path"]=normpath(st+"/tmp/"+self.settings["target_subpath"])
11926 -
11927 - def setup(self):
11928 - x=normpath(self.settings["storedir"]+"/snapshots")
11929 - if not os.path.exists(x):
11930 - os.makedirs(x)
11931 -
11932 - def mount_safety_check(self):
11933 - pass
11934 -
11935 - def run(self):
11936 - if "PURGEONLY" in self.settings:
11937 - self.purge()
11938 - return
11939 -
11940 - if "PURGE" in self.settings:
11941 - self.purge()
11942 -
11943 - self.setup()
11944 - print "Creating Portage tree snapshot "+self.settings["version_stamp"]+\
11945 - " from "+self.settings["portdir"]+"..."
11946 -
11947 - mytmp=self.settings["tmp_path"]
11948 - if not os.path.exists(mytmp):
11949 - os.makedirs(mytmp)
11950 -
11951 - cmd("rsync -a --delete --exclude /packages/ --exclude /distfiles/ " +
11952 - "--exclude /local/ --exclude CVS/ --exclude .svn --filter=H_**/files/digest-* " +
11953 - self.settings["portdir"] + "/ " + mytmp + "/%s/" % self.settings["repo_name"],
11954 - "Snapshot failure", env=self.env)
11955 -
11956 - print "Compressing Portage snapshot tarball..."
11957 - cmd("tar -I lbzip2 -cf " + self.settings["snapshot_path"] + " -C " +
11958 - mytmp + " " + self.settings["repo_name"],
11959 - "Snapshot creation failure",env=self.env)
11960 -
11961 - self.gen_contents_file(self.settings["snapshot_path"])
11962 - self.gen_digest_file(self.settings["snapshot_path"])
11963 -
11964 - self.cleanup()
11965 - print "snapshot: complete!"
11966 -
11967 - def kill_chroot_pids(self):
11968 - pass
11969 -
11970 - def cleanup(self):
11971 - print "Cleaning up..."
11972 -
11973 - def purge(self):
11974 - myemp=self.settings["tmp_path"]
11975 - if os.path.isdir(myemp):
11976 - print "Emptying directory",myemp
11977 - """
11978 - stat the dir, delete the dir, recreate the dir and set
11979 - the proper perms and ownership
11980 - """
11981 - mystat=os.stat(myemp)
11982 - """ There's no easy way to change flags recursively in python """
11983 - if os.uname()[0] == "FreeBSD":
11984 - os.system("chflags -R noschg "+myemp)
11985 - shutil.rmtree(myemp)
11986 - os.makedirs(myemp,0755)
11987 - os.chown(myemp,mystat[ST_UID],mystat[ST_GID])
11988 - os.chmod(myemp,mystat[ST_MODE])
11989 -
11990 -def register(foo):
11991 - foo.update({"snapshot":snapshot_target})
11992 - return foo
11993 diff --git a/modules/stage1_target.py b/modules/stage1_target.py
11994 deleted file mode 100644
11995 index 5f4ffa0..0000000
11996 --- a/modules/stage1_target.py
11997 +++ /dev/null
11998 @@ -1,97 +0,0 @@
11999 -"""
12000 -stage1 target
12001 -"""
12002 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12003 -
12004 -from catalyst_support import *
12005 -from generic_stage_target import *
12006 -
12007 -class stage1_target(generic_stage_target):
12008 - """
12009 - Builder class for a stage1 installation tarball build.
12010 - """
12011 - def __init__(self,spec,addlargs):
12012 - self.required_values=[]
12013 - self.valid_values=["chost"]
12014 - self.valid_values.extend(["update_seed","update_seed_command"])
12015 - generic_stage_target.__init__(self,spec,addlargs)
12016 -
12017 - def set_stage_path(self):
12018 - self.settings["stage_path"]=normpath(self.settings["chroot_path"]+self.settings["root_path"])
12019 - print "stage1 stage path is "+self.settings["stage_path"]
12020 -
12021 - def set_root_path(self):
12022 - # sets the root path, relative to 'chroot_path', of the stage1 root
12023 - self.settings["root_path"]=normpath("/tmp/stage1root")
12024 - print "stage1 root path is "+self.settings["root_path"]
12025 -
12026 - def set_cleanables(self):
12027 - generic_stage_target.set_cleanables(self)
12028 - self.settings["cleanables"].extend([\
12029 - "/usr/share/zoneinfo", "/etc/portage/package*"])
12030 -
12031 - # XXX: How do these override_foo() functions differ from the ones in generic_stage_target and why aren't they in stage3_target?
12032 -
12033 - def override_chost(self):
12034 - if "chost" in self.settings:
12035 - self.settings["CHOST"]=list_to_string(self.settings["chost"])
12036 -
12037 - def override_cflags(self):
12038 - if "cflags" in self.settings:
12039 - self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
12040 -
12041 - def override_cxxflags(self):
12042 - if "cxxflags" in self.settings:
12043 - self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
12044 -
12045 - def override_ldflags(self):
12046 - if "ldflags" in self.settings:
12047 - self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
12048 -
12049 - def set_portage_overlay(self):
12050 - generic_stage_target.set_portage_overlay(self)
12051 - if "portage_overlay" in self.settings:
12052 - print "\nWARNING !!!!!"
12053 - print "\tUsing an portage overlay for earlier stages could cause build issues."
12054 - print "\tIf you break it, you buy it. Don't complain to us about it."
12055 - print "\tDont say we did not warn you\n"
12056 -
12057 - def base_dirs(self):
12058 - if os.uname()[0] == "FreeBSD":
12059 - # baselayout no longer creates the .keep files in proc and dev for FreeBSD as it
12060 - # would create them too late...we need them earlier before bind mounting filesystems
12061 - # since proc and dev are not writeable, so...create them here
12062 - if not os.path.exists(self.settings["stage_path"]+"/proc"):
12063 - os.makedirs(self.settings["stage_path"]+"/proc")
12064 - if not os.path.exists(self.settings["stage_path"]+"/dev"):
12065 - os.makedirs(self.settings["stage_path"]+"/dev")
12066 - if not os.path.isfile(self.settings["stage_path"]+"/proc/.keep"):
12067 - try:
12068 - proc_keepfile = open(self.settings["stage_path"]+"/proc/.keep","w")
12069 - proc_keepfile.write('')
12070 - proc_keepfile.close()
12071 - except IOError:
12072 - print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
12073 - if not os.path.isfile(self.settings["stage_path"]+"/dev/.keep"):
12074 - try:
12075 - dev_keepfile = open(self.settings["stage_path"]+"/dev/.keep","w")
12076 - dev_keepfile.write('')
12077 - dev_keepfile.close()
12078 - except IOError:
12079 - print "!!! Failed to create %s" % (self.settings["stage_path"]+"/dev/.keep")
12080 - else:
12081 - pass
12082 -
12083 - def set_mounts(self):
12084 - # stage_path/proc probably doesn't exist yet, so create it
12085 - if not os.path.exists(self.settings["stage_path"]+"/proc"):
12086 - os.makedirs(self.settings["stage_path"]+"/proc")
12087 -
12088 - # alter the mount mappings to bind mount proc onto it
12089 - self.mounts.append("stage1root/proc")
12090 - self.target_mounts["stage1root/proc"] = "/tmp/stage1root/proc"
12091 - self.mountmap["stage1root/proc"] = "/proc"
12092 -
12093 -def register(foo):
12094 - foo.update({"stage1":stage1_target})
12095 - return foo
12096 diff --git a/modules/stage2_target.py b/modules/stage2_target.py
12097 deleted file mode 100644
12098 index 803ec59..0000000
12099 --- a/modules/stage2_target.py
12100 +++ /dev/null
12101 @@ -1,66 +0,0 @@
12102 -"""
12103 -stage2 target, builds upon previous stage1 tarball
12104 -"""
12105 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12106 -
12107 -from catalyst_support import *
12108 -from generic_stage_target import *
12109 -
12110 -class stage2_target(generic_stage_target):
12111 - """
12112 - Builder class for a stage2 installation tarball build.
12113 - """
12114 - def __init__(self,spec,addlargs):
12115 - self.required_values=[]
12116 - self.valid_values=["chost"]
12117 - generic_stage_target.__init__(self,spec,addlargs)
12118 -
12119 - def set_source_path(self):
12120 - if "SEEDCACHE" in self.settings and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
12121 - self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")
12122 - else:
12123 - self.settings["source_path"] = normpath(self.settings["storedir"] +
12124 - "/builds/" + self.settings["source_subpath"].rstrip("/") +
12125 - ".tar.bz2")
12126 - if os.path.isfile(self.settings["source_path"]):
12127 - if os.path.exists(self.settings["source_path"]):
12128 - # XXX: Is this even necessary if the previous check passes?
12129 - self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
12130 - hash_function=self.settings["hash_function"],verbose=False)
12131 - print "Source path set to "+self.settings["source_path"]
12132 - if os.path.isdir(self.settings["source_path"]):
12133 - print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"
12134 - print "\tthe source path will then be " + \
12135 - normpath(self.settings["storedir"] + "/builds/" + \
12136 - self.settings["source_subpath"].restrip("/") + ".tar.bz2\n")
12137 -
12138 - # XXX: How do these override_foo() functions differ from the ones in
12139 - # generic_stage_target and why aren't they in stage3_target?
12140 -
12141 - def override_chost(self):
12142 - if "chost" in self.settings:
12143 - self.settings["CHOST"]=list_to_string(self.settings["chost"])
12144 -
12145 - def override_cflags(self):
12146 - if "cflags" in self.settings:
12147 - self.settings["CFLAGS"]=list_to_string(self.settings["cflags"])
12148 -
12149 - def override_cxxflags(self):
12150 - if "cxxflags" in self.settings:
12151 - self.settings["CXXFLAGS"]=list_to_string(self.settings["cxxflags"])
12152 -
12153 - def override_ldflags(self):
12154 - if "ldflags" in self.settings:
12155 - self.settings["LDFLAGS"]=list_to_string(self.settings["ldflags"])
12156 -
12157 - def set_portage_overlay(self):
12158 - generic_stage_target.set_portage_overlay(self)
12159 - if "portage_overlay" in self.settings:
12160 - print "\nWARNING !!!!!"
12161 - print "\tUsing an portage overlay for earlier stages could cause build issues."
12162 - print "\tIf you break it, you buy it. Don't complain to us about it."
12163 - print "\tDont say we did not warn you\n"
12164 -
12165 -def register(foo):
12166 - foo.update({"stage2":stage2_target})
12167 - return foo
12168 diff --git a/modules/stage3_target.py b/modules/stage3_target.py
12169 deleted file mode 100644
12170 index 4d3a008..0000000
12171 --- a/modules/stage3_target.py
12172 +++ /dev/null
12173 @@ -1,31 +0,0 @@
12174 -"""
12175 -stage3 target, builds upon previous stage2/stage3 tarball
12176 -"""
12177 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12178 -
12179 -from catalyst_support import *
12180 -from generic_stage_target import *
12181 -
12182 -class stage3_target(generic_stage_target):
12183 - """
12184 - Builder class for a stage3 installation tarball build.
12185 - """
12186 - def __init__(self,spec,addlargs):
12187 - self.required_values=[]
12188 - self.valid_values=[]
12189 - generic_stage_target.__init__(self,spec,addlargs)
12190 -
12191 - def set_portage_overlay(self):
12192 - generic_stage_target.set_portage_overlay(self)
12193 - if "portage_overlay" in self.settings:
12194 - print "\nWARNING !!!!!"
12195 - print "\tUsing an overlay for earlier stages could cause build issues."
12196 - print "\tIf you break it, you buy it. Don't complain to us about it."
12197 - print "\tDont say we did not warn you\n"
12198 -
12199 - def set_cleanables(self):
12200 - generic_stage_target.set_cleanables(self)
12201 -
12202 -def register(foo):
12203 - foo.update({"stage3":stage3_target})
12204 - return foo
12205 diff --git a/modules/stage4_target.py b/modules/stage4_target.py
12206 deleted file mode 100644
12207 index ce41b2d..0000000
12208 --- a/modules/stage4_target.py
12209 +++ /dev/null
12210 @@ -1,43 +0,0 @@
12211 -"""
12212 -stage4 target, builds upon previous stage3/stage4 tarball
12213 -"""
12214 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12215 -
12216 -from catalyst_support import *
12217 -from generic_stage_target import *
12218 -
12219 -class stage4_target(generic_stage_target):
12220 - """
12221 - Builder class for stage4.
12222 - """
12223 - def __init__(self,spec,addlargs):
12224 - self.required_values=["stage4/packages"]
12225 - self.valid_values=self.required_values[:]
12226 - self.valid_values.extend(["stage4/use","boot/kernel",\
12227 - "stage4/root_overlay","stage4/fsscript",\
12228 - "stage4/gk_mainargs","splash_theme",\
12229 - "portage_overlay","stage4/rcadd","stage4/rcdel",\
12230 - "stage4/linuxrc","stage4/unmerge","stage4/rm","stage4/empty"])
12231 - generic_stage_target.__init__(self,spec,addlargs)
12232 -
12233 - def set_cleanables(self):
12234 - self.settings["cleanables"]=["/var/tmp/*","/tmp/*"]
12235 -
12236 - def set_action_sequence(self):
12237 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
12238 - "config_profile_link","setup_confdir","portage_overlay",\
12239 - "bind","chroot_setup","setup_environment","build_packages",\
12240 - "build_kernel","bootloader","root_overlay","fsscript",\
12241 - "preclean","rcupdate","unmerge","unbind","remove","empty",\
12242 - "clean"]
12243 -
12244 -# if "TARBALL" in self.settings or \
12245 -# "FETCH" not in self.settings:
12246 - if "FETCH" not in self.settings:
12247 - self.settings["action_sequence"].append("capture")
12248 - self.settings["action_sequence"].append("clear_autoresume")
12249 -
12250 -def register(foo):
12251 - foo.update({"stage4":stage4_target})
12252 - return foo
12253 -
12254 diff --git a/modules/tinderbox_target.py b/modules/tinderbox_target.py
12255 deleted file mode 100644
12256 index ca55610..0000000
12257 --- a/modules/tinderbox_target.py
12258 +++ /dev/null
12259 @@ -1,48 +0,0 @@
12260 -"""
12261 -Tinderbox target
12262 -"""
12263 -# NOTE: That^^ docstring has influence catalyst-spec(5) man page generation.
12264 -
12265 -from catalyst_support import *
12266 -from generic_stage_target import *
12267 -
12268 -class tinderbox_target(generic_stage_target):
12269 - """
12270 - Builder class for the tinderbox target
12271 - """
12272 - def __init__(self,spec,addlargs):
12273 - self.required_values=["tinderbox/packages"]
12274 - self.valid_values=self.required_values[:]
12275 - self.valid_values.extend(["tinderbox/use"])
12276 - generic_stage_target.__init__(self,spec,addlargs)
12277 -
12278 - def run_local(self):
12279 - # tinderbox
12280 - # example call: "grp.sh run xmms vim sys-apps/gleep"
12281 - try:
12282 - if os.path.exists(self.settings["controller_file"]):
12283 - cmd("/bin/bash "+self.settings["controller_file"]+" run "+\
12284 - list_bashify(self.settings["tinderbox/packages"]),"run script failed.",env=self.env)
12285 -
12286 - except CatalystError:
12287 - self.unbind()
12288 - raise CatalystError,"Tinderbox aborting due to error."
12289 -
12290 - def set_cleanables(self):
12291 - self.settings['cleanables'] = [
12292 - '/etc/resolv.conf',
12293 - '/var/tmp/*',
12294 - '/root/*',
12295 - self.settings['portdir'],
12296 - ]
12297 -
12298 - def set_action_sequence(self):
12299 - #Default action sequence for run method
12300 - self.settings["action_sequence"]=["unpack","unpack_snapshot",\
12301 - "config_profile_link","setup_confdir","bind","chroot_setup",\
12302 - "setup_environment","run_local","preclean","unbind","clean",\
12303 - "clear_autoresume"]
12304 -
12305 -def register(foo):
12306 - foo.update({"tinderbox":tinderbox_target})
12307 - return foo
12308 --
12309 1.8.3.2

Replies