1 |
Split out hash and contents to their own classes, files. |
2 |
--- |
3 |
catalyst/contents.py | 72 +++++++++++++++ |
4 |
catalyst/defaults.py | 68 ++++++++++++++ |
5 |
catalyst/hash_utils.py | 106 ++++++++++++++++++++++ |
6 |
catalyst/main.py | 46 ++++++---- |
7 |
catalyst/support.py | 150 +------------------------------ |
8 |
catalyst/targets/generic_stage_target.py | 15 ++-- |
9 |
catalyst/targets/stage2_target.py | 7 +- |
10 |
etc/catalyst.conf | 12 +-- |
11 |
8 files changed, 300 insertions(+), 176 deletions(-) |
12 |
create mode 100644 catalyst/contents.py |
13 |
create mode 100644 catalyst/defaults.py |
14 |
create mode 100644 catalyst/hash_utils.py |
15 |
|
16 |
diff --git a/catalyst/contents.py b/catalyst/contents.py |
17 |
new file mode 100644 |
18 |
index 0000000..0ecabbd |
19 |
--- /dev/null |
20 |
+++ b/catalyst/contents.py |
21 |
@@ -0,0 +1,72 @@ |
22 |
+ |
23 |
+from collections import namedtuple |
24 |
+from subprocess import Popen, PIPE |
25 |
+ |
26 |
+from support import CatalystError, warn |
27 |
+ |
28 |
+ |
29 |
+class ContentsMap(object): |
30 |
+ '''Class to encompass all known commands to list |
31 |
+ the contents of an archive''' |
32 |
+ |
33 |
+ |
34 |
+ fields = ['func', 'cmd'] |
35 |
+ |
36 |
+ |
37 |
+ def __init__(self, defs=None): |
38 |
+ '''Class init |
39 |
+ |
40 |
+ @param defs: dictionary of Key:[function, cmd] |
41 |
+ ''' |
42 |
+ if defs is None: |
43 |
+ defs = {} |
44 |
+ #self.contents = {} |
45 |
+ self.contents_map = {} |
46 |
+ |
47 |
+ # create the archive type namedtuple classes |
48 |
+ for name in list(defs): |
49 |
+ #obj = self.contents[name] = namedtuple(name, self.fields) |
50 |
+ obj = namedtuple(name, self.fields) |
51 |
+ obj.__slots__ = () |
52 |
+ self.contents_map[name] = obj._make(defs[name]) |
53 |
+ del obj |
54 |
+ |
55 |
+ |
56 |
+ def generate_contents(self, file_, getter="auto", verbose=False): |
57 |
+ try: |
58 |
+ archive = getter |
59 |
+ if archive == 'auto' and file_.endswith('.iso'): |
60 |
+ archive = 'isoinfo_l' |
61 |
+ if (archive in ['tar_tv','auto']): |
62 |
+ if file_.endswith('.tgz') or file_.endswith('.tar.gz'): |
63 |
+ archive = 'tar_tvz' |
64 |
+ elif file_.endswith('.tbz2') or file_.endswith('.tar.bz2'): |
65 |
+ archive = 'tar_tvj' |
66 |
+ elif file_.endswith('.tar'): |
67 |
+ archive = 'tar_tv' |
68 |
+ |
69 |
+ if archive == 'auto': |
70 |
+ warn('File %r has unknown type for automatic detection.' |
71 |
+ % (file_, )) |
72 |
+ return None |
73 |
+ else: |
74 |
+ getter = archive |
75 |
+ func = getattr(self, '_%s_' % self.contents_map[getter].func) |
76 |
+ return func(file_, self.contents_map[getter].cmd, verbose) |
77 |
+ except: |
78 |
+ raise CatalystError,\ |
79 |
+ "Error generating contents, is appropriate utility " +\ |
80 |
+ "(%s) installed on your system?" \ |
81 |
+ % (self.contents_map[getter].cmd) |
82 |
+ |
83 |
+ |
84 |
+ @staticmethod |
85 |
+ def _calc_contents_(file_, cmd, verbose): |
86 |
+ _cmd = (cmd % {'file': file_ }).split() |
87 |
+ proc = Popen(_cmd, stdout=PIPE, stderr=PIPE) |
88 |
+ results = proc.communicate() |
89 |
+ result = results[0] |
90 |
+ if verbose: |
91 |
+ print result |
92 |
+ return result |
93 |
+ |
94 |
diff --git a/catalyst/defaults.py b/catalyst/defaults.py |
95 |
new file mode 100644 |
96 |
index 0000000..50d1d18 |
97 |
--- /dev/null |
98 |
+++ b/catalyst/defaults.py |
99 |
@@ -0,0 +1,68 @@ |
100 |
+ |
101 |
+ |
102 |
+# these should never be touched |
103 |
+required_build_targets = ["generic_target", "generic_stage_target"] |
104 |
+ |
105 |
+# new build types should be added here |
106 |
+valid_build_targets = ["stage1_target", "stage2_target", "stage3_target", |
107 |
+ "stage4_target", "grp_target", "livecd_stage1_target", "livecd_stage2_target", |
108 |
+ "embedded_target", "tinderbox_target", "snapshot_target", "netboot_target", |
109 |
+ "netboot2_target" |
110 |
+ ] |
111 |
+ |
112 |
+required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"] |
113 |
+ |
114 |
+valid_config_file_values = required_config_file_values[:] |
115 |
+valid_config_file_values.extend(["PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC", |
116 |
+ "ICECREAM", "ENVSCRIPT", "AUTORESUME", "FETCH", "CLEAR_AUTORESUME", |
117 |
+ "options", "DEBUG", "VERBOSE", "PURGE", "PURGEONLY", "SNAPCACHE", |
118 |
+ "snapshot_cache", "hash_function", "digests", "contents", "SEEDCACHE" |
119 |
+ ]) |
120 |
+ |
121 |
+verbosity = 1 |
122 |
+ |
123 |
+# Use hash_utils.HashMap.fields for the value legend |
124 |
+# fields = ["func", "cmd", "args", "id"] |
125 |
+hash_definitions = { |
126 |
+ "adler32" :["calc_hash2", "shash", ["-a", "ADLER32"], "ADLER32"], |
127 |
+ "crc32" :["calc_hash2", "shash", ["-a", "CRC32"], "CRC32"], |
128 |
+ "crc32b" :["calc_hash2", "shash", ["-a", "CRC32B"], "CRC32B"], |
129 |
+ "gost" :["calc_hash2", "shash", ["-a", "GOST"], "GOST"], |
130 |
+ "haval128" :["calc_hash2", "shash", ["-a", "HAVAL128"], "HAVAL128"], |
131 |
+ "haval160" :["calc_hash2", "shash", ["-a", "HAVAL160"], "HAVAL160"], |
132 |
+ "haval192" :["calc_hash2", "shash", ["-a", "HAVAL192"], "HAVAL192"], |
133 |
+ "haval224" :["calc_hash2", "shash", ["-a", "HAVAL224"], "HAVAL224"], |
134 |
+ "haval256" :["calc_hash2", "shash", ["-a", "HAVAL256"], "HAVAL256"], |
135 |
+ "md2" :["calc_hash2", "shash", ["-a", "MD2"], "MD2"], |
136 |
+ "md4" :["calc_hash2", "shash", ["-a", "MD4"], "MD4"], |
137 |
+ "md5" :["calc_hash2", "shash", ["-a", "MD5"], "MD5"], |
138 |
+ "ripemd128":["calc_hash2", "shash", ["-a", "RIPEMD128"], "RIPEMD128"], |
139 |
+ "ripemd160":["calc_hash2", "shash", ["-a", "RIPEMD160"], "RIPEMD160"], |
140 |
+ "ripemd256":["calc_hash2", "shash", ["-a", "RIPEMD256"], "RIPEMD256"], |
141 |
+ "ripemd320":["calc_hash2", "shash", ["-a", "RIPEMD320"], "RIPEMD320"], |
142 |
+ "sha1" :["calc_hash2", "shash", ["-a", "SHA1"], "SHA1"], |
143 |
+ "sha224" :["calc_hash2", "shash", ["-a", "SHA224"], "SHA224"], |
144 |
+ "sha256" :["calc_hash2", "shash", ["-a", "SHA256"], "SHA256"], |
145 |
+ "sha384" :["calc_hash2", "shash", ["-a", "SHA384"], "SHA384"], |
146 |
+ "sha512" :["calc_hash2", "shash", ["-a", "SHA512"], "SHA512"], |
147 |
+ "snefru128":["calc_hash2", "shash", ["-a", "SNEFRU128"], "SNEFRU128"], |
148 |
+ "snefru256":["calc_hash2", "shash", ["-a", "SNEFRU256"], "SNEFRU256"], |
149 |
+ "tiger" :["calc_hash2", "shash", ["-a", "TIGER"], "TIGER"], |
150 |
+ "tiger128" :["calc_hash2", "shash", ["-a", "TIGER128"], "TIGER128"], |
151 |
+ "tiger160" :["calc_hash2", "shash", ["-a", "TIGER160"], "TIGER160"], |
152 |
+ "whirlpool":["calc_hash2", "shash", ["-a", "WHIRLPOOL"], "WHIRLPOOL"], |
153 |
+ } |
154 |
+ |
155 |
+# use contents.ContentsMap.fields for the value legend |
156 |
+# Key:[function, cmd] |
157 |
+contents_definitions = { |
158 |
+ # 'find' is disabled because it requires the source path, which is not |
159 |
+ # always available |
160 |
+ #"find" :["calc_contents","find %(path)s"], |
161 |
+ "tar_tv":["calc_contents","tar tvf %(file)s"], |
162 |
+ "tar_tvz":["calc_contents","tar tvzf %(file)s"], |
163 |
+ "tar_tvj":["calc_contents","tar -I lbzip2 -tvf %(file)s"], |
164 |
+ "isoinfo_l":["calc_contents","isoinfo -l -i %(file)s"], |
165 |
+ # isoinfo_f should be a last resort only |
166 |
+ "isoinfo_f":["calc_contents","isoinfo -f -i %(file)s"], |
167 |
+} |
168 |
diff --git a/catalyst/hash_utils.py b/catalyst/hash_utils.py |
169 |
new file mode 100644 |
170 |
index 0000000..0dfe98b |
171 |
--- /dev/null |
172 |
+++ b/catalyst/hash_utils.py |
173 |
@@ -0,0 +1,106 @@ |
174 |
+ |
175 |
+import os |
176 |
+from collections import namedtuple |
177 |
+from subprocess import Popen, PIPE |
178 |
+ |
179 |
+from support import CatalystError |
180 |
+ |
181 |
+ |
182 |
+class HashMap(object): |
183 |
+ '''Class for handling |
184 |
+ Catalyst's hash generation''' |
185 |
+ |
186 |
+ fields = ["func", "cmd", "args", "id"] |
187 |
+ |
188 |
+ |
189 |
+ def __init__(self, hashes=None): |
190 |
+ '''Class init |
191 |
+ |
192 |
+ @param hashes: dictionary of Key:[function, cmd, cmd_args, Print string] |
193 |
+ @param fields: list of ordered field names for the hashes |
194 |
+ eg: ["func", "cmd", "args", "id"] |
195 |
+ ''' |
196 |
+ if hashes is None: |
197 |
+ hashes = {} |
198 |
+ #self.hashes = {} |
199 |
+ self.hash_map = {} |
200 |
+ |
201 |
+ # create the hash definition namedtuple classes |
202 |
+ for name in list(hashes): |
203 |
+ #obj = self.hashes[name] = namedtuple(name, self.fields) |
204 |
+ obj = namedtuple(name, self.fields) |
205 |
+ obj.__slots__ = () |
206 |
+ self.hash_map[name] = obj._make(hashes[name]) |
207 |
+ del obj |
208 |
+ |
209 |
+ |
210 |
+ def generate_hash(self, file_, hash_="crc32", verbose=False): |
211 |
+ '''Preferred method of generating a hash for the passed in file_ |
212 |
+ |
213 |
+ @param file_: the file to generate the hash for |
214 |
+ @param hash_: the hash algorithm to use |
215 |
+ @param verbose: boolean |
216 |
+ @returns the hash result |
217 |
+ ''' |
218 |
+ try: |
219 |
+ return getattr(self, self.hash_map[hash_].func)( |
220 |
+ file_, |
221 |
+ hash_, |
222 |
+ verbose |
223 |
+ ) |
224 |
+ except: |
225 |
+ raise CatalystError,"Error generating hash, is appropriate " + \ |
226 |
+ "utility installed on your system?" |
227 |
+ |
228 |
+ |
229 |
+ def calc_hash(self, file_, hash_, verbose=False): |
230 |
+ ''' |
231 |
+ Calculate the hash for "file_" |
232 |
+ |
233 |
+ @param file_: the file to generate the hash for |
234 |
+ @param hash_: the hash algorithm to use |
235 |
+ @param verbose: boolean |
236 |
+ @returns the hash result |
237 |
+ ''' |
238 |
+ _hash = self.hash_map[hash_] |
239 |
+ args = [_hash.cmd] |
240 |
+ args.extend(_hash.args) |
241 |
+ args.append(file_) |
242 |
+ source = Popen(args, stdout=PIPE) |
243 |
+ mylines = source.communicate()[0] |
244 |
+ mylines = mylines.split('\n')[0].split() |
245 |
+ result=mylines[0] |
246 |
+ if verbose: |
247 |
+ print _hash.id + " (%s) = %s" % (file_, result) |
248 |
+ return result |
249 |
+ |
250 |
+ |
251 |
+ def calc_hash2(self, file_, hash_type, verbose=False): |
252 |
+ ''' |
253 |
+ Calculate the hash for "file_" |
254 |
+ |
255 |
+ @param file_: the file to generate the hash for |
256 |
+ @param hash_type: the hash algorithm to use |
257 |
+ @param verbose: boolean |
258 |
+ @returns the hash result |
259 |
+ ''' |
260 |
+ _hash = self.hash_map[hash_type] |
261 |
+ args = [_hash.cmd] |
262 |
+ args.extend(_hash.args) |
263 |
+ args.append(file_) |
264 |
+ #print("DEBUG: calc_hash2; args =", args) |
265 |
+ source = Popen(args, stdout=PIPE) |
266 |
+ output = source.communicate() |
267 |
+ lines = output[0].split('\n') |
268 |
+ #print("DEBUG: calc_hash2; output =", output) |
269 |
+ header = lines[0] |
270 |
+ h_f = lines[1].split() |
271 |
+ hash_result = h_f[0] |
272 |
+ short_file = os.path.split(h_f[1])[1] |
273 |
+ result = header + hash_result + " " + short_file + "\n" |
274 |
+ if verbose: |
275 |
+ print header+" (%s) = %s" % (short_file, result) |
276 |
+ return result |
277 |
+ |
278 |
+ |
279 |
+ |
280 |
diff --git a/catalyst/main.py b/catalyst/main.py |
281 |
index 28afc59..be21970 100644 |
282 |
--- a/catalyst/main.py |
283 |
+++ b/catalyst/main.py |
284 |
@@ -21,8 +21,15 @@ sys.path.append(__selfpath__ + "/modules") |
285 |
|
286 |
import catalyst.config |
287 |
import catalyst.util |
288 |
-from catalyst.support import (required_build_targets, |
289 |
- valid_build_targets, CatalystError, hash_map, find_binary, LockInUse) |
290 |
+from catalyst.support import CatalystError, find_binary, LockInUse |
291 |
+from defaults import (required_build_targets, valid_build_targets, |
292 |
+ hash_definitions |
293 |
+ ) |
294 |
+ |
295 |
+from hash_utils import HashMap |
296 |
+from defaults import contents_definitions |
297 |
+from contents import ContentsMap |
298 |
+ |
299 |
|
300 |
__maintainer__="Catalyst <catalyst@g.o>" |
301 |
__version__="2.0.15" |
302 |
@@ -183,7 +190,8 @@ def parse_config(myconfig): |
303 |
if "digests" in myconf: |
304 |
conf_values["digests"]=myconf["digests"] |
305 |
if "contents" in myconf: |
306 |
- conf_values["contents"]=myconf["contents"] |
307 |
+ # replace '-' with '_' (for compatibility with existing configs) |
308 |
+ conf_values["contents"] = myconf["contents"].replace("-", '_') |
309 |
|
310 |
if "envscript" in myconf: |
311 |
print "Envscript support enabled." |
312 |
@@ -224,9 +232,10 @@ def import_modules(): |
313 |
raise CatalystError,"Can't find " + x + ".py plugin in " + \ |
314 |
module_dir |
315 |
|
316 |
- except ImportError: |
317 |
+ except ImportError as e: |
318 |
print "!!! catalyst: Python modules not found in "+\ |
319 |
module_dir + "; exiting." |
320 |
+ print e |
321 |
sys.exit(1) |
322 |
|
323 |
return targetmap |
324 |
@@ -353,40 +362,47 @@ def main(): |
325 |
# import configuration file and import our main module using those settings |
326 |
parse_config(myconfig) |
327 |
|
328 |
- # Start checking that digests are valid now that the hash_map was imported |
329 |
- # from catalyst.support |
330 |
+ # initialize our contents generator |
331 |
+ contents_map = ContentsMap(contents_definitions) |
332 |
+ conf_values["contents_map"] = contents_map |
333 |
+ |
334 |
+ # initialize our hash generator |
335 |
+ hash_map = HashMap(hash_definitions) |
336 |
+ conf_values["hash_map"] = hash_map |
337 |
+ |
338 |
+ # Start checking that digests are valid now that hash_map is initialized |
339 |
if "digests" in conf_values: |
340 |
for i in conf_values["digests"].split(): |
341 |
- if i not in hash_map: |
342 |
+ if i not in hash_definitions: |
343 |
print |
344 |
print i+" is not a valid digest entry" |
345 |
print "Valid digest entries:" |
346 |
- print hash_map.keys() |
347 |
+ print hash_definitions.keys() |
348 |
print |
349 |
print "Catalyst aborting...." |
350 |
sys.exit(2) |
351 |
- if find_binary(hash_map[i][1]) == None: |
352 |
+ if find_binary(hash_map.hash_map[i].cmd) == None: |
353 |
print |
354 |
- print "digest="+i |
355 |
- print "\tThe "+hash_map[i][1]+\ |
356 |
+ print "digest=" + i |
357 |
+ print "\tThe " + hash_map.hash_map[i].cmd + \ |
358 |
" binary was not found. It needs to be in your system path" |
359 |
print |
360 |
print "Catalyst aborting...." |
361 |
sys.exit(2) |
362 |
if "hash_function" in conf_values: |
363 |
- if conf_values["hash_function"] not in hash_map: |
364 |
+ if conf_values["hash_function"] not in hash_definitions: |
365 |
print |
366 |
print conf_values["hash_function"]+\ |
367 |
" is not a valid hash_function entry" |
368 |
print "Valid hash_function entries:" |
369 |
- print hash_map.keys() |
370 |
+ print hash_definitions.keys() |
371 |
print |
372 |
print "Catalyst aborting...." |
373 |
sys.exit(2) |
374 |
- if find_binary(hash_map[conf_values["hash_function"]][1]) == None: |
375 |
+ if find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd) == None: |
376 |
print |
377 |
print "hash_function="+conf_values["hash_function"] |
378 |
- print "\tThe "+hash_map[conf_values["hash_function"]][1]+\ |
379 |
+ print "\tThe "+hash_map.hash_map[conf_values["hash_function"]].cmd + \ |
380 |
" binary was not found. It needs to be in your system path" |
381 |
print |
382 |
print "Catalyst aborting...." |
383 |
diff --git a/catalyst/support.py b/catalyst/support.py |
384 |
index 072b985..fe2c700 100644 |
385 |
--- a/catalyst/support.py |
386 |
+++ b/catalyst/support.py |
387 |
@@ -1,6 +1,9 @@ |
388 |
|
389 |
import sys,string,os,types,re,signal,traceback,time |
390 |
#import md5,sha |
391 |
+ |
392 |
+from catalyst.defaults import verbosity |
393 |
+ |
394 |
selinux_capable = False |
395 |
#userpriv_capable = (os.getuid() == 0) |
396 |
#fakeroot_capable = False |
397 |
@@ -60,120 +63,6 @@ def hexify(str): |
398 |
return r |
399 |
# hexify() |
400 |
|
401 |
-def generate_contents(file,contents_function="auto",verbose=False): |
402 |
- try: |
403 |
- _ = contents_function |
404 |
- if _ == 'auto' and file.endswith('.iso'): |
405 |
- _ = 'isoinfo-l' |
406 |
- if (_ in ['tar-tv','auto']): |
407 |
- if file.endswith('.tgz') or file.endswith('.tar.gz'): |
408 |
- _ = 'tar-tvz' |
409 |
- elif file.endswith('.tbz2') or file.endswith('.tar.bz2'): |
410 |
- _ = 'tar-tvj' |
411 |
- elif file.endswith('.tar'): |
412 |
- _ = 'tar-tv' |
413 |
- |
414 |
- if _ == 'auto': |
415 |
- warn('File %r has unknown type for automatic detection.' % (file, )) |
416 |
- return None |
417 |
- else: |
418 |
- contents_function = _ |
419 |
- _ = contents_map[contents_function] |
420 |
- return _[0](file,_[1],verbose) |
421 |
- except: |
422 |
- raise CatalystError,\ |
423 |
- "Error generating contents, is appropriate utility (%s) installed on your system?" \ |
424 |
- % (contents_function, ) |
425 |
- |
426 |
-def calc_contents(file,cmd,verbose): |
427 |
- args={ 'file': file } |
428 |
- cmd=cmd % dict(args) |
429 |
- a=os.popen(cmd) |
430 |
- mylines=a.readlines() |
431 |
- a.close() |
432 |
- result="".join(mylines) |
433 |
- if verbose: |
434 |
- print result |
435 |
- return result |
436 |
- |
437 |
-# This has map must be defined after the function calc_content |
438 |
-# It is possible to call different functions from this but they must be defined |
439 |
-# before hash_map |
440 |
-# Key,function,cmd |
441 |
-contents_map={ |
442 |
- # 'find' is disabled because it requires the source path, which is not |
443 |
- # always available |
444 |
- #"find" :[calc_contents,"find %(path)s"], |
445 |
- "tar-tv":[calc_contents,"tar tvf %(file)s"], |
446 |
- "tar-tvz":[calc_contents,"tar tvzf %(file)s"], |
447 |
- "tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"], |
448 |
- "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"], |
449 |
- # isoinfo-f should be a last resort only |
450 |
- "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"], |
451 |
-} |
452 |
- |
453 |
-def generate_hash(file,hash_function="crc32",verbose=False): |
454 |
- try: |
455 |
- return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\ |
456 |
- hash_map[hash_function][3],verbose) |
457 |
- except: |
458 |
- raise CatalystError,"Error generating hash, is appropriate utility installed on your system?" |
459 |
- |
460 |
-def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False): |
461 |
- a=os.popen(cmd+" "+cmd_args+" "+file) |
462 |
- mylines=a.readlines() |
463 |
- a.close() |
464 |
- mylines=mylines[0].split() |
465 |
- result=mylines[0] |
466 |
- if verbose: |
467 |
- print id_string+" (%s) = %s" % (file, result) |
468 |
- return result |
469 |
- |
470 |
-def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False): |
471 |
- a=os.popen(cmd+" "+cmd_args+" "+file) |
472 |
- header=a.readline() |
473 |
- mylines=a.readline().split() |
474 |
- hash=mylines[0] |
475 |
- short_file=os.path.split(mylines[1])[1] |
476 |
- a.close() |
477 |
- result=header+hash+" "+short_file+"\n" |
478 |
- if verbose: |
479 |
- print header+" (%s) = %s" % (short_file, result) |
480 |
- return result |
481 |
- |
482 |
-# This has map must be defined after the function calc_hash |
483 |
-# It is possible to call different functions from this but they must be defined |
484 |
-# before hash_map |
485 |
-# Key,function,cmd,cmd_args,Print string |
486 |
-hash_map={ |
487 |
- "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\ |
488 |
- "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\ |
489 |
- "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\ |
490 |
- "gost":[calc_hash2,"shash","-a GOST","GOST"],\ |
491 |
- "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\ |
492 |
- "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\ |
493 |
- "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\ |
494 |
- "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\ |
495 |
- "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\ |
496 |
- "md2":[calc_hash2,"shash","-a MD2","MD2"],\ |
497 |
- "md4":[calc_hash2,"shash","-a MD4","MD4"],\ |
498 |
- "md5":[calc_hash2,"shash","-a MD5","MD5"],\ |
499 |
- "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\ |
500 |
- "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\ |
501 |
- "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\ |
502 |
- "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\ |
503 |
- "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\ |
504 |
- "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\ |
505 |
- "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\ |
506 |
- "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\ |
507 |
- "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\ |
508 |
- "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\ |
509 |
- "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\ |
510 |
- "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\ |
511 |
- "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\ |
512 |
- "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\ |
513 |
- "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\ |
514 |
- } |
515 |
|
516 |
def read_from_clst(file): |
517 |
line = '' |
518 |
@@ -190,39 +79,6 @@ def read_from_clst(file): |
519 |
return myline |
520 |
# read_from_clst |
521 |
|
522 |
-# these should never be touched |
523 |
-required_build_targets=["generic_target","generic_stage_target"] |
524 |
- |
525 |
-# new build types should be added here |
526 |
-valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target", |
527 |
- "livecd_stage1_target","livecd_stage2_target","embedded_target", |
528 |
- "tinderbox_target","snapshot_target","netboot_target","netboot2_target"] |
529 |
- |
530 |
-required_config_file_values=["storedir","sharedir","distdir","portdir"] |
531 |
-valid_config_file_values=required_config_file_values[:] |
532 |
-valid_config_file_values.append("PKGCACHE") |
533 |
-valid_config_file_values.append("KERNCACHE") |
534 |
-valid_config_file_values.append("CCACHE") |
535 |
-valid_config_file_values.append("DISTCC") |
536 |
-valid_config_file_values.append("ICECREAM") |
537 |
-valid_config_file_values.append("ENVSCRIPT") |
538 |
-valid_config_file_values.append("AUTORESUME") |
539 |
-valid_config_file_values.append("FETCH") |
540 |
-valid_config_file_values.append("CLEAR_AUTORESUME") |
541 |
-valid_config_file_values.append("options") |
542 |
-valid_config_file_values.append("DEBUG") |
543 |
-valid_config_file_values.append("VERBOSE") |
544 |
-valid_config_file_values.append("PURGE") |
545 |
-valid_config_file_values.append("PURGEONLY") |
546 |
-valid_config_file_values.append("SNAPCACHE") |
547 |
-valid_config_file_values.append("snapshot_cache") |
548 |
-valid_config_file_values.append("hash_function") |
549 |
-valid_config_file_values.append("digests") |
550 |
-valid_config_file_values.append("contents") |
551 |
-valid_config_file_values.append("SEEDCACHE") |
552 |
- |
553 |
-verbosity=1 |
554 |
- |
555 |
def list_bashify(mylist): |
556 |
if type(mylist)==types.StringType: |
557 |
mypack=[mylist] |
558 |
diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py |
559 |
index 5200d8a..123a9be 100644 |
560 |
--- a/catalyst/targets/generic_stage_target.py |
561 |
+++ b/catalyst/targets/generic_stage_target.py |
562 |
@@ -4,6 +4,7 @@ from generic_target import * |
563 |
from stat import * |
564 |
from catalyst.lock import LockDir |
565 |
|
566 |
+ |
567 |
class generic_stage_target(generic_target): |
568 |
""" |
569 |
This class does all of the chroot setup, copying of files, etc. It is |
570 |
@@ -1633,6 +1634,7 @@ class generic_stage_target(generic_target): |
571 |
if os.path.exists(file+".CONTENTS"): |
572 |
os.remove(file+".CONTENTS") |
573 |
if "contents" in self.settings: |
574 |
+ contents_map = self.settings["contents_map"] |
575 |
if os.path.exists(file): |
576 |
myf=open(file+".CONTENTS","w") |
577 |
keys={} |
578 |
@@ -1641,7 +1643,7 @@ class generic_stage_target(generic_target): |
579 |
array=keys.keys() |
580 |
array.sort() |
581 |
for j in array: |
582 |
- contents=generate_contents(file,contents_function=j,\ |
583 |
+ contents = contents_map.generate_contents(file, j, |
584 |
verbose="VERBOSE" in self.settings) |
585 |
if contents: |
586 |
myf.write(contents) |
587 |
@@ -1651,6 +1653,7 @@ class generic_stage_target(generic_target): |
588 |
if os.path.exists(file+".DIGESTS"): |
589 |
os.remove(file+".DIGESTS") |
590 |
if "digests" in self.settings: |
591 |
+ hash_map = self.settings["hash_map"] |
592 |
if os.path.exists(file): |
593 |
myf=open(file+".DIGESTS","w") |
594 |
keys={} |
595 |
@@ -1661,14 +1664,14 @@ class generic_stage_target(generic_target): |
596 |
for f in [file, file+'.CONTENTS']: |
597 |
if os.path.exists(f): |
598 |
if "all" in array: |
599 |
- for k in hash_map.keys(): |
600 |
- hash=generate_hash(f,hash_function=k,verbose=\ |
601 |
- "VERBOSE" in self.settings) |
602 |
+ for k in list(hash_map.hash_map): |
603 |
+ hash = hash_map.generate_hash(f,hash_=k, |
604 |
+ verbose = "VERBOSE" in self.settings) |
605 |
myf.write(hash) |
606 |
else: |
607 |
for j in array: |
608 |
- hash=generate_hash(f,hash_function=j,verbose=\ |
609 |
- "VERBOSE" in self.settings) |
610 |
+ hash = hash_map.generate_hash(f,hash_=j, |
611 |
+ verbose = "VERBOSE" in self.settings) |
612 |
myf.write(hash) |
613 |
myf.close() |
614 |
|
615 |
diff --git a/catalyst/targets/stage2_target.py b/catalyst/targets/stage2_target.py |
616 |
index 15acdee..6377f5d 100644 |
617 |
--- a/catalyst/targets/stage2_target.py |
618 |
+++ b/catalyst/targets/stage2_target.py |
619 |
@@ -23,8 +23,11 @@ class stage2_target(generic_stage_target): |
620 |
if os.path.isfile(self.settings["source_path"]): |
621 |
if os.path.exists(self.settings["source_path"]): |
622 |
# XXX: Is this even necessary if the previous check passes? |
623 |
- self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\ |
624 |
- hash_function=self.settings["hash_function"],verbose=False) |
625 |
+ self.settings["source_path_hash"] = \ |
626 |
+ self.settings["hash_map"].generate_hash( |
627 |
+ self.settings["source_path"],\ |
628 |
+ hash_=self.settings["hash_function"], |
629 |
+ verbose=False) |
630 |
print "Source path set to "+self.settings["source_path"] |
631 |
if os.path.isdir(self.settings["source_path"]): |
632 |
print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf" |
633 |
diff --git a/etc/catalyst.conf b/etc/catalyst.conf |
634 |
index 57606ca..2d377b2 100644 |
635 |
--- a/etc/catalyst.conf |
636 |
+++ b/etc/catalyst.conf |
637 |
@@ -15,12 +15,12 @@ digests="md5 sha1 sha512 whirlpool" |
638 |
# Creates a .CONTENTS file listing the contents of the file. Pick from any of |
639 |
# the supported options below: |
640 |
# auto - strongly recommended |
641 |
-# tar-tv - does 'tar tvf FILE' |
642 |
-# tar-tvz - does 'tar tvzf FILE' |
643 |
-# tar-tvy - does 'tar tvyf FILE' |
644 |
-# isoinfo-l - does 'isoinfo -l -i FILE' |
645 |
-# isoinfo-f - does 'isoinfo -f -i FILE' |
646 |
-# 'isoinfo-f' is the only option not chosen by the automatic algorithm. |
647 |
+# tar_tv - does 'tar tvf FILE' |
648 |
+# tar_tvz - does 'tar tvzf FILE' |
649 |
+# tar_tvy - does 'tar tvyf FILE' |
650 |
+# isoinfo_l - does 'isoinfo -l -i FILE' |
651 |
+# isoinfo_f - does 'isoinfo -f -i FILE' |
652 |
+# 'isoinfo_f' is the only option not chosen by the automatic algorithm. |
653 |
# If this variable is empty, no .CONTENTS will be generated at all. |
654 |
contents="auto" |
655 |
|
656 |
-- |
657 |
1.8.3.2 |