public inbox for gentoo-catalyst@lists.gentoo.org
* [gentoo-catalyst] rewrite-on-master patches round-4
@ 2013-12-14  3:36 Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 1/3] Initial creation of a defaults file and Split up support.py Brian Dolbec
                   ` (2 more replies)
  0 siblings, 3 replies; 4+ messages in thread
From: Brian Dolbec @ 2013-12-14  3:36 UTC
  To: gentoo-catalyst


...and the fun begins :)



* [gentoo-catalyst] [PATCH 1/3] Initial creation of a defaults file and Split up support.py
  2013-12-14  3:36 [gentoo-catalyst] rewrite-on-master patches round-4 Brian Dolbec
@ 2013-12-14  3:36 ` Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 2/3] Move confdefaults out of main.py Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 3/3] Some options cleanup, unifying their use, reducing redundancy Brian Dolbec
  2 siblings, 0 replies; 4+ messages in thread
From: Brian Dolbec @ 2013-12-14  3:36 UTC
  To: gentoo-catalyst; +Cc: Brian Dolbec

Create catalyst/defaults.py and split the hash and contents handling out of support.py into their own classes and files.
---
 catalyst/contents.py                     |  72 +++++++++++++++
 catalyst/defaults.py                     |  68 ++++++++++++++
 catalyst/hash_utils.py                   | 106 ++++++++++++++++++++++
 catalyst/main.py                         |  46 ++++++----
 catalyst/support.py                      | 150 +------------------------------
 catalyst/targets/generic_stage_target.py |  15 ++--
 catalyst/targets/stage2_target.py        |   7 +-
 etc/catalyst.conf                        |  12 +--
 8 files changed, 300 insertions(+), 176 deletions(-)
 create mode 100644 catalyst/contents.py
 create mode 100644 catalyst/defaults.py
 create mode 100644 catalyst/hash_utils.py

diff --git a/catalyst/contents.py b/catalyst/contents.py
new file mode 100644
index 0000000..0ecabbd
--- /dev/null
+++ b/catalyst/contents.py
@@ -0,0 +1,72 @@
+
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError, warn
+
+
+class ContentsMap(object):
+	'''Class to encompass all known commands to list
+	the contents of an archive'''
+
+
+	fields = ['func', 'cmd']
+
+
+	def __init__(self, defs=None):
+		'''Class init
+
+		@param defs: dictionary of Key:[function, cmd]
+		'''
+		if defs is None:
+			defs = {}
+		#self.contents = {}
+		self.contents_map = {}
+
+		# create the archive type namedtuple classes
+		for name in list(defs):
+			#obj = self.contents[name] = namedtuple(name, self.fields)
+			obj = namedtuple(name, self.fields)
+			obj.__slots__ = ()
+			self.contents_map[name] = obj._make(defs[name])
+		del obj
+
+
+	def generate_contents(self, file_, getter="auto", verbose=False):
+		try:
+			archive = getter
+			if archive == 'auto' and file_.endswith('.iso'):
+				archive = 'isoinfo_l'
+			if (archive in ['tar_tv','auto']):
+				if file_.endswith('.tgz') or file_.endswith('.tar.gz'):
+					archive = 'tar_tvz'
+				elif file_.endswith('.tbz2') or file_.endswith('.tar.bz2'):
+					archive = 'tar_tvj'
+				elif file_.endswith('.tar'):
+					archive = 'tar_tv'
+
+			if archive == 'auto':
+				warn('File %r has unknown type for automatic detection.'
+					% (file_, ))
+				return None
+			else:
+				getter = archive
+				func = getattr(self, '_%s_' % self.contents_map[getter].func)
+				return func(file_, self.contents_map[getter].cmd, verbose)
+		except:
+			raise CatalystError,\
+				"Error generating contents, is appropriate utility " +\
+				"(%s) installed on your system?" \
+				% (self.contents_map[getter].cmd)
+
+
+	@staticmethod
+	def _calc_contents_(file_, cmd, verbose):
+		_cmd = (cmd % {'file': file_ }).split()
+		proc = Popen(_cmd, stdout=PIPE, stderr=PIPE)
+		results = proc.communicate()
+		result = "\n".join(results)
+		if verbose:
+			print result
+		return result
+
diff --git a/catalyst/defaults.py b/catalyst/defaults.py
new file mode 100644
index 0000000..50d1d18
--- /dev/null
+++ b/catalyst/defaults.py
@@ -0,0 +1,68 @@
+
+
+# these should never be touched
+required_build_targets = ["generic_target", "generic_stage_target"]
+
+# new build types should be added here
+valid_build_targets = ["stage1_target", "stage2_target", "stage3_target",
+	"stage4_target", "grp_target", "livecd_stage1_target", "livecd_stage2_target",
+	"embedded_target", "tinderbox_target", "snapshot_target", "netboot_target",
+	"netboot2_target"
+	]
+
+required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"]
+
+valid_config_file_values = required_config_file_values[:]
+valid_config_file_values.extend(["PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC",
+	"ICECREAM", "ENVSCRIPT", "AUTORESUME", "FETCH", "CLEAR_AUTORESUME",
+	"options", "DEBUG", "VERBOSE", "PURGE", "PURGEONLY", "SNAPCACHE",
+	"snapshot_cache", "hash_function", "digests", "contents", "SEEDCACHE"
+	])
+
+verbosity = 1
+
+# Use hash_utils.HashMap.fields for the value legend
+# fields = ["func", "cmd", "args", "id"]
+hash_definitions = {
+	"adler32"  :["calc_hash2", "shash", ["-a", "ADLER32"], "ADLER32"],
+	"crc32"    :["calc_hash2", "shash", ["-a", "CRC32"], "CRC32"],
+	"crc32b"   :["calc_hash2", "shash", ["-a", "CRC32B"], "CRC32B"],
+	"gost"     :["calc_hash2", "shash", ["-a", "GOST"], "GOST"],
+	"haval128" :["calc_hash2", "shash", ["-a", "HAVAL128"], "HAVAL128"],
+	"haval160" :["calc_hash2", "shash", ["-a", "HAVAL160"], "HAVAL160"],
+	"haval192" :["calc_hash2", "shash", ["-a", "HAVAL192"], "HAVAL192"],
+	"haval224" :["calc_hash2", "shash", ["-a", "HAVAL224"], "HAVAL224"],
+	"haval256" :["calc_hash2", "shash", ["-a", "HAVAL256"], "HAVAL256"],
+	"md2"      :["calc_hash2", "shash", ["-a", "MD2"], "MD2"],
+	"md4"      :["calc_hash2", "shash", ["-a", "MD4"], "MD4"],
+	"md5"      :["calc_hash2", "shash", ["-a", "MD5"], "MD5"],
+	"ripemd128":["calc_hash2", "shash", ["-a", "RIPEMD128"], "RIPEMD128"],
+	"ripemd160":["calc_hash2", "shash", ["-a", "RIPEMD160"], "RIPEMD160"],
+	"ripemd256":["calc_hash2", "shash", ["-a", "RIPEMD256"], "RIPEMD256"],
+	"ripemd320":["calc_hash2", "shash", ["-a", "RIPEMD320"], "RIPEMD320"],
+	"sha1"     :["calc_hash2", "shash", ["-a", "SHA1"], "SHA1"],
+	"sha224"   :["calc_hash2", "shash", ["-a", "SHA224"], "SHA224"],
+	"sha256"   :["calc_hash2", "shash", ["-a", "SHA256"], "SHA256"],
+	"sha384"   :["calc_hash2", "shash", ["-a", "SHA384"], "SHA384"],
+	"sha512"   :["calc_hash2", "shash", ["-a", "SHA512"], "SHA512"],
+	"snefru128":["calc_hash2", "shash", ["-a", "SNEFRU128"], "SNEFRU128"],
+	"snefru256":["calc_hash2", "shash", ["-a", "SNEFRU256"], "SNEFRU256"],
+	"tiger"    :["calc_hash2", "shash", ["-a", "TIGER"], "TIGER"],
+	"tiger128" :["calc_hash2", "shash", ["-a", "TIGER128"], "TIGER128"],
+	"tiger160" :["calc_hash2", "shash", ["-a", "TIGER160"], "TIGER160"],
+	"whirlpool":["calc_hash2", "shash", ["-a", "WHIRLPOOL"], "WHIRLPOOL"],
+	}
+
+# use contents.ContentsMap.fields for the value legend
+# Key:[function, cmd]
+contents_definitions = {
+	# 'find' is disabled because it requires the source path, which is not
+	# always available
+	#"find"		:["calc_contents","find %(path)s"],
+	"tar_tv":["calc_contents","tar tvf %(file)s"],
+	"tar_tvz":["calc_contents","tar tvzf %(file)s"],
+	"tar_tvj":["calc_contents","tar -I lbzip2 -tvf %(file)s"],
+	"isoinfo_l":["calc_contents","isoinfo -l -i %(file)s"],
+	# isoinfo_f should be a last resort only
+	"isoinfo_f":["calc_contents","isoinfo -f -i %(file)s"],
+}
diff --git a/catalyst/hash_utils.py b/catalyst/hash_utils.py
new file mode 100644
index 0000000..0dfe98b
--- /dev/null
+++ b/catalyst/hash_utils.py
@@ -0,0 +1,106 @@
+
+import os
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError
+
+
+class HashMap(object):
+	'''Class for handling
+	Catalyst's hash generation'''
+
+	fields = ["func", "cmd", "args", "id"]
+
+
+	def __init__(self, hashes=None):
+		'''Class init
+
+		@param hashes: dictionary of Key:[function, cmd, cmd_args, Print string]
+		Each value list is ordered to match the class 'fields' attribute,
+			eg: ["func", "cmd", "args", "id"]
+		'''
+		if hashes is None:
+			hashes = {}
+		#self.hashes = {}
+		self.hash_map = {}
+
+		# create the hash definition namedtuple classes
+		for name in list(hashes):
+			#obj = self.hashes[name] = namedtuple(name, self.fields)
+			obj = namedtuple(name, self.fields)
+			obj.__slots__ = ()
+			self.hash_map[name] = obj._make(hashes[name])
+		del obj
+
+
+	def generate_hash(self, file_, hash_="crc32", verbose=False):
+		'''Preferred method of generating a hash for the passed in file_
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorithm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		try:
+			return getattr(self, self.hash_map[hash_].func)(
+				file_,
+				hash_,
+				verbose
+				)
+		except:
+			raise CatalystError,"Error generating hash, is appropriate " + \
+				"utility installed on your system?"
+
+
+	def calc_hash(self, file_, hash_, verbose=False):
+		'''
+		Calculate the hash for "file_"
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorithm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		_hash = self.hash_map[hash_]
+		args = [_hash.cmd]
+		args.extend(_hash.args)
+		args.append(file_)
+		source = Popen(args, stdout=PIPE)
+		mylines = source.communicate()[0]
+		mylines = mylines.splitlines()[0].split()
+		result=mylines[0]
+		if verbose:
+			print _hash.id + " (%s) = %s" % (file_, result)
+		return result
+
+
+	def calc_hash2(self, file_, hash_type, verbose=False):
+		'''
+		Calculate the hash for "file_"
+
+		@param file_: the file to generate the hash for
+		@param hash_type: the hash algorithm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		_hash = self.hash_map[hash_type]
+		args = [_hash.cmd]
+		args.extend(_hash.args)
+		args.append(file_)
+		#print("DEBUG: calc_hash2; args =", args)
+		source = Popen(args, stdout=PIPE)
+		output = source.communicate()
+		lines = output[0].split('\n')
+		#print("DEBUG: calc_hash2; output =", output)
+		header = lines[0]
+		h_f = lines[1].split()
+		hash_result = h_f[0]
+		short_file = os.path.split(h_f[1])[1]
+		result = header + "\n" + hash_result + "  " + short_file + "\n"
+		if verbose:
+			print header+" (%s) = %s" % (short_file, result)
+		return result
+
+
+
diff --git a/catalyst/main.py b/catalyst/main.py
index 28afc59..be21970 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -21,8 +21,15 @@ sys.path.append(__selfpath__ + "/modules")
 
 import catalyst.config
 import catalyst.util
-from catalyst.support import (required_build_targets,
-	valid_build_targets, CatalystError, hash_map, find_binary, LockInUse)
+from catalyst.support import CatalystError, find_binary, LockInUse
+from defaults import (required_build_targets, valid_build_targets,
+	hash_definitions
+	)
+
+from hash_utils import HashMap
+from defaults import contents_definitions
+from contents import ContentsMap
+
 
 __maintainer__="Catalyst <catalyst@gentoo.org>"
 __version__="2.0.15"
@@ -183,7 +190,8 @@ def parse_config(myconfig):
 	if "digests" in myconf:
 		conf_values["digests"]=myconf["digests"]
 	if "contents" in myconf:
-		conf_values["contents"]=myconf["contents"]
+		# replace '-' with '_' (for compatibility with existing configs)
+		conf_values["contents"] = myconf["contents"].replace("-", '_')
 
 	if "envscript" in myconf:
 		print "Envscript support enabled."
@@ -224,9 +232,10 @@ def import_modules():
 				raise CatalystError,"Can't find " + x + ".py plugin in " + \
 					module_dir
 
-	except ImportError:
+	except ImportError as e:
 		print "!!! catalyst: Python modules not found in "+\
 			module_dir + "; exiting."
+		print e
 		sys.exit(1)
 
 	return targetmap
@@ -353,40 +362,47 @@ def main():
 	# import configuration file and import our main module using those settings
 	parse_config(myconfig)
 
-	# Start checking that digests are valid now that the hash_map was imported
-	# from catalyst.support
+	# initialize our contents generator
+	contents_map = ContentsMap(contents_definitions)
+	conf_values["contents_map"] = contents_map
+
+	# initialize our hash generator
+	hash_map = HashMap(hash_definitions)
+	conf_values["hash_map"] = hash_map
+
+	# Start checking that digests are valid now that hash_map is initialized
 	if "digests" in conf_values:
 		for i in conf_values["digests"].split():
-			if i not in hash_map:
+			if i not in hash_definitions:
 				print
 				print i+" is not a valid digest entry"
 				print "Valid digest entries:"
-				print hash_map.keys()
+				print hash_definitions.keys()
 				print
 				print "Catalyst aborting...."
 				sys.exit(2)
-			if find_binary(hash_map[i][1]) == None:
+			if find_binary(hash_map.hash_map[i].cmd) == None:
 				print
-				print "digest="+i
-				print "\tThe "+hash_map[i][1]+\
+				print "digest=" + i
+				print "\tThe " + hash_map.hash_map[i].cmd + \
 					" binary was not found. It needs to be in your system path"
 				print
 				print "Catalyst aborting...."
 				sys.exit(2)
 	if "hash_function" in conf_values:
-		if conf_values["hash_function"] not in hash_map:
+		if conf_values["hash_function"] not in hash_definitions:
 			print
 			print conf_values["hash_function"]+\
 				" is not a valid hash_function entry"
 			print "Valid hash_function entries:"
-			print hash_map.keys()
+			print hash_definitions.keys()
 			print
 			print "Catalyst aborting...."
 			sys.exit(2)
-		if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
+		if find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd) == None:
 			print
 			print "hash_function="+conf_values["hash_function"]
-			print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
+			print "\tThe "+hash_map.hash_map[conf_values["hash_function"]].cmd + \
 				" binary was not found. It needs to be in your system path"
 			print
 			print "Catalyst aborting...."
diff --git a/catalyst/support.py b/catalyst/support.py
index 072b985..fe2c700 100644
--- a/catalyst/support.py
+++ b/catalyst/support.py
@@ -1,6 +1,9 @@
 
 import sys,string,os,types,re,signal,traceback,time
 #import md5,sha
+
+from catalyst.defaults import verbosity
+
 selinux_capable = False
 #userpriv_capable = (os.getuid() == 0)
 #fakeroot_capable = False
@@ -60,120 +63,6 @@ def hexify(str):
 	return r
 # hexify()
 
-def generate_contents(file,contents_function="auto",verbose=False):
-	try:
-		_ = contents_function
-		if _ == 'auto' and file.endswith('.iso'):
-			_ = 'isoinfo-l'
-		if (_ in ['tar-tv','auto']):
-			if file.endswith('.tgz') or file.endswith('.tar.gz'):
-				_ = 'tar-tvz'
-			elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-				_ = 'tar-tvj'
-			elif file.endswith('.tar'):
-				_ = 'tar-tv'
-
-		if _ == 'auto':
-			warn('File %r has unknown type for automatic detection.' % (file, ))
-			return None
-		else:
-			contents_function = _
-			_ = contents_map[contents_function]
-			return _[0](file,_[1],verbose)
-	except:
-		raise CatalystError,\
-			"Error generating contents, is appropriate utility (%s) installed on your system?" \
-			% (contents_function, )
-
-def calc_contents(file,cmd,verbose):
-	args={ 'file': file }
-	cmd=cmd % dict(args)
-	a=os.popen(cmd)
-	mylines=a.readlines()
-	a.close()
-	result="".join(mylines)
-	if verbose:
-		print result
-	return result
-
-# This has map must be defined after the function calc_content
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd
-contents_map={
-	# 'find' is disabled because it requires the source path, which is not
-	# always available
-	#"find"		:[calc_contents,"find %(path)s"],
-	"tar-tv":[calc_contents,"tar tvf %(file)s"],
-	"tar-tvz":[calc_contents,"tar tvzf %(file)s"],
-	"tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
-	"isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
-	# isoinfo-f should be a last resort only
-	"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
-}
-
-def generate_hash(file,hash_function="crc32",verbose=False):
-	try:
-		return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
-			hash_map[hash_function][3],verbose)
-	except:
-		raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
-
-def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
-	a=os.popen(cmd+" "+cmd_args+" "+file)
-	mylines=a.readlines()
-	a.close()
-	mylines=mylines[0].split()
-	result=mylines[0]
-	if verbose:
-		print id_string+" (%s) = %s" % (file, result)
-	return result
-
-def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
-	a=os.popen(cmd+" "+cmd_args+" "+file)
-	header=a.readline()
-	mylines=a.readline().split()
-	hash=mylines[0]
-	short_file=os.path.split(mylines[1])[1]
-	a.close()
-	result=header+hash+"  "+short_file+"\n"
-	if verbose:
-		print header+" (%s) = %s" % (short_file, result)
-	return result
-
-# This has map must be defined after the function calc_hash
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd,cmd_args,Print string
-hash_map={
-	 "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
-	 "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
-	 "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
-	 "gost":[calc_hash2,"shash","-a GOST","GOST"],\
-	 "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
-	 "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
-	 "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
-	 "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
-	 "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
-	 "md2":[calc_hash2,"shash","-a MD2","MD2"],\
-	 "md4":[calc_hash2,"shash","-a MD4","MD4"],\
-	 "md5":[calc_hash2,"shash","-a MD5","MD5"],\
-	 "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
-	 "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
-	 "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
-	 "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
-	 "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
-	 "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
-	 "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
-	 "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
-	 "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
-	 "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
-	 "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
-	 "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
-	 "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
-	 "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
-	 "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
-	 }
 
 def read_from_clst(file):
 	line = ''
@@ -190,39 +79,6 @@ def read_from_clst(file):
 	return myline
 # read_from_clst
 
-# these should never be touched
-required_build_targets=["generic_target","generic_stage_target"]
-
-# new build types should be added here
-valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
-			"livecd_stage1_target","livecd_stage2_target","embedded_target",
-			"tinderbox_target","snapshot_target","netboot_target","netboot2_target"]
-
-required_config_file_values=["storedir","sharedir","distdir","portdir"]
-valid_config_file_values=required_config_file_values[:]
-valid_config_file_values.append("PKGCACHE")
-valid_config_file_values.append("KERNCACHE")
-valid_config_file_values.append("CCACHE")
-valid_config_file_values.append("DISTCC")
-valid_config_file_values.append("ICECREAM")
-valid_config_file_values.append("ENVSCRIPT")
-valid_config_file_values.append("AUTORESUME")
-valid_config_file_values.append("FETCH")
-valid_config_file_values.append("CLEAR_AUTORESUME")
-valid_config_file_values.append("options")
-valid_config_file_values.append("DEBUG")
-valid_config_file_values.append("VERBOSE")
-valid_config_file_values.append("PURGE")
-valid_config_file_values.append("PURGEONLY")
-valid_config_file_values.append("SNAPCACHE")
-valid_config_file_values.append("snapshot_cache")
-valid_config_file_values.append("hash_function")
-valid_config_file_values.append("digests")
-valid_config_file_values.append("contents")
-valid_config_file_values.append("SEEDCACHE")
-
-verbosity=1
-
 def list_bashify(mylist):
 	if type(mylist)==types.StringType:
 		mypack=[mylist]
diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index 5200d8a..123a9be 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -4,6 +4,7 @@ from generic_target import *
 from stat import *
 from catalyst.lock import LockDir
 
+
 class generic_stage_target(generic_target):
 	"""
 	This class does all of the chroot setup, copying of files, etc. It is
@@ -1633,6 +1634,7 @@ class generic_stage_target(generic_target):
 		if os.path.exists(file+".CONTENTS"):
 			os.remove(file+".CONTENTS")
 		if "contents" in self.settings:
+			contents_map = self.settings["contents_map"]
 			if os.path.exists(file):
 				myf=open(file+".CONTENTS","w")
 				keys={}
@@ -1641,7 +1643,7 @@ class generic_stage_target(generic_target):
 					array=keys.keys()
 					array.sort()
 				for j in array:
-					contents=generate_contents(file,contents_function=j,\
+					contents = contents_map.generate_contents(file, j,
 						verbose="VERBOSE" in self.settings)
 					if contents:
 						myf.write(contents)
@@ -1651,6 +1653,7 @@ class generic_stage_target(generic_target):
 		if os.path.exists(file+".DIGESTS"):
 			os.remove(file+".DIGESTS")
 		if "digests" in self.settings:
+			hash_map = self.settings["hash_map"]
 			if os.path.exists(file):
 				myf=open(file+".DIGESTS","w")
 				keys={}
@@ -1661,14 +1664,14 @@ class generic_stage_target(generic_target):
 				for f in [file, file+'.CONTENTS']:
 					if os.path.exists(f):
 						if "all" in array:
-							for k in hash_map.keys():
-								hash=generate_hash(f,hash_function=k,verbose=\
-									"VERBOSE" in self.settings)
+							for k in list(hash_map.hash_map):
+								hash = hash_map.generate_hash(f,hash_=k,
+									verbose = "VERBOSE" in self.settings)
 								myf.write(hash)
 						else:
 							for j in array:
-								hash=generate_hash(f,hash_function=j,verbose=\
-									"VERBOSE" in self.settings)
+								hash = hash_map.generate_hash(f,hash_=j,
+									verbose = "VERBOSE" in self.settings)
 								myf.write(hash)
 				myf.close()
 
diff --git a/catalyst/targets/stage2_target.py b/catalyst/targets/stage2_target.py
index 15acdee..6377f5d 100644
--- a/catalyst/targets/stage2_target.py
+++ b/catalyst/targets/stage2_target.py
@@ -23,8 +23,11 @@ class stage2_target(generic_stage_target):
 			if os.path.isfile(self.settings["source_path"]):
 				if os.path.exists(self.settings["source_path"]):
 				# XXX: Is this even necessary if the previous check passes?
-					self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
-						hash_function=self.settings["hash_function"],verbose=False)
+					self.settings["source_path_hash"] = \
+						self.settings["hash_map"].generate_hash(
+							self.settings["source_path"],\
+							hash_=self.settings["hash_function"],
+							verbose=False)
 		print "Source path set to "+self.settings["source_path"]
 		if os.path.isdir(self.settings["source_path"]):
 			print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"
diff --git a/etc/catalyst.conf b/etc/catalyst.conf
index 57606ca..2d377b2 100644
--- a/etc/catalyst.conf
+++ b/etc/catalyst.conf
@@ -15,12 +15,12 @@ digests="md5 sha1 sha512 whirlpool"
 # Creates a .CONTENTS file listing the contents of the file. Pick from any of
 # the supported options below:
 # auto		- strongly recommended
-# tar-tv	- does 'tar tvf FILE'
-# tar-tvz	- does 'tar tvzf FILE'
-# tar-tvy	- does 'tar tvyf FILE'
-# isoinfo-l	- does 'isoinfo -l -i FILE'
-# isoinfo-f	- does 'isoinfo -f -i FILE'
-# 'isoinfo-f' is the only option not chosen by the automatic algorithm.
+# tar_tv	- does 'tar tvf FILE'
+# tar_tvz	- does 'tar tvzf FILE'
+# tar_tvy	- does 'tar tvyf FILE'
+# isoinfo_l	- does 'isoinfo -l -i FILE'
+# isoinfo_f	- does 'isoinfo -f -i FILE'
+# 'isoinfo_f' is the only option not chosen by the automatic algorithm.
 # If this variable is empty, no .CONTENTS will be generated at all.
 contents="auto"
 
-- 
1.8.3.2
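
For reference, here is a minimal usage sketch of the two classes this patch introduces. It is only an illustration: it assumes the catalyst/ directory itself is on sys.path (which is what the new imports in main.py rely on), that the 'shash' and 'tar' utilities are installed, and the tarball path is made up.

from defaults import hash_definitions, contents_definitions
from hash_utils import HashMap
from contents import ContentsMap

# build the lookup maps once, as main.py now does at startup
hash_map = HashMap(hash_definitions)
contents_map = ContentsMap(contents_definitions)

tarball = "/var/tmp/catalyst/builds/default/stage3.tar.bz2"  # hypothetical path

# one ".DIGESTS"-style entry for a single algorithm
digest_line = hash_map.generate_hash(tarball, hash_="sha512", verbose=False)

# ".CONTENTS"-style listing, archive type picked from the file suffix
listing = contents_map.generate_contents(tarball, getter="auto", verbose=False)

Both calls raise CatalystError when the underlying utility fails; for the digest commands main.py additionally checks find_binary() up front so the failure happens before a build starts.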




* [gentoo-catalyst] [PATCH 2/3] Move confdefaults out of main.py
  2013-12-14  3:36 [gentoo-catalyst] rewrite-on-master patches round-4 Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 1/3] Initial creation of a defaults file and Split up support.py Brian Dolbec
@ 2013-12-14  3:36 ` Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 3/3] Some options cleanup, unifying their use, reducing redundancy Brian Dolbec
  2 siblings, 0 replies; 4+ messages in thread
From: Brian Dolbec @ 2013-12-14  3:36 UTC
  To: gentoo-catalyst; +Cc: Brian Dolbec

---
 catalyst/defaults.py | 15 +++++++++++++++
 catalyst/main.py     | 19 +++----------------
 2 files changed, 18 insertions(+), 16 deletions(-)

diff --git a/catalyst/defaults.py b/catalyst/defaults.py
index 50d1d18..aa1e9e8 100644
--- a/catalyst/defaults.py
+++ b/catalyst/defaults.py
@@ -66,3 +66,18 @@ contents_definitions = {
 	# isoinfo_f should be a last resort only
 	"isoinfo_f":["calc_contents","isoinfo -f -i %(file)s"],
 }
+
+
+confdefaults={
+	"distdir": "/usr/portage/distfiles",
+	"hash_function": "crc32",
+	"packagedir": "/usr/portage/packages",
+	"portdir": "/usr/portage",
+	"port_tmpdir": "/var/tmp/portage",
+	"repo_name": "portage",
+	"sharedir": "/usr/lib/catalyst",
+	"snapshot_cache": "/var/tmp/catalyst/snapshot_cache",
+	"snapshot_name": "portage-",
+	"storedir": "/var/tmp/catalyst",
+	}
+
diff --git a/catalyst/main.py b/catalyst/main.py
index be21970..e969896 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -22,8 +22,8 @@ sys.path.append(__selfpath__ + "/modules")
 import catalyst.config
 import catalyst.util
 from catalyst.support import CatalystError, find_binary, LockInUse
-from defaults import (required_build_targets, valid_build_targets,
-	hash_definitions
+from catalyst.defaults import (required_build_targets, valid_build_targets,
+	hash_definitions, confdefaults
 	)
 
 from hash_utils import HashMap
@@ -76,19 +76,6 @@ def parse_config(myconfig):
 	myconf={}
 	config_file=""
 
-	confdefaults={
-		"distdir": "/usr/portage/distfiles",
-		"hash_function": "crc32",
-		"packagedir": "/usr/portage/packages",
-		"portdir": "/usr/portage",
-		"port_tmpdir": "/var/tmp/portage",
-		"repo_name": "portage",
-		"sharedir": "/usr/lib/catalyst",
-		"snapshot_name": "portage-",
-		"snapshot_cache": "/var/tmp/catalyst/snapshot_cache",
-		"storedir": "/var/tmp/catalyst",
-		}
-
 	# first, try the one passed (presumably from the cmdline)
 	if myconfig:
 		if os.path.exists(myconfig):
@@ -123,7 +110,7 @@ def parse_config(myconfig):
 		sys.exit(1)
 
 	# now, load up the values into conf_values so that we can use them
-	for x in confdefaults.keys():
+	for x in list(confdefaults):
 		if x in myconf:
 			print "Setting",x,"to config file value \""+myconf[x]+"\""
 			conf_values[x]=myconf[x]
-- 
1.8.3.2
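
As a quick illustration of what parse_config() is left doing with the relocated dictionary, here is a condensed sketch; the catalyst.conf value shown is invented, and the real function also prints where each value came from.

from catalyst.defaults import confdefaults

myconf = {"storedir": "/space/catalyst"}  # pretend this was read from catalyst.conf
conf_values = {}

# values from the config file win; anything missing falls back to the shipped default
for x in list(confdefaults):
    conf_values[x] = myconf.get(x, confdefaults[x])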




* [gentoo-catalyst] [PATCH 3/3] Some options cleanup, unifying their use, reducing redundancy.
  2013-12-14  3:36 [gentoo-catalyst] rewrite-on-master patches round-4 Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 1/3] Initial creation of a defaults file and Split up support.py Brian Dolbec
  2013-12-14  3:36 ` [gentoo-catalyst] [PATCH 2/3] Move confdefaults out of main.py Brian Dolbec
@ 2013-12-14  3:36 ` Brian Dolbec
  2 siblings, 0 replies; 4+ messages in thread
From: Brian Dolbec @ 2013-12-14  3:36 UTC
  To: gentoo-catalyst; +Cc: Brian Dolbec

---
 catalyst/defaults.py                     |  22 +++++-
 catalyst/main.py                         |  90 ++++++----------------
 catalyst/targets/generic_stage_target.py | 127 +++++++++++++++----------------
 catalyst/targets/grp_target.py           |   2 +-
 catalyst/targets/livecd_stage1_target.py |   2 +-
 catalyst/targets/livecd_stage2_target.py |   8 +-
 catalyst/targets/netboot2_target.py      |  10 +--
 catalyst/targets/snapshot_target.py      |   4 +-
 catalyst/targets/stage2_target.py        |   2 +-
 catalyst/targets/stage4_target.py        |   4 +-
 10 files changed, 120 insertions(+), 151 deletions(-)

diff --git a/catalyst/defaults.py b/catalyst/defaults.py
index aa1e9e8..6a8050e 100644
--- a/catalyst/defaults.py
+++ b/catalyst/defaults.py
@@ -13,10 +13,9 @@ valid_build_targets = ["stage1_target", "stage2_target", "stage3_target",
 required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"]
 
 valid_config_file_values = required_config_file_values[:]
-valid_config_file_values.extend(["PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC",
-	"ICECREAM", "ENVSCRIPT", "AUTORESUME", "FETCH", "CLEAR_AUTORESUME",
-	"options", "DEBUG", "VERBOSE", "PURGE", "PURGEONLY", "SNAPCACHE",
-	"snapshot_cache", "hash_function", "digests", "contents", "SEEDCACHE"
+valid_config_file_values.extend([ "distcc", "envscript",
+	"options", "DEBUG", "VERBOSE",
+	"snapshot_cache", "hash_function", "digests", "contents"
 	])
 
 verbosity = 1
@@ -81,3 +80,18 @@ confdefaults={
 	"storedir": "/var/tmp/catalyst",
 	}
 
+# legend:  key: message
+option_messages = {
+	"autoresume": "Autoresuming support enabled.",
+	"ccache": "Compiler cache support enabled.",
+	"clear-autoresume": "Cleaning autoresume flags support enabled.",
+	#"compress": "Compression enabled.",
+	"distcc": "Distcc support enabled.",
+	"icecream": "Icecream compiler cluster support enabled.",
+	"kerncache": "Kernel cache support enabled.",
+	"pkgcache": "Package cache support enabled.",
+	"purge": "Purge support enabled.",
+	"seedcache": "Seed cache support enabled.",
+	"snapcache": "Snapshot cache support enabled.",
+	#"tarball": "Tarball creation enabled.",
+	}
diff --git a/catalyst/main.py b/catalyst/main.py
index e969896..8fd8a60 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -23,7 +23,7 @@ import catalyst.config
 import catalyst.util
 from catalyst.support import CatalystError, find_binary, LockInUse
 from catalyst.defaults import (required_build_targets, valid_build_targets,
-	hash_definitions, confdefaults
+	hash_definitions, confdefaults, option_messages
 	)
 
 from hash_utils import HashMap
@@ -113,7 +113,10 @@ def parse_config(myconfig):
 	for x in list(confdefaults):
 		if x in myconf:
 			print "Setting",x,"to config file value \""+myconf[x]+"\""
-			conf_values[x]=myconf[x]
+			if x == 'options':
+				conf_values[x] = set(myconf[x].split())
+			else:
+				conf_values[x]=myconf[x]
 		else:
 			print "Setting",x,"to default value \""+confdefaults[x]+"\""
 			conf_values[x]=confdefaults[x]
@@ -121,74 +124,23 @@ def parse_config(myconfig):
 	# add our python base directory to use for loading target arch's
 	conf_values["PythonDir"] = __selfpath__
 
-	# parse out the rest of the options from the config file
-	if "autoresume" in string.split(conf_values["options"]):
-		print "Autoresuming support enabled."
-		conf_values["AUTORESUME"]="1"
-
-	if "bindist" in string.split(conf_values["options"]):
-		print "Binary redistribution enabled"
-		conf_values["BINDIST"]="1"
-	else:
-		print "Bindist is not enabled in catalyst.conf"
-		print "Binary redistribution of generated stages/isos may be prohibited by law."
-		print "Please see the use description for bindist on any package you are including."
-
-	if "ccache" in string.split(conf_values["options"]):
-		print "Compiler cache support enabled."
-		conf_values["CCACHE"]="1"
-
-	if "clear-autoresume" in string.split(conf_values["options"]):
-		print "Cleaning autoresume flags support enabled."
-		conf_values["CLEAR_AUTORESUME"]="1"
-
-	if "distcc" in string.split(conf_values["options"]):
-		print "Distcc support enabled."
-		conf_values["DISTCC"]="1"
-
-	if "icecream" in string.split(conf_values["options"]):
-		print "Icecream compiler cluster support enabled."
-		conf_values["ICECREAM"]="1"
 
-	if "kerncache" in string.split(conf_values["options"]):
-		print "Kernel cache support enabled."
-		conf_values["KERNCACHE"]="1"
+	# print out any options messages
+	for opt in conf_values['options']:
+		if opt in option_messages:
+			print option_messages[opt]
 
-	if "pkgcache" in string.split(conf_values["options"]):
-		print "Package cache support enabled."
-		conf_values["PKGCACHE"]="1"
+	for key in ["digests", "envscript", "var_tmpfs_portage", "port_logdir"]:
+		if key in myconf:
+			conf_values[key] = myconf[key]
 
-	if "preserve_libs" in string.split(conf_values["options"]):
-		print "Preserving libs during unmerge."
-		conf_values["PRESERVE_LIBS"]="1"
-
-	if "purge" in string.split(conf_values["options"]):
-		print "Purge support enabled."
-		conf_values["PURGE"]="1"
-
-	if "seedcache" in string.split(conf_values["options"]):
-		print "Seed cache support enabled."
-		conf_values["SEEDCACHE"]="1"
-
-	if "snapcache" in string.split(conf_values["options"]):
-		print "Snapshot cache support enabled."
-		conf_values["SNAPCACHE"]="1"
-
-	if "digests" in myconf:
-		conf_values["digests"]=myconf["digests"]
 	if "contents" in myconf:
 		# replace '-' with '_' (for compatibility with existing configs)
 		conf_values["contents"] = myconf["contents"].replace("-", '_')
 
 	if "envscript" in myconf:
 		print "Envscript support enabled."
-		conf_values["ENVSCRIPT"]=myconf["envscript"]
 
-	if "var_tmpfs_portage" in myconf:
-		conf_values["var_tmpfs_portage"]=myconf["var_tmpfs_portage"];
-
-	if "port_logdir" in myconf:
-		conf_values["port_logdir"]=myconf["port_logdir"];
 
 def import_modules():
 	# import catalyst's own modules
@@ -285,6 +237,10 @@ def main():
 		usage()
 		sys.exit(2)
 
+	# initialize it if it's not already
+	if 'options' not in conf_values:
+		conf_values['options'] = set()
+
 	run = False
 	for o, a in opts:
 		if o in ("-h", "--help"):
@@ -296,8 +252,8 @@ def main():
 			sys.exit(1)
 
 		if o in ("-d", "--debug"):
-			conf_values["DEBUG"]="1"
-			conf_values["VERBOSE"]="1"
+			conf_values["DEBUG"] = True
+			conf_values["VERBOSE"] = True
 
 		if o in ("-c", "--config"):
 			myconfig=a
@@ -314,7 +270,7 @@ def main():
 			myspecfile=a
 
 		if o in ("-F", "--fetchonly"):
-			conf_values["FETCH"]="1"
+			conf_values['options'].add("fetch")
 
 		if o in ("-v", "--verbose"):
 			conf_values["VERBOSE"]="1"
@@ -330,16 +286,16 @@ def main():
 				mycmdline.append("version_stamp="+a)
 
 		if o in ("-p", "--purge"):
-			conf_values["PURGE"] = "1"
+			conf_values['options'].add("purge")
 
 		if o in ("-P", "--purgeonly"):
-			conf_values["PURGEONLY"] = "1"
+			conf_values['options'].add("purgeonly")
 
 		if o in ("-T", "--purgetmponly"):
-			conf_values["PURGETMPONLY"] = "1"
+			conf_values['options'].add("purgetmponly")
 
 		if o in ("-a", "--clear-autoresume"):
-			conf_values["CLEAR_AUTORESUME"] = "1"
+			conf_values['options'].add("clear-autoresume")
 
 	if not run:
 		print "!!! catalyst: please specify one of either -f or -C\n"
diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index 123a9be..bb5e530 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -173,11 +173,11 @@ class generic_stage_target(generic_target):
 			file_locate(self.settings,["portage_confdir"],expand=0)
 
 		""" Setup our mount points """
-		if "SNAPCACHE" in self.settings:
-			self.mounts=["/proc","/dev","/usr/portage","/usr/portage/distfiles","/var/tmp/portage"]
-			self.mountmap={"/proc":"/proc","/dev":"/dev","/dev/pts":"/dev/pts",\
-				"/usr/portage":self.settings["snapshot_cache_path"]+"/portage",\
-				"/usr/portage/distfiles":self.settings["distdir"],"/var/tmp/portage":"tmpfs"}
+		if "snapcache" in self.settings["options"]:
+			self.mounts=["proc", "dev", 'portdir', 'distdir', 'port_tmpdir']
+			self.mountmap={"proc":"proc", "dev":"/dev", "pts":"/dev/pts",
+				"portdir":self.settings["snapshot_cache_path"]+"/" + self.settings["repo_name"],
+				"distdir":self.settings["distdir"],"port_tmpdir":"tmpfs"}
 		else:
 			self.mounts=["proc","dev", "distdir", "port_tmpdir"]
 			self.mountmap={"proc":"/proc", "dev":"/dev", "pts":"/dev/pts",
@@ -191,21 +191,21 @@ class generic_stage_target(generic_target):
 		Configure any user specified options (either in catalyst.conf or on
 		the command line).
 		"""
-		if "PKGCACHE" in self.settings:
+		if "pkgcache" in self.settings["options"]:
 			self.set_pkgcache_path()
 			print "Location of the package cache is "+\
 				self.settings["pkgcache_path"]
 			self.mounts.append("packagedir")
 			self.mountmap["packagedir"] = self.settings["pkgcache_path"]
 
-		if "KERNCACHE" in self.settings:
+		if "kerncache" in self.settings["options"]:
 			self.set_kerncache_path()
 			print "Location of the kerncache is "+\
 				self.settings["kerncache_path"]
 			self.mounts.append("kerncache")
 			self.mountmap["kerncache"]=self.settings["kerncache_path"]
 
-		if "CCACHE" in self.settings:
+		if "ccache" in self.settings["options"]:
 			if "CCACHE_DIR" in os.environ:
 				ccdir=os.environ["CCACHE_DIR"]
 				del os.environ["CCACHE_DIR"]
@@ -220,9 +220,9 @@ class generic_stage_target(generic_target):
 			""" for the chroot: """
 			self.env["CCACHE_DIR"]="/var/tmp/ccache"
 
-		if "ICECREAM" in self.settings:
-			self.mounts.append("/var/cache/icecream")
-			self.mountmap["/var/cache/icecream"]="/var/cache/icecream"
+		if "icecream" in self.settings["options"]:
+			self.mounts.append("icecream")
+			self.mountmap["icecream"]="/var/cache/icecream"
 			self.env["PATH"]="/usr/lib/icecc/bin:"+self.env["PATH"]
 
 		if "port_logdir" in self.settings:
@@ -295,7 +295,7 @@ class generic_stage_target(generic_target):
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+\
 			"/builds/"+self.settings["target_subpath"]+".tar.bz2")
-		if "AUTORESUME" in self.settings\
+		if "autoresume" in self.settings["options"]\
 			and os.path.exists(self.settings["autoresume_path"]+\
 				"setup_target_path"):
 			print \
@@ -373,7 +373,7 @@ class generic_stage_target(generic_target):
 				del self.settings[self.settings["spec_prefix"]+"/fsops"]
 
 	def set_source_path(self):
-		if "SEEDCACHE" in self.settings\
+		if "seedcache" in self.settings["options"]\
 			and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+\
 				self.settings["source_subpath"]+"/")):
 			self.settings["source_path"]=normpath(self.settings["storedir"]+\
@@ -427,7 +427,7 @@ class generic_stage_target(generic_target):
 					hash_function=self.settings["hash_function"],verbose=False)
 
 	def set_snapcache_path(self):
-		if "SNAPCACHE" in self.settings:
+		if "snapcache" in self.settings["options"]:
 			self.settings["snapshot_cache_path"]=\
 				normpath(self.settings["snapshot_cache"]+"/"+\
 				self.settings["snapshot"]+"/")
@@ -449,7 +449,7 @@ class generic_stage_target(generic_target):
 			"/tmp/"+self.settings["rel_type"]+"/"+".autoresume-"+\
 			self.settings["target"]+"-"+self.settings["subarch"]+"-"+\
 			self.settings["version_stamp"]+"/")
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			print "The autoresume path is " + self.settings["autoresume_path"]
 		if not os.path.exists(self.settings["autoresume_path"]):
 			os.makedirs(self.settings["autoresume_path"],0755)
@@ -476,8 +476,8 @@ class generic_stage_target(generic_target):
 				"base_dirs","bind","chroot_setup","setup_environment",\
 				"run_local","preclean","unbind","clean"]
 #		if "TARBALL" in self.settings or \
-#			"FETCH" not in self.settings:
-		if "FETCH" not in self.settings:
+#			"fetch" not in self.settings["options"]:
+		if "fetch" not in self.settings["options"]:
 			self.settings["action_sequence"].append("capture")
 		self.settings["action_sequence"].append("clear_autoresume")
 
@@ -636,7 +636,7 @@ class generic_stage_target(generic_target):
 		clst_unpack_hash=read_from_clst(self.settings["autoresume_path"]+\
 			"unpack")
 
-		if "SEEDCACHE" in self.settings:
+		if "seedcache" in self.settings["options"]:
 			if os.path.isdir(self.settings["source_path"]):
 				""" SEEDCACHE Is a directory, use rsync """
 				unpack_cmd="rsync -a --delete "+self.settings["source_path"]+\
@@ -678,7 +678,7 @@ class generic_stage_target(generic_target):
 			error_msg="Tarball extraction of "+self.settings["source_path"]+\
 				" to "+self.settings["chroot_path"]+" failed."
 
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			if os.path.isdir(self.settings["source_path"]) \
 				and os.path.exists(self.settings["autoresume_path"]+"unpack"):
 				""" Autoresume is valid, SEEDCACHE is valid """
@@ -705,7 +705,7 @@ class generic_stage_target(generic_target):
 				invalid_snapshot=True
 		else:
 			""" No autoresume, SEEDCACHE """
-			if "SEEDCACHE" in self.settings:
+			if "seedcache" in self.settings["options"]:
 				""" SEEDCACHE so let's run rsync and let it clean up """
 				if os.path.isdir(self.settings["source_path"]):
 					unpack=True
@@ -729,7 +729,7 @@ class generic_stage_target(generic_target):
 			self.mount_safety_check()
 
 			if invalid_snapshot:
-				if "AUTORESUME" in self.settings:
+				if "autoresume" in self.settings["options"]:
 					print "No Valid Resume point detected, cleaning up..."
 
 				self.clear_autoresume()
@@ -741,11 +741,11 @@ class generic_stage_target(generic_target):
 			if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
 				os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
 
-			if "PKGCACHE" in self.settings:
+			if "pkgcache" in self.settings["options"]:
 				if not os.path.exists(self.settings["pkgcache_path"]):
 					os.makedirs(self.settings["pkgcache_path"],0755)
 
-			if "KERNCACHE" in self.settings:
+			if "kerncache" in self.settings["options"]:
 				if not os.path.exists(self.settings["kerncache_path"]):
 					os.makedirs(self.settings["kerncache_path"],0755)
 
@@ -766,7 +766,7 @@ class generic_stage_target(generic_target):
 		snapshot_hash=read_from_clst(self.settings["autoresume_path"]+\
 			"unpack_portage")
 
-		if "SNAPCACHE" in self.settings:
+		if "snapcache" in self.settings["options"]:
 			snapshot_cache_hash=\
 				read_from_clst(self.settings["snapshot_cache_path"]+\
 				"catalyst-hash")
@@ -798,7 +798,7 @@ class generic_stage_target(generic_target):
 					self.settings["chroot_path"]+"/usr"
 			unpack_errmsg="Error unpacking snapshot"
 
-			if "AUTORESUME" in self.settings \
+			if "autoresume" in self.settings["options"] \
 				and os.path.exists(self.settings["chroot_path"]+\
 					self.settings["portdir"]) \
 				and os.path.exists(self.settings["autoresume_path"]\
@@ -809,7 +809,7 @@ class generic_stage_target(generic_target):
 					unpack=False
 
 		if unpack:
-			if "SNAPCACHE" in self.settings:
+			if "snapcache" in self.settings["options"]:
 				self.snapshot_lock_object.write_lock()
 			if os.path.exists(destdir):
 				print cleanup_msg
@@ -821,7 +821,7 @@ class generic_stage_target(generic_target):
 			print "Unpacking portage tree (This can take a long time) ..."
 			cmd(unpack_cmd,unpack_errmsg,env=self.env)
 
-			if "SNAPCACHE" in self.settings:
+			if "snapcache" in self.settings["options"]:
 				myf=open(self.settings["snapshot_cache_path"]+"catalyst-hash","w")
 				myf.write(self.settings["snapshot_path_hash"])
 				myf.close()
@@ -831,11 +831,11 @@ class generic_stage_target(generic_target):
 				myf.write(self.settings["snapshot_path_hash"])
 				myf.close()
 
-			if "SNAPCACHE" in self.settings:
+			if "snapcache" in self.settings["options"]:
 				self.snapshot_lock_object.unlock()
 
 	def config_profile_link(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+\
 				"config_profile_link"):
 			print \
@@ -854,7 +854,7 @@ class generic_stage_target(generic_target):
 			touch(self.settings["autoresume_path"]+"config_profile_link")
 
 	def setup_confdir(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+\
 				"setup_confdir"):
 			print "Resume point detected, skipping setup_confdir operation..."
@@ -904,7 +904,7 @@ class generic_stage_target(generic_target):
 					os.makedirs(self.mountmap[x],0755)
 
 			src=self.mountmap[x]
-			if "SNAPCACHE" in self.settings and x == "/usr/portage":
+			if "snapcache" in self.settings["options"] and x == "portdir":
 				self.snapshot_lock_object.read_lock()
 			if os.uname()[0] == "FreeBSD":
 				if src == "/dev":
@@ -954,7 +954,7 @@ class generic_stage_target(generic_target):
 					ouch=1
 					warn("Couldn't umount bind mount: " + mypath + self.mountmap[x])
 
-			if "SNAPCACHE" in self.settings and x == "/usr/portage":
+			if "snapcache" in self.settings["options"] and x == "portdir":
 				try:
 					"""
 					It's possible the snapshot lock object isn't created yet.
@@ -981,7 +981,7 @@ class generic_stage_target(generic_target):
 		self.override_cflags()
 		self.override_cxxflags()
 		self.override_ldflags()
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"chroot_setup"):
 			print "Resume point detected, skipping chroot_setup operation..."
 		else:
@@ -993,10 +993,10 @@ class generic_stage_target(generic_target):
 				"Could not copy resolv.conf into place.",env=self.env)
 
 			""" Copy over the envscript, if applicable """
-			if "ENVSCRIPT" in self.settings:
-				if not os.path.exists(self.settings["ENVSCRIPT"]):
+			if "envscript" in self.settings:
+				if not os.path.exists(self.settings["envscript"]):
 					raise CatalystError,\
-						"Can't find envscript "+self.settings["ENVSCRIPT"]
+						"Can't find envscript "+self.settings["envscript"]
 
 				print "\nWarning!!!!"
 				print "\tOverriding certain env variables may cause catastrophic failure."
@@ -1006,7 +1006,7 @@ class generic_stage_target(generic_target):
 				print "\tCatalyst Maintainers use VERY minimal envscripts if used at all"
 				print "\tYou have been warned\n"
 
-				cmd("cp "+self.settings["ENVSCRIPT"]+" "+\
+				cmd("cp "+self.settings["envscript"]+" "+\
 					self.settings["chroot_path"]+"/tmp/envscript",\
 					"Could not copy envscript into place.",env=self.env)
 
@@ -1081,7 +1081,7 @@ class generic_stage_target(generic_target):
 			touch(self.settings["autoresume_path"]+"chroot_setup")
 
 	def fsscript(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"fsscript"):
 			print "Resume point detected, skipping fsscript operation..."
 		else:
@@ -1092,7 +1092,7 @@ class generic_stage_target(generic_target):
 					touch(self.settings["autoresume_path"]+"fsscript")
 
 	def rcupdate(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"rcupdate"):
 			print "Resume point detected, skipping rcupdate operation..."
 		else:
@@ -1102,7 +1102,7 @@ class generic_stage_target(generic_target):
 				touch(self.settings["autoresume_path"]+"rcupdate")
 
 	def clean(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"clean"):
 			print "Resume point detected, skipping clean operation..."
 		else:
@@ -1137,7 +1137,7 @@ class generic_stage_target(generic_target):
 			touch(self.settings["autoresume_path"]+"clean")
 
 	def empty(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"empty"):
 			print "Resume point detected, skipping empty operation..."
 		else:
@@ -1165,7 +1165,7 @@ class generic_stage_target(generic_target):
 			touch(self.settings["autoresume_path"]+"empty")
 
 	def remove(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"remove"):
 			print "Resume point detected, skipping remove operation..."
 		else:
@@ -1187,7 +1187,7 @@ class generic_stage_target(generic_target):
 					raise
 
 	def preclean(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"preclean"):
 			print "Resume point detected, skipping preclean operation..."
 		else:
@@ -1202,7 +1202,7 @@ class generic_stage_target(generic_target):
 				raise CatalystError, "Build failed, could not execute preclean"
 
 	def capture(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"capture"):
 			print "Resume point detected, skipping capture operation..."
 		else:
@@ -1227,7 +1227,7 @@ class generic_stage_target(generic_target):
 			touch(self.settings["autoresume_path"]+"capture")
 
 	def run_local(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"run_local"):
 			print "Resume point detected, skipping run_local operation..."
 		else:
@@ -1276,10 +1276,10 @@ class generic_stage_target(generic_target):
 		""" Check for mounts right away and abort if we cannot unmount them """
 		self.mount_safety_check()
 
-		if "CLEAR_AUTORESUME" in self.settings:
+		if "clear-autoresume" in self.settings["options"]:
 			self.clear_autoresume()
 
-		if "PURGETMPONLY" in self.settings:
+		if "purgetmponly" in self.settings["options"]:
 			self.purge()
 			return
 
@@ -1287,7 +1287,7 @@ class generic_stage_target(generic_target):
 			self.purge()
 			return
 
-		if "PURGE" in self.settings:
+		if "purge" in self.settings["options"]:
 			self.purge()
 
 		for x in self.settings["action_sequence"]:
@@ -1302,7 +1302,7 @@ class generic_stage_target(generic_target):
 		self.chroot_lock.unlock()
 
 	def unmerge(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"unmerge"):
 			print "Resume point detected, skipping unmerge operation..."
 		else:
@@ -1334,7 +1334,7 @@ class generic_stage_target(generic_target):
 				touch(self.settings["autoresume_path"]+"unmerge")
 
 	def target_setup(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"target_setup"):
 			print "Resume point detected, skipping target_setup operation..."
 		else:
@@ -1345,7 +1345,7 @@ class generic_stage_target(generic_target):
 			touch(self.settings["autoresume_path"]+"target_setup")
 
 	def setup_overlay(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 		and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
 			print "Resume point detected, skipping setup_overlay operation..."
 		else:
@@ -1359,7 +1359,7 @@ class generic_stage_target(generic_target):
 				touch(self.settings["autoresume_path"]+"setup_overlay")
 
 	def create_iso(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"create_iso"):
 			print "Resume point detected, skipping create_iso operation..."
 		else:
@@ -1376,13 +1376,13 @@ class generic_stage_target(generic_target):
 				print "An ISO Image will not be created."
 
 	def build_packages(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+\
 				"build_packages"):
 			print "Resume point detected, skipping build_packages operation..."
 		else:
 			if self.settings["spec_prefix"]+"/packages" in self.settings:
-				if "AUTORESUME" in self.settings \
+				if "autoresume" in self.settings["options"] \
 					and os.path.exists(self.settings["autoresume_path"]+\
 						"build_packages"):
 					print "Resume point detected, skipping build_packages operation..."
@@ -1401,8 +1401,7 @@ class generic_stage_target(generic_target):
 							"build aborting due to error."
 
 	def build_kernel(self):
-		"Build all configured kernels"
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"build_kernel"):
 			print "Resume point detected, skipping build_kernel operation..."
 		else:
@@ -1427,7 +1426,7 @@ class generic_stage_target(generic_target):
 
 	def _build_kernel(self, kname):
 		"Build a single configured kernel by name"
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]\
 				+"build_kernel_"+kname):
 			print "Resume point detected, skipping build_kernel for "+kname+" operation..."
@@ -1525,7 +1524,7 @@ class generic_stage_target(generic_target):
 					"/initramfs_overlay"],env=self.env)
 
 	def bootloader(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"bootloader"):
 			print "Resume point detected, skipping bootloader operation..."
 		else:
@@ -1539,7 +1538,7 @@ class generic_stage_target(generic_target):
 				raise CatalystError,"Script aborting due to error."
 
 	def livecd_update(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+\
 				"livecd_update"):
 			print "Resume point detected, skipping build_packages operation..."
@@ -1572,7 +1571,7 @@ class generic_stage_target(generic_target):
 			os.chmod(myemp,mystat[ST_MODE])
 
 	def clear_packages(self):
-		if "PKGCACHE" in self.settings:
+		if "pkgcache" in self.settings["options"]:
 			print "purging the pkgcache ..."
 
 			myemp=self.settings["pkgcache_path"]
@@ -1590,7 +1589,7 @@ class generic_stage_target(generic_target):
 				os.chmod(myemp,mystat[ST_MODE])
 
 	def clear_kerncache(self):
-		if "KERNCACHE" in self.settings:
+		if "kerncache" in self.settings["options"]:
 			print "purging the kerncache ..."
 
 			myemp=self.settings["kerncache_path"]
@@ -1609,11 +1608,11 @@ class generic_stage_target(generic_target):
 
 	def clear_autoresume(self):
 		""" Clean resume points since they are no longer needed """
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			print "Removing AutoResume Points: ..."
 		myemp=self.settings["autoresume_path"]
 		if os.path.isdir(myemp):
-				if "AUTORESUME" in self.settings:
+				if "autoresume" in self.settings["options"]:
 					print "Emptying directory",myemp
 				"""
 				stat the dir, delete the dir, recreate the dir and set
@@ -1677,7 +1676,7 @@ class generic_stage_target(generic_target):
 
 	def purge(self):
 		countdown(10,"Purging Caches ...")
-		if any(k in self.settings for k in ("PURGE","PURGEONLY","PURGETMPONLY")):
+		if any(k in self.settings["options"] for k in ("purge","purgeonly","purgetmponly")):
 			print "clearing autoresume ..."
 			self.clear_autoresume()
 
diff --git a/catalyst/targets/grp_target.py b/catalyst/targets/grp_target.py
index 8e70042..a8309a8 100644
--- a/catalyst/targets/grp_target.py
+++ b/catalyst/targets/grp_target.py
@@ -36,7 +36,7 @@ class grp_target(generic_stage_target):
 
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 			print "Resume point detected, skipping target path setup operation..."
 		else:
diff --git a/catalyst/targets/livecd_stage1_target.py b/catalyst/targets/livecd_stage1_target.py
index ac846ec..6273c9e 100644
--- a/catalyst/targets/livecd_stage1_target.py
+++ b/catalyst/targets/livecd_stage1_target.py
@@ -25,7 +25,7 @@ class livecd_stage1_target(generic_stage_target):
 
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"])
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
diff --git a/catalyst/targets/livecd_stage2_target.py b/catalyst/targets/livecd_stage2_target.py
index 1bfd820..54d06c7 100644
--- a/catalyst/targets/livecd_stage2_target.py
+++ b/catalyst/targets/livecd_stage2_target.py
@@ -46,7 +46,7 @@ class livecd_stage2_target(generic_stage_target):
 
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["target_subpath"]+"/")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
@@ -88,7 +88,7 @@ class livecd_stage2_target(generic_stage_target):
 			error_msg="Rsync of "+self.settings["source_path"]+" to "+self.settings["chroot_path"]+" failed."
 			invalid_snapshot=False
 
-		if "AUTORESUME" in self.settings:
+		if "autoresume" in self.settings["options"]:
 			if os.path.isdir(self.settings["source_path"]) and \
 				os.path.exists(self.settings["autoresume_path"]+"unpack"):
 				print "Resume point detected, skipping unpack operation..."
@@ -112,7 +112,7 @@ class livecd_stage2_target(generic_stage_target):
 			if not os.path.exists(self.settings["chroot_path"]+"/tmp"):
 				os.makedirs(self.settings["chroot_path"]+"/tmp",1777)
 
-			if "PKGCACHE" in self.settings:
+			if "pkgcache" in self.settings["options"]:
 				if not os.path.exists(self.settings["pkgcache_path"]):
 					os.makedirs(self.settings["pkgcache_path"],0755)
 
@@ -134,7 +134,7 @@ class livecd_stage2_target(generic_stage_target):
 				"config_profile_link","setup_confdir","portage_overlay",\
 				"bind","chroot_setup","setup_environment","run_local",\
 				"build_kernel"]
-		if "FETCH" not in self.settings:
+		if "fetch" not in self.settings["options"]:
 			self.settings["action_sequence"] += ["bootloader","preclean",\
 				"livecd_update","root_overlay","fsscript","rcupdate","unmerge",\
 				"unbind","remove","empty","target_setup",\
diff --git a/catalyst/targets/netboot2_target.py b/catalyst/targets/netboot2_target.py
index 2b3cd20..8809dd0 100644
--- a/catalyst/targets/netboot2_target.py
+++ b/catalyst/targets/netboot2_target.py
@@ -45,7 +45,7 @@ class netboot2_target(generic_stage_target):
 	def set_target_path(self):
 		self.settings["target_path"]=normpath(self.settings["storedir"]+"/builds/"+\
 			self.settings["target_subpath"]+"/")
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"setup_target_path"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
@@ -63,7 +63,7 @@ class netboot2_target(generic_stage_target):
 		myfiles=[]
 
 		# check for autoresume point
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"copy_files_to_image"):
 				print "Resume point detected, skipping target path setup operation..."
 		else:
@@ -96,7 +96,7 @@ class netboot2_target(generic_stage_target):
 			touch(self.settings["autoresume_path"]+"copy_files_to_image")
 
 	def setup_overlay(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 		and os.path.exists(self.settings["autoresume_path"]+"setup_overlay"):
 			print "Resume point detected, skipping setup_overlay operation..."
 		else:
@@ -120,7 +120,7 @@ class netboot2_target(generic_stage_target):
 			raise CatalystError,"Failed to move kernel images!"
 
 	def remove(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"remove"):
 			print "Resume point detected, skipping remove operation..."
 		else:
@@ -132,7 +132,7 @@ class netboot2_target(generic_stage_target):
 					os.system("rm -rf " + self.settings["chroot_path"] + self.settings["merge_path"] + x)
 
 	def empty(self):
-		if "AUTORESUME" in self.settings \
+		if "autoresume" in self.settings["options"] \
 			and os.path.exists(self.settings["autoresume_path"]+"empty"):
 			print "Resume point detected, skipping empty operation..."
 		else:
diff --git a/catalyst/targets/snapshot_target.py b/catalyst/targets/snapshot_target.py
index e21bd1a..682f9b5 100644
--- a/catalyst/targets/snapshot_target.py
+++ b/catalyst/targets/snapshot_target.py
@@ -32,11 +32,11 @@ class snapshot_target(generic_stage_target):
 		pass
 
 	def run(self):
-		if "PURGEONLY" in self.settings:
+		if "purgeonly" in self.settings["options"]:
 			self.purge()
 			return
 
-		if "PURGE" in self.settings:
+		if "purge" in self.settings["options"]:
 			self.purge()
 
 		self.setup()
diff --git a/catalyst/targets/stage2_target.py b/catalyst/targets/stage2_target.py
index 6377f5d..94d4a1e 100644
--- a/catalyst/targets/stage2_target.py
+++ b/catalyst/targets/stage2_target.py
@@ -16,7 +16,7 @@ class stage2_target(generic_stage_target):
 		generic_stage_target.__init__(self,spec,addlargs)
 
 	def set_source_path(self):
-		if "SEEDCACHE" in self.settings and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
+		if "seedcache" in self.settings["options"] and os.path.isdir(normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")):
 			self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/tmp/stage1root/")
 		else:
 			self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
diff --git a/catalyst/targets/stage4_target.py b/catalyst/targets/stage4_target.py
index 9168f2e..e2b8a79 100644
--- a/catalyst/targets/stage4_target.py
+++ b/catalyst/targets/stage4_target.py
@@ -32,8 +32,8 @@ class stage4_target(generic_stage_target):
 					"clean"]
 
 #		if "TARBALL" in self.settings or \
-#			"FETCH" not in self.settings:
-		if "FETCH" not in self.settings:
+#			"fetch" not in self.settings['options']:
+		if "fetch" not in self.settings['options']:
 			self.settings["action_sequence"].append("capture")
 		self.settings["action_sequence"].append("clear_autoresume")
 
-- 
1.8.3.2
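
To show how the unified options end up being used, here is a small sketch; the option string below is invented for illustration.

from catalyst.defaults import option_messages

conf_values = {}
conf_values["options"] = set("autoresume ccache pkgcache".split())

# announce whatever was enabled, replacing the old per-flag if/print chain in main.py
for opt in conf_values["options"]:
    if opt in option_messages:
        print(option_messages[opt])

# targets test the same set instead of the old upper-case flags, e.g.
#   if "pkgcache" in self.settings["options"]:
# rather than
#   if "PKGCACHE" in self.settings:

The command-line switches feed the same set through conf_values['options'].add(...), so the spelling stays consistent from catalyst.conf through to the targets.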



