Gentoo Archives: gentoo-portage-dev

From: Ali Polatel <hawking@g.o>
To: gentoo-portage-dev@l.g.o
Subject: [gentoo-portage-dev] [PATCH 1/6] Replace has_key() with the in operator (portage)
Date: Tue, 01 Jul 2008 11:27:03
Message-Id: 1214911614-31263-1-git-send-email-hawking@gentoo.org
This is a series of patches to use the in operator instead of the has_key()
method for dictionaries. The has_key() method is deprecated in Python 2.6 and
removed in Python 3.0.

I've also added DeprecationWarnings to classes that have a has_key() method,
so people writing code using portage will be notified to use the in operator
instead (or to override __contains__ instead of has_key()).

It's only briefly tested and seems to work fine here; please test and report
back.
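
To make the DeprecationWarning part concrete, the pattern for the dict-like
classes is roughly the following sketch (FakeConfigLike is only an
illustrative stand-in here, not one of portage's real classes, and the exact
warning text used in the actual patches may differ):

import warnings

class FakeConfigLike(object):
    """Illustrative dict-like wrapper; not an actual portage class."""

    def __init__(self, data=None):
        self._data = dict(data or {})

    def __contains__(self, key):
        # Preferred: lets callers use the in operator directly.
        return key in self._data

    def has_key(self, key):
        # Deprecated compatibility method; steer callers to the in operator.
        warnings.warn("has_key() is deprecated, use the in operator instead",
                      DeprecationWarning, stacklevel=2)
        return key in self

    def __getitem__(self, key):
        return self._data[key]

    def __setitem__(self, key, value):
        self._data[key] = value

With something like that in place, key in obj stays silent while
obj.has_key(key) keeps working but emits a DeprecationWarning, so code built
on top of portage gets a transition period instead of breaking outright.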

---
 pym/_emerge/__init__.py      |    6 ++--
 pym/portage/__init__.py      |   60 +++++++++++++++++++++---------------------
 pym/portage/const.py         |    2 +-
 pym/portage/cvstree.py       |   16 +++++-----
 pym/portage/dispatch_conf.py |    2 +-
 pym/portage/getbinpkg.py     |   12 ++++----
 pym/portage/glsa.py          |    6 ++--
 pym/portage/locks.py         |    6 ++--
 pym/portage/manifest.py      |    4 +-
 pym/portage/util.py          |    4 +-
 10 files changed, 59 insertions(+), 59 deletions(-)

diff --git a/pym/_emerge/__init__.py b/pym/_emerge/__init__.py
index 93fa112..8b3ab91 100644
--- a/pym/_emerge/__init__.py
+++ b/pym/_emerge/__init__.py
@@ -6518,7 +6518,7 @@ class MergeTask(object):
emergelog(xterm_titles, " *** Finished. Cleaning up...")

# We're out of the loop... We're done. Delete the resume data.
- if mtimedb.has_key("resume"):
+ if "resume" in mtimedb:
del mtimedb["resume"]
mtimedb.commit()

@@ -6743,7 +6743,7 @@ def unmerge(root_config, myopts, unmerge_action,
# since we're pruning, we don't care about slots
# and put all the pkgs in together
myslot = 0
- if not slotmap.has_key(myslot):
+ if myslot not in slotmap:
slotmap[myslot] = {}
slotmap[myslot][localtree.dbapi.cpv_counter(mypkg)] = mypkg

@@ -7417,7 +7417,7 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
rsync_initial_timeout = 15

try:
- if settings.has_key("RSYNC_RETRIES"):
+ if "RSYNC_RETRIES" in settings:
print yellow("WARNING:")+" usage of RSYNC_RETRIES is deprecated, use PORTAGE_RSYNC_RETRIES instead"
maxretries=int(settings["RSYNC_RETRIES"])
else:
diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
index 879e65b..9e29435 100644
--- a/pym/portage/__init__.py
+++ b/pym/portage/__init__.py
@@ -161,7 +161,7 @@ def load_mod(name):

def best_from_dict(key, top_dict, key_order, EmptyOnError=1, FullCopy=1, AllowEmpty=1):
for x in key_order:
- if top_dict.has_key(x) and top_dict[x].has_key(key):
+ if x in top_dict and key in top_dict[x]:
if FullCopy:
return copy.deepcopy(top_dict[x][key])
else:
@@ -195,7 +195,7 @@ cacheStale=0
def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymlinks=True):
global cacheHit,cacheMiss,cacheStale
mypath = normalize_path(my_original_path)
- if dircache.has_key(mypath):
+ if mypath in dircache:
cacheHit += 1
cached_mtime, list, ftype = dircache[mypath]
else:
@@ -220,7 +220,7 @@ def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymli
return None, None
# Python retuns mtime in seconds, so if it was changed in the last few seconds, it could be invalid
if mtime != cached_mtime or time.time() - mtime < 4:
- if dircache.has_key(mypath):
+ if mypath in dircache:
cacheStale += 1
try:
list = os.listdir(mypath)
@@ -852,7 +852,7 @@ def ExtractKernelVersion(base_dir):

# Check the .config for a CONFIG_LOCALVERSION and append that too, also stripping whitespace
kernelconfig = getconfig(base_dir+"/.config")
- if kernelconfig and kernelconfig.has_key("CONFIG_LOCALVERSION"):
+ if kernelconfig and "CONFIG_LOCALVERSION" in kernelconfig:
version += "".join(kernelconfig["CONFIG_LOCALVERSION"].split())

return (version,None)
@@ -1225,7 +1225,7 @@ class config(object):
self.prevmaskdict={}
for x in self.packages:
mycatpkg=dep_getkey(x)
- if not self.prevmaskdict.has_key(mycatpkg):
+ if mycatpkg not in self.prevmaskdict:
self.prevmaskdict[mycatpkg]=[x]
else:
self.prevmaskdict[mycatpkg].append(x)
@@ -1452,7 +1452,7 @@ class config(object):
os.path.join(abs_user_config, "package.use"), recursive=1)
for key in pusedict.keys():
cp = dep_getkey(key)
- if not self.pusedict.has_key(cp):
+ if cp not in self.pusedict:
self.pusedict[cp] = {}
self.pusedict[cp][key] = pusedict[key]

@@ -1464,7 +1464,7 @@ class config(object):
# default to ~arch if no specific keyword is given
if not pkgdict[key]:
mykeywordlist = []
- if self.configdict["defaults"] and self.configdict["defaults"].has_key("ACCEPT_KEYWORDS"):
+ if self.configdict["defaults"] and "ACCEPT_KEYWORDS" in self.configdict["defaults"]:
groups = self.configdict["defaults"]["ACCEPT_KEYWORDS"].split()
else:
groups = []
@@ -1473,7 +1473,7 @@ class config(object):
mykeywordlist.append("~"+keyword)
pkgdict[key] = mykeywordlist
cp = dep_getkey(key)
- if not self.pkeywordsdict.has_key(cp):
+ if cp not in self.pkeywordsdict:
self.pkeywordsdict[cp] = {}
self.pkeywordsdict[cp][key] = pkgdict[key]

@@ -1494,7 +1494,7 @@ class config(object):
recursive=1)
for x in pkgunmasklines:
mycatpkg=dep_getkey(x)
- if self.punmaskdict.has_key(mycatpkg):
+ if mycatpkg in self.punmaskdict:
self.punmaskdict[mycatpkg].append(x)
else:
self.punmaskdict[mycatpkg]=[x]
@@ -1518,7 +1518,7 @@ class config(object):
self.pmaskdict = {}
for x in pkgmasklines:
mycatpkg=dep_getkey(x)
- if self.pmaskdict.has_key(mycatpkg):
+ if mycatpkg in self.pmaskdict:
self.pmaskdict[mycatpkg].append(x)
else:
self.pmaskdict[mycatpkg]=[x]
@@ -1556,7 +1556,7 @@ class config(object):
if not x:
continue
mycatpkg=dep_getkey(x)
- if self.pprovideddict.has_key(mycatpkg):
+ if mycatpkg in self.pprovideddict:
self.pprovideddict[mycatpkg].append(x)
else:
self.pprovideddict[mycatpkg]=[x]
@@ -1822,7 +1822,7 @@ class config(object):

def backup_changes(self,key=None):
self.modifying()
- if key and self.configdict["env"].has_key(key):
+ if key and key in self.configdict["env"]:
self.backupenv[key] = copy.deepcopy(self.configdict["env"][key])
else:
raise KeyError, "No such key defined in environment: %s" % key
@@ -2655,7 +2655,7 @@ class config(object):
if virts:
for x in virts:
vkeysplit = x.split("/")
- if not self.virts_p.has_key(vkeysplit[1]):
+ if vkeysplit[1] not in self.virts_p:
self.virts_p[vkeysplit[1]] = virts[x]
return self.virts_p

@@ -2828,7 +2828,7 @@ class config(object):
# remain unset.
continue
mydict[x] = myvalue
- if not mydict.has_key("HOME") and mydict.has_key("BUILD_PREFIX"):
+ if "HOME" not in mydict and "BUILD_PREFIX" in mydict:
writemsg("*** HOME not set. Setting to "+mydict["BUILD_PREFIX"]+"\n")
mydict["HOME"]=mydict["BUILD_PREFIX"][:]

@@ -3305,7 +3305,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
# use_locks = 0

# local mirrors are always added
- if custommirrors.has_key("local"):
+ if "local" in custommirrors:
mymirrors += custommirrors["local"]

if "nomirror" in restrict or \
@@ -3352,7 +3352,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
primaryuri_dict = {}
for myuri in myuris:
myfile=os.path.basename(myuri)
- if not filedict.has_key(myfile):
+ if myfile not in filedict:
filedict[myfile]=[]
for y in range(0,len(locations)):
filedict[myfile].append(locations[y]+"/distfiles/"+myfile)
@@ -3362,14 +3362,14 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
mirrorname = myuri[9:eidx]

# Try user-defined mirrors first
- if custommirrors.has_key(mirrorname):
+ if mirrorname in custommirrors:
for cmirr in custommirrors[mirrorname]:
filedict[myfile].append(cmirr+"/"+myuri[eidx+1:])
# remove the mirrors we tried from the list of official mirrors
if cmirr.strip() in thirdpartymirrors[mirrorname]:
thirdpartymirrors[mirrorname].remove(cmirr)
# now try the official mirrors
- if thirdpartymirrors.has_key(mirrorname):
+ if mirrorname in thirdpartymirrors:
shuffle(thirdpartymirrors[mirrorname])

for locmirr in thirdpartymirrors[mirrorname]:
@@ -3386,7 +3386,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
continue
if "primaryuri" in restrict:
# Use the source site first.
- if primaryuri_indexes.has_key(myfile):
+ if myfile in primaryuri_indexes:
primaryuri_indexes[myfile] += 1
else:
primaryuri_indexes[myfile] = 0
@@ -3697,11 +3697,11 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
continue
# allow different fetchcommands per protocol
protocol = loc[0:loc.find("://")]
- if mysettings.has_key("FETCHCOMMAND_"+protocol.upper()):
+ if "FETCHCOMMAND_"+protocol.upper() in mysettings:
fetchcommand=mysettings["FETCHCOMMAND_"+protocol.upper()]
else:
fetchcommand=mysettings["FETCHCOMMAND"]
- if mysettings.has_key("RESUMECOMMAND_"+protocol.upper()):
+ if "RESUMECOMMAND_"+protocol.upper() in mysettings:
resumecommand=mysettings["RESUMECOMMAND_"+protocol.upper()]
else:
resumecommand=mysettings["RESUMECOMMAND"]
@@ -3818,7 +3818,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
except EnvironmentError:
pass

- if mydigests!=None and mydigests.has_key(myfile):
+ if mydigests!=None and myfile in mydigests:
try:
mystat = os.stat(myfile_path)
except OSError, e:
@@ -4369,7 +4369,7 @@ def doebuild_environment(myebuild, mydo, myroot, mysettings, debug, use_cache, m
ebuild_path = os.path.abspath(myebuild)
pkg_dir = os.path.dirname(ebuild_path)

- if mysettings.configdict["pkg"].has_key("CATEGORY"):
+ if "CATEGORY" in mysettings.configdict["pkg"]:
cat = mysettings.configdict["pkg"]["CATEGORY"]
else:
cat = os.path.basename(normalize_path(os.path.join(pkg_dir, "..")))
@@ -4472,7 +4472,7 @@ def doebuild_environment(myebuild, mydo, myroot, mysettings, debug, use_cache, m
else:
mysettings["PVR"]=mysplit[1]+"-"+mysplit[2]

- if mysettings.has_key("PATH"):
+ if "PATH" in mysettings:
mysplit=mysettings["PATH"].split(":")
else:
mysplit=[]
@@ -6170,7 +6170,7 @@ def dep_wordreduce(mydeplist,mysettings,mydbapi,mode,use_cache=1):
pass
else:
mykey = dep_getkey(deplist[mypos])
- if mysettings and mysettings.pprovideddict.has_key(mykey) and \
+ if mysettings and mykey in mysettings.pprovideddict and \
match_from_list(deplist[mypos], mysettings.pprovideddict[mykey]):
deplist[mypos]=True
elif mydbapi is None:
@@ -6221,12 +6221,12 @@ def key_expand(mykey, mydb=None, use_cache=1, settings=None):
for x in mydb.categories:
if mydb.cp_list(x+"/"+mykey,use_cache=use_cache):
return x+"/"+mykey
- if virts_p.has_key(mykey):
+ if mykey in virts_p:
return(virts_p[mykey][0])
return "null/"+mykey
elif mydb:
if hasattr(mydb, "cp_list"):
- if (not mydb.cp_list(mykey,use_cache=use_cache)) and virts and virts.has_key(mykey):
+ if (not mydb.cp_list(mykey,use_cache=use_cache)) and virts and mykey in virts:
return virts[mykey][0]
return mykey

@@ -6300,7 +6300,7 @@ def cpv_expand(mycpv, mydb=None, use_cache=1, settings=None):
mykey=matches[0]

if not mykey and not isinstance(mydb, list):
- if virts_p.has_key(myp):
+ if myp in virts_p:
mykey=virts_p[myp][0]
#again, we only perform virtual expansion if we have a dbapi (not a list)
if not mykey:
@@ -6348,7 +6348,7 @@ def getmaskingreason(mycpv, metadata=None, settings=None, portdb=None, return_lo
locations.reverse()
pmasklists = [(x, grablines(os.path.join(x, "package.mask"), recursive=1)) for x in locations]

- if settings.pmaskdict.has_key(mycp):
+ if mycp in settings.pmaskdict:
for x in settings.pmaskdict[mycp]:
if match_from_list(x, cpv_slot_list):
comment = ""
@@ -6766,7 +6766,7 @@ def commit_mtimedb(mydict=None, filename=None):

def portageexit():
global uid,portage_gid,portdb,db
- if secpass and not os.environ.has_key("SANDBOX_ACTIVE"):
+ if secpass and "SANDBOX_ACTIVE" not in os.environ:
close_portdbapi_caches()
commit_mtimedb()

diff --git a/pym/portage/const.py b/pym/portage/const.py
index c96037d..94996dc 100644
--- a/pym/portage/const.py
+++ b/pym/portage/const.py
@@ -12,7 +12,7 @@ from portage.const_autotool import *
# save the original prefix
BPREFIX = EPREFIX
# pick up EPREFIX from the environment if set
-if os.environ.has_key("EPREFIX"):
+if "EPREFIX" in os.environ:
EPREFIX = os.path.normpath(os.environ["EPREFIX"])

# ===========================================================================
diff --git a/pym/portage/cvstree.py b/pym/portage/cvstree.py
index f74ecd4..d283fb4 100644
--- a/pym/portage/cvstree.py
+++ b/pym/portage/cvstree.py
@@ -17,13 +17,13 @@ def pathdata(entries, path):
mytarget=mysplit[-1]
mysplit=mysplit[:-1]
for mys in mysplit:
- if myentries["dirs"].has_key(mys):
+ if mys in myentries["dirs"]:
myentries=myentries["dirs"][mys]
else:
return None
- if myentries["dirs"].has_key(mytarget):
+ if mytarget in myentries["dirs"]:
return myentries["dirs"][mytarget]
- elif myentries["files"].has_key(mytarget):
+ elif mytarget in myentries["files"]:
return myentries["files"][mytarget]
else:
return None
@@ -242,9 +242,9 @@ def getentries(mydir,recursive=0):
if file=="digest-framerd-2.4.3":
print mydir,file
if os.path.isdir(mydir+"/"+file):
- if not entries["dirs"].has_key(file):
+ if file not in entries["dirs"]:
entries["dirs"][file]={"dirs":{},"files":{}}
- if entries["dirs"][file].has_key("status"):
+ if "status" in entries["dirs"][file]:
if "exists" not in entries["dirs"][file]["status"]:
entries["dirs"][file]["status"]+=["exists"]
else:
@@ -252,9 +252,9 @@ def getentries(mydir,recursive=0):
elif os.path.isfile(mydir+"/"+file):
if file=="digest-framerd-2.4.3":
print "isfile"
- if not entries["files"].has_key(file):
+ if file not in entries["files"]:
entries["files"][file]={"revision":"","date":"","flags":"","tags":""}
- if entries["files"][file].has_key("status"):
+ if "status" in entries["files"][file]:
if file=="digest-framerd-2.4.3":
print "has status"
if "exists" not in entries["files"][file]["status"]:
@@ -270,7 +270,7 @@ def getentries(mydir,recursive=0):
print "stat'ing"
mystat=os.stat(mydir+"/"+file)
mytime=time.asctime(time.gmtime(mystat[ST_MTIME]))
- if not entries["files"][file].has_key("status"):
+ if "status" not in entries["files"][file]:
if file=="digest-framerd-2.4.3":
print "status not set"
entries["files"][file]["status"]=[]
diff --git a/pym/portage/dispatch_conf.py b/pym/portage/dispatch_conf.py
index df256dc..541bced 100644
--- a/pym/portage/dispatch_conf.py
+++ b/pym/portage/dispatch_conf.py
@@ -31,7 +31,7 @@ def read_config(mandatory_opts):
sys.exit(1)

for key in mandatory_opts:
- if not opts.has_key(key):
+ if key not in opts:
if key == "merge":
opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
else:
diff --git a/pym/portage/getbinpkg.py b/pym/portage/getbinpkg.py
index 574da21..12b038b 100644
--- a/pym/portage/getbinpkg.py
+++ b/pym/portage/getbinpkg.py
@@ -478,15 +478,15 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
metadatafile.close()
except (cPickle.UnpicklingError, OSError, IOError, EOFError):
metadata = {}
- if not metadata.has_key(baseurl):
+ if baseurl not in metadata:
metadata[baseurl]={}
- if not metadata[baseurl].has_key("indexname"):
+ if "indexname" not in metadata[baseurl]:
metadata[baseurl]["indexname"]=""
- if not metadata[baseurl].has_key("timestamp"):
+ if "timestamp" not in metadata[baseurl]:
metadata[baseurl]["timestamp"]=0
- if not metadata[baseurl].has_key("unmodified"):
+ if "unmodified" not in metadata[baseurl]:
metadata[baseurl]["unmodified"]=0
- if not metadata[baseurl].has_key("data"):
+ if "data" not in metadata[baseurl]:
metadata[baseurl]["data"]={}

if not os.access(cache_path, os.W_OK):
@@ -648,7 +648,7 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
out.flush()

try:
- if metadata[baseurl].has_key("modified") and metadata[baseurl]["modified"]:
+ if "modified" in metadata[baseurl] and metadata[baseurl]["modified"]:
metadata[baseurl]["timestamp"] = int(time.time())
metadatafile = open(CACHE_PATH+"/remote_metadata.pickle", "w+")
cPickle.dump(metadata,metadatafile)
diff --git a/pym/portage/glsa.py b/pym/portage/glsa.py
index 2d2f27b..4dc05f7 100644
--- a/pym/portage/glsa.py
+++ b/pym/portage/glsa.py
@@ -92,7 +92,7 @@ def get_glsa_list(myconfig):
"""
rValue = []

- if myconfig.has_key("GLSA_DIR"):
+ if "GLSA_DIR" in myconfig:
repository = myconfig["GLSA_DIR"]
else:
repository = os.path.join(myconfig["PORTDIR"], "metadata", "glsa")
@@ -407,7 +407,7 @@ class Glsa:
@rtype: None
@return: None
"""
- if self.config.has_key("GLSA_DIR"):
+ if "GLSA_DIR" in self.config:
repository = "file://" + self.config["GLSA_DIR"]+"/"
else:
repository = "file://" + self.config["PORTDIR"] + "/metadata/glsa/"
@@ -470,7 +470,7 @@ class Glsa:
self.packages = {}
for p in self.affected.getElementsByTagName("package"):
name = p.getAttribute("name")
- if not self.packages.has_key(name):
+ if name not in self.packages:
self.packages[name] = []
tmp = {}
tmp["arch"] = p.getAttribute("arch")
diff --git a/pym/portage/locks.py b/pym/portage/locks.py
index f3e090f..dd3df10 100644
--- a/pym/portage/locks.py
+++ b/pym/portage/locks.py
@@ -290,9 +290,9 @@ def hardlock_cleanup(path, remove_all_locks=False):
host = "-".join(hostpid[:-1])
pid = hostpid[-1]

- if not mylist.has_key(filename):
+ if filename not in mylist:
mylist[filename] = {}
- if not mylist[filename].has_key(host):
+ if host not in mylist[filename]:
mylist[filename][host] = []
mylist[filename][host].append(pid)

@@ -302,7 +302,7 @@ def hardlock_cleanup(path, remove_all_locks=False):
results.append("Found %(count)s locks" % {"count":mycount})

for x in mylist:
- if mylist[x].has_key(myhost) or remove_all_locks:
+ if myhost in mylist[x] or remove_all_locks:
mylockname = hardlock_name(path+"/"+x)
if hardlink_is_mine(mylockname, path+"/"+x) or \
not os.path.exists(path+"/"+x) or \
diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
index 9c8ab37..50ed2f5 100644
--- a/pym/portage/manifest.py
+++ b/pym/portage/manifest.py
@@ -419,9 +419,9 @@ class Manifest(object):
""" Regenerate hashes for the given file """
if checkExisting:
self.checkFileHashes(ftype, fname, ignoreMissing=ignoreMissing)
- if not ignoreMissing and not self.fhashdict[ftype].has_key(fname):
+ if not ignoreMissing and fname not in self.fhashdict[ftype]:
raise FileNotInManifestException(fname)
- if not self.fhashdict[ftype].has_key(fname):
+ if fname not in self.fhashdict[ftype]:
self.fhashdict[ftype][fname] = {}
myhashkeys = list(self.hashes)
if reuseExisting:
diff --git a/pym/portage/util.py b/pym/portage/util.py
index f3951b5..398bdd2 100644
--- a/pym/portage/util.py
+++ b/pym/portage/util.py
@@ -175,7 +175,7 @@ def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
final_dict = {}
for y in mydict.keys():
if True:
- if final_dict.has_key(y) and (incremental or (y in incrementals)):
+ if y in final_dict and (incremental or (y in incrementals)):
final_dict[y] += " "+mydict[y][:]
else:
final_dict[y] = mydict[y][:]
@@ -494,7 +494,7 @@ def varexpand(mystring, mydict={}):
cexpand[mystring]=""
return ""
numvars=numvars+1
- if mydict.has_key(myvarname):
+ if myvarname in mydict:
newstring=newstring+mydict[myvarname]
else:
newstring=newstring+mystring[pos]
--
1.5.6.1

--
gentoo-portage-dev@l.g.o mailing list
