1 |
Author: grobian |
2 |
Date: 2009-08-24 09:28:51 +0000 (Mon, 24 Aug 2009) |
3 |
New Revision: 14138 |
4 |
|
5 |
Modified: |
6 |
main/branches/prefix/bin/ebuild-helpers/dohtml |
7 |
main/branches/prefix/bin/ebuild.sh |
8 |
main/branches/prefix/pym/_emerge/MergeListItem.py |
9 |
main/branches/prefix/pym/_emerge/PackageUninstall.py |
10 |
main/branches/prefix/pym/_emerge/Scheduler.py |
11 |
main/branches/prefix/pym/_emerge/unmerge.py |
12 |
main/branches/prefix/pym/portage/__init__.py |
13 |
main/branches/prefix/pym/portage/cache/ebuild_xattr.py |
14 |
main/branches/prefix/pym/portage/elog/__init__.py |
15 |
main/branches/prefix/pym/portage/env/loaders.py |
16 |
main/branches/prefix/pym/portage/sets/files.py |
17 |
main/branches/prefix/pym/portage/tests/__init__.py |
18 |
main/branches/prefix/pym/portage/tests/xpak/test_decodeint.py |
19 |
main/branches/prefix/pym/portage/update.py |
20 |
main/branches/prefix/pym/portage/util.py |
21 |
main/branches/prefix/pym/portage/xpak.py |
22 |
Log: |
23 |
Merged from trunk -r14067:14077 |
24 |
|
25 |
| 14068 | Use elog in _eapi0_pkg_nofetch(). | |
26 |
| zmedico | | |
27 |
|
28 |
| 14069 | sets/files.py cleanPackage function stops calling lock and | |
29 |
| volkmar | load and requires the caller to do that changing unmerge to | |
30 |
| | reflect this change | |
31 |
|
32 |
| 14070 | Scheduler is now able to clean world set when removing a | |
33 |
| volkmar | package. world_atom function has been updated and | |
34 |
| | PackageUninstall is calling it after unmerge. | |
35 |
|
36 |
| 14071 | Use a clean listener system for portage.elog instead of | |
37 |
| volkmar | _emerge_elog_listener | |
38 |
|
39 |
| 14072 | Use _content_encoding and _fs_encoding for unicode | |
40 |
| zmedico | encoding/decoding. | |
41 |
|
42 |
| 14073 | When _unicode_func_wrapper() decodes a string in a returned | |
43 |
| zmedico | list (typically from os.listdir), discard values with | |
44 |
| | invalid encoding. This ensures that all names returned from | |
45 |
| | all os.listdir() calls are valid. | |
46 |
|
47 |
| 14074 | Use portage.os and _fs_encoding where appropriate, and fix | |
48 |
| zmedico | binary string handling for py3k compat. | |
49 |
|
50 |
| 14075 | Print a warning when nonexistent files have been passed to | |
51 |
| arfrever | dohtml. | |
52 |
|
53 |
| 14076 | Add 'return False' which was missing from the previous | |
54 |
| arfrever | commit. | |
55 |
|
56 |
| 14077 | Test the edge case. | |
57 |
| zmedico | | |
58 |
|
59 |
|
60 |
Modified: main/branches/prefix/bin/ebuild-helpers/dohtml |
61 |
=================================================================== |
62 |
--- main/branches/prefix/bin/ebuild-helpers/dohtml 2009-08-24 09:27:07 UTC (rev 14137) |
63 |
+++ main/branches/prefix/bin/ebuild-helpers/dohtml 2009-08-24 09:28:51 UTC (rev 14138) |
64 |
@@ -59,7 +59,10 @@ |
65 |
else: |
66 |
destdir = options.ED + "/usr/share/doc/" + options.PF + "/html/" + options.doc_prefix + "/" + prefix |
67 |
|
68 |
- if os.path.isfile(fullpath): |
69 |
+ if not os.path.exists(fullpath): |
70 |
+ sys.stderr.write("!!! dohtml: %s does not exist\n" % fullpath) |
71 |
+ return False |
72 |
+ elif os.path.isfile(fullpath): |
73 |
ext = os.path.splitext(basename)[1] |
74 |
if (len(ext) and ext[1:] in options.allowed_exts) or basename in options.allowed_files: |
75 |
dodir(destdir) |
76 |
|
77 |
Modified: main/branches/prefix/bin/ebuild.sh |
78 |
=================================================================== |
79 |
--- main/branches/prefix/bin/ebuild.sh 2009-08-24 09:27:07 UTC (rev 14137) |
80 |
+++ main/branches/prefix/bin/ebuild.sh 2009-08-24 09:28:51 UTC (rev 14138) |
81 |
@@ -588,10 +588,10 @@ |
82 |
_eapi0_pkg_nofetch() { |
83 |
[ -z "${SRC_URI}" ] && return |
84 |
|
85 |
- echo "!!! The following are listed in SRC_URI for ${PN}:" |
86 |
+ elog "The following are listed in SRC_URI for ${PN}:" |
87 |
local x |
88 |
for x in $(echo ${SRC_URI}); do |
89 |
- echo "!!! ${x}" |
90 |
+ elog " ${x}" |
91 |
done |
92 |
} |
93 |
|
94 |
|
95 |
Modified: main/branches/prefix/pym/_emerge/MergeListItem.py |
96 |
=================================================================== |
97 |
--- main/branches/prefix/pym/_emerge/MergeListItem.py 2009-08-24 09:27:07 UTC (rev 14137) |
98 |
+++ main/branches/prefix/pym/_emerge/MergeListItem.py 2009-08-24 09:28:51 UTC (rev 14138) |
99 |
@@ -132,7 +132,8 @@ |
100 |
|
101 |
uninstall = PackageUninstall(background=self.background, |
102 |
ldpath_mtimes=ldpath_mtimes, opts=self.emerge_opts, |
103 |
- pkg=pkg, scheduler=scheduler, settings=settings) |
104 |
+ pkg=pkg, scheduler=scheduler, settings=settings, |
105 |
+ world_atom=world_atom) |
106 |
|
107 |
uninstall.start() |
108 |
retval = uninstall.wait() |
109 |
|
110 |
Modified: main/branches/prefix/pym/_emerge/PackageUninstall.py |
111 |
=================================================================== |
112 |
--- main/branches/prefix/pym/_emerge/PackageUninstall.py 2009-08-24 09:27:07 UTC (rev 14137) |
113 |
+++ main/branches/prefix/pym/_emerge/PackageUninstall.py 2009-08-24 09:28:51 UTC (rev 14138) |
114 |
@@ -12,11 +12,12 @@ |
115 |
|
116 |
class PackageUninstall(AsynchronousTask): |
117 |
|
118 |
- __slots__ = ("ldpath_mtimes", "opts", "pkg", "scheduler", "settings") |
119 |
+ __slots__ = ("world_atom", "ldpath_mtimes", "opts", |
120 |
+ "pkg", "scheduler", "settings") |
121 |
|
122 |
def _start(self): |
123 |
try: |
124 |
- unmerge(self.pkg.root_config, self.opts, "unmerge", |
125 |
+ retval = unmerge(self.pkg.root_config, self.opts, "unmerge", |
126 |
[self.pkg.cpv], self.ldpath_mtimes, clean_world=0, |
127 |
clean_delay=0, raise_on_error=1, scheduler=self.scheduler, |
128 |
writemsg_level=self._writemsg_level) |
129 |
@@ -24,6 +25,10 @@ |
130 |
self.returncode = e.status |
131 |
else: |
132 |
self.returncode = os.EX_OK |
133 |
+ |
134 |
+ if retval == 1: |
135 |
+ self.world_atom(self.pkg) |
136 |
+ |
137 |
self.wait() |
138 |
|
139 |
def _writemsg_level(self, msg, level=0, noiselevel=0): |
140 |
|
141 |
Modified: main/branches/prefix/pym/_emerge/Scheduler.py |
142 |
=================================================================== |
143 |
--- main/branches/prefix/pym/_emerge/Scheduler.py 2009-08-24 09:27:07 UTC (rev 14137) |
144 |
+++ main/branches/prefix/pym/_emerge/Scheduler.py 2009-08-24 09:28:51 UTC (rev 14138) |
145 |
@@ -1108,7 +1108,7 @@ |
146 |
pkg_queue = self._pkg_queue |
147 |
failed_pkgs = self._failed_pkgs |
148 |
portage.locks._quiet = self._background |
149 |
- portage.elog._emerge_elog_listener = self._elog_listener |
150 |
+ portage.elog.add_listener(self._elog_listener) |
151 |
rval = os.EX_OK |
152 |
|
153 |
try: |
154 |
@@ -1116,7 +1116,7 @@ |
155 |
finally: |
156 |
self._main_loop_cleanup() |
157 |
portage.locks._quiet = False |
158 |
- portage.elog._emerge_elog_listener = None |
159 |
+ portage.elog.remove_listener(self._elog_listener) |
160 |
if failed_pkgs: |
161 |
rval = failed_pkgs[-1].returncode |
162 |
|
163 |
@@ -1566,8 +1566,8 @@ |
164 |
|
165 |
def _world_atom(self, pkg): |
166 |
""" |
167 |
- Add the package to the world file, but only if |
168 |
- it's supposed to be added. Otherwise, do nothing. |
169 |
+ Add or remove the package to the world file, but only if |
170 |
+ it's supposed to be added or removed. Otherwise, do nothing. |
171 |
""" |
172 |
|
173 |
if set(("--buildpkgonly", "--fetchonly", |
174 |
@@ -1596,17 +1596,25 @@ |
175 |
if hasattr(world_set, "load"): |
176 |
world_set.load() # maybe it's changed on disk |
177 |
|
178 |
- atom = create_world_atom(pkg, args_set, root_config) |
179 |
- if atom: |
180 |
- if hasattr(world_set, "add"): |
181 |
- self._status_msg(('Recording %s in "world" ' + \ |
182 |
- 'favorites file...') % atom) |
183 |
- logger.log(" === (%s of %s) Updating world file (%s)" % \ |
184 |
- (pkg_count.curval, pkg_count.maxval, pkg.cpv)) |
185 |
- world_set.add(atom) |
186 |
- else: |
187 |
- writemsg_level('\n!!! Unable to record %s in "world"\n' % \ |
188 |
- (atom,), level=logging.WARN, noiselevel=-1) |
189 |
+ if pkg.operation == "uninstall": |
190 |
+ if hasattr(world_set, "cleanPackage"): |
191 |
+ world_set.cleanPackage(pkg.root_config.trees["vartree"].dbapi, |
192 |
+ pkg.cpv) |
193 |
+ if hasattr(world_set, "remove"): |
194 |
+ for s in pkg.root_config.setconfig.active: |
195 |
+ world_set.remove(SETPREFIX+s) |
196 |
+ else: |
197 |
+ atom = create_world_atom(pkg, args_set, root_config) |
198 |
+ if atom: |
199 |
+ if hasattr(world_set, "add"): |
200 |
+ self._status_msg(('Recording %s in "world" ' + \ |
201 |
+ 'favorites file...') % atom) |
202 |
+ logger.log(" === (%s of %s) Updating world file (%s)" % \ |
203 |
+ (pkg_count.curval, pkg_count.maxval, pkg.cpv)) |
204 |
+ world_set.add(atom) |
205 |
+ else: |
206 |
+ writemsg_level('\n!!! Unable to record %s in "world"\n' % \ |
207 |
+ (atom,), level=logging.WARN, noiselevel=-1) |
208 |
finally: |
209 |
if world_locked: |
210 |
world_set.unlock() |
211 |
|
212 |
Modified: main/branches/prefix/pym/_emerge/unmerge.py |
213 |
=================================================================== |
214 |
--- main/branches/prefix/pym/_emerge/unmerge.py 2009-08-24 09:27:07 UTC (rev 14137) |
215 |
+++ main/branches/prefix/pym/_emerge/unmerge.py 2009-08-24 09:28:51 UTC (rev 14138) |
216 |
@@ -510,11 +510,22 @@ |
217 |
raise UninstallFailure(retval) |
218 |
sys.exit(retval) |
219 |
else: |
220 |
- if clean_world and hasattr(sets["world"], "cleanPackage"): |
221 |
+ if clean_world and hasattr(sets["world"], "cleanPackage")\ |
222 |
+ and hasattr(sets["world"], "lock"): |
223 |
+ sets["world"].lock() |
224 |
+ if hasattr(sets["world"], "load"): |
225 |
+ sets["world"].load() |
226 |
sets["world"].cleanPackage(vartree.dbapi, y) |
227 |
+ sets["world"].unlock() |
228 |
emergelog(xterm_titles, " >>> unmerge success: "+y) |
229 |
- if clean_world and hasattr(sets["world"], "remove"): |
230 |
+ |
231 |
+ if clean_world and hasattr(sets["world"], "remove")\ |
232 |
+ and hasattr(sets["world"], "lock"): |
233 |
+ sets["world"].lock() |
234 |
+ # load is called inside remove() |
235 |
for s in root_config.setconfig.active: |
236 |
sets["world"].remove(SETPREFIX+s) |
237 |
+ sets["world"].unlock() |
238 |
+ |
239 |
return 1 |
240 |
|
241 |
|
242 |
Modified: main/branches/prefix/pym/portage/__init__.py |
243 |
=================================================================== |
244 |
--- main/branches/prefix/pym/portage/__init__.py 2009-08-24 09:27:07 UTC (rev 14137) |
245 |
+++ main/branches/prefix/pym/portage/__init__.py 2009-08-24 09:28:51 UTC (rev 14138) |
246 |
@@ -169,12 +169,21 @@ |
247 |
if isinstance(rval, (basestring, list, tuple)): |
248 |
if isinstance(rval, basestring): |
249 |
rval = _unicode_decode(rval, encoding=encoding) |
250 |
- elif isinstance(rval, list): |
251 |
- rval = [_unicode_decode(x, encoding=encoding) for x in rval] |
252 |
- elif isinstance(rval, tuple): |
253 |
- rval = tuple(_unicode_decode(x, encoding=encoding) \ |
254 |
- for x in rval) |
255 |
+ else: |
256 |
+ decoded_rval = [] |
257 |
+ for x in rval: |
258 |
+ try: |
259 |
+ x = _unicode_decode(x, encoding=encoding, errors='strict') |
260 |
+ except UnicodeDecodeError: |
261 |
+ pass |
262 |
+ else: |
263 |
+ decoded_rval.append(x) |
264 |
|
265 |
+ if isinstance(rval, tuple): |
266 |
+ rval = tuple(decoded_rval) |
267 |
+ else: |
268 |
+ rval = decoded_rval |
269 |
+ |
270 |
return rval |
271 |
|
272 |
class _unicode_module_wrapper(object): |
273 |
|
274 |
Modified: main/branches/prefix/pym/portage/cache/ebuild_xattr.py |
275 |
=================================================================== |
276 |
--- main/branches/prefix/pym/portage/cache/ebuild_xattr.py 2009-08-24 09:27:07 UTC (rev 14137) |
277 |
+++ main/branches/prefix/pym/portage/cache/ebuild_xattr.py 2009-08-24 09:28:51 UTC (rev 14138) |
278 |
@@ -10,6 +10,7 @@ |
279 |
from portage.versions import catsplit |
280 |
from portage import cpv_getkey |
281 |
from portage import os |
282 |
+from portage import _fs_encoding |
283 |
from portage import _unicode_decode |
284 |
import xattr |
285 |
from errno import ENODATA,ENOSPC,E2BIG |
286 |
@@ -156,7 +157,11 @@ |
287 |
|
288 |
for root, dirs, files in os.walk(self.portdir): |
289 |
for file in files: |
290 |
- file = _unicode_decode(file) |
291 |
+ try: |
292 |
+ file = _unicode_decode(file, |
293 |
+ encoding=_fs_encoding, errors='strict') |
294 |
+ except UnicodeDecodeError: |
295 |
+ continue |
296 |
if file[-7:] == '.ebuild': |
297 |
cat = os.path.basename(os.path.dirname(root)) |
298 |
pn_pv = file[:-7] |
299 |
|
300 |
Modified: main/branches/prefix/pym/portage/elog/__init__.py |
301 |
=================================================================== |
302 |
--- main/branches/prefix/pym/portage/elog/__init__.py 2009-08-24 09:27:07 UTC (rev 14137) |
303 |
+++ main/branches/prefix/pym/portage/elog/__init__.py 2009-08-24 09:28:51 UTC (rev 14138) |
304 |
@@ -56,11 +56,23 @@ |
305 |
_elog_mod_imports[name] = m |
306 |
return m |
307 |
|
308 |
-_emerge_elog_listener = None |
309 |
+_elog_listeners = [] |
310 |
+def add_listener(listener): |
311 |
+ ''' |
312 |
+ Listeners should accept four arguments: settings, key, logentries and logtext |
313 |
+ ''' |
314 |
+ _elog_listeners.append(listener) |
315 |
+ |
316 |
+def remove_listener(listener): |
317 |
+ ''' |
318 |
+ Remove previously added listener |
319 |
+ ''' |
320 |
+ _elog_listeners.remove(listener) |
321 |
+ |
322 |
_elog_atexit_handlers = [] |
323 |
_preserve_logentries = {} |
324 |
def elog_process(cpv, mysettings, phasefilter=None): |
325 |
- global _elog_atexit_handlers, _emerge_elog_listener, _preserve_logentries |
326 |
+ global _elog_atexit_handlers, _preserve_logentries |
327 |
|
328 |
logsystems = mysettings.get("PORTAGE_ELOG_SYSTEM","").split() |
329 |
for s in logsystems: |
330 |
@@ -123,9 +135,9 @@ |
331 |
|
332 |
default_fulllog = _combine_logentries(default_logentries) |
333 |
|
334 |
- if _emerge_elog_listener is not None: |
335 |
- _emerge_elog_listener(mysettings, str(key), |
336 |
- default_logentries, default_fulllog) |
337 |
+ # call listeners |
338 |
+ for listener in _elog_listeners: |
339 |
+ listener(mysettings, str(key), default_logentries, default_fulllog) |
340 |
|
341 |
# pass the processing to the individual modules |
342 |
for s, levels in logsystems.iteritems(): |
343 |
|
344 |
Modified: main/branches/prefix/pym/portage/env/loaders.py |
345 |
=================================================================== |
346 |
--- main/branches/prefix/pym/portage/env/loaders.py 2009-08-24 09:27:07 UTC (rev 14137) |
347 |
+++ main/branches/prefix/pym/portage/env/loaders.py 2009-08-24 09:28:51 UTC (rev 14138) |
348 |
@@ -4,8 +4,13 @@ |
349 |
# $Id$ |
350 |
|
351 |
import codecs |
352 |
-import os |
353 |
+import errno |
354 |
import stat |
355 |
+from portage import os |
356 |
+from portage import _content_encoding |
357 |
+from portage import _fs_encoding |
358 |
+from portage import _unicode_decode |
359 |
+from portage import _unicode_encode |
360 |
from portage.localization import _ |
361 |
|
362 |
class LoaderError(Exception): |
363 |
@@ -40,11 +45,6 @@ |
364 |
@returns: List of files to process |
365 |
""" |
366 |
|
367 |
- if isinstance(filename, unicode): |
368 |
- # Avoid UnicodeDecodeError raised from |
369 |
- # os.path.join when called by os.walk. |
370 |
- filename = filename.encode('utf_8', 'replace') |
371 |
- |
372 |
try: |
373 |
st = os.stat(filename) |
374 |
except OSError: |
375 |
@@ -55,6 +55,11 @@ |
376 |
if d[:1] == '.' or d == 'CVS': |
377 |
dirs.remove(d) |
378 |
for f in files: |
379 |
+ try: |
380 |
+ f = _unicode_decode(f, |
381 |
+ encoding=_fs_encoding, errors='strict') |
382 |
+ except UnicodeDecodeError: |
383 |
+ continue |
384 |
if f[:1] == '.' or f[-1:] == '~': |
385 |
continue |
386 |
yield os.path.join(root, f) |
387 |
@@ -145,9 +150,18 @@ |
388 |
# once, which may be expensive due to digging in child classes. |
389 |
func = self.lineParser |
390 |
for fn in RecursiveFileLoader(self.fname): |
391 |
- f = codecs.open(fn, mode='r', encoding='utf_8', errors='replace') |
392 |
+ try: |
393 |
+ f = codecs.open(_unicode_encode(fn, |
394 |
+ encoding=_fs_encoding, errors='strict'), mode='r', |
395 |
+ encoding=_content_encoding, errors='replace') |
396 |
+ except EnvironmentError, e: |
397 |
+ if e.errno not in (errno.ENOENT, errno.ESTALE): |
398 |
+ raise |
399 |
+ del e |
400 |
+ continue |
401 |
for line_num, line in enumerate(f): |
402 |
func(line, line_num, data, errors) |
403 |
+ f.close() |
404 |
return (data, errors) |
405 |
|
406 |
def lineParser(self, line, line_num, data, errors): |
407 |
|
408 |
Modified: main/branches/prefix/pym/portage/sets/files.py |
409 |
=================================================================== |
410 |
--- main/branches/prefix/pym/portage/sets/files.py 2009-08-24 09:27:07 UTC (rev 14137) |
411 |
+++ main/branches/prefix/pym/portage/sets/files.py 2009-08-24 09:28:51 UTC (rev 14138) |
412 |
@@ -293,8 +293,13 @@ |
413 |
self._lock = None |
414 |
|
415 |
def cleanPackage(self, vardb, cpv): |
416 |
- self.lock() |
417 |
- self._load() # loads latest from disk |
418 |
+ ''' |
419 |
+ Before calling this function you should call lock and load. |
420 |
+ After calling this function you should call unlock. |
421 |
+ ''' |
422 |
+ if not self._lock: |
423 |
+ raise AssertionError('cleanPackage needs the set to be locked') |
424 |
+ |
425 |
worldlist = list(self._atoms) |
426 |
mykey = cpv_getkey(cpv) |
427 |
newworldlist = [] |
428 |
@@ -316,7 +321,6 @@ |
429 |
|
430 |
newworldlist.extend(self._nonatoms) |
431 |
self.replace(newworldlist) |
432 |
- self.unlock() |
433 |
|
434 |
def singleBuilder(self, options, settings, trees): |
435 |
return WorldSet(settings["ROOT"]) |
436 |
|
437 |
Modified: main/branches/prefix/pym/portage/tests/__init__.py |
438 |
=================================================================== |
439 |
--- main/branches/prefix/pym/portage/tests/__init__.py 2009-08-24 09:27:07 UTC (rev 14137) |
440 |
+++ main/branches/prefix/pym/portage/tests/__init__.py 2009-08-24 09:28:51 UTC (rev 14138) |
441 |
@@ -3,14 +3,21 @@ |
442 |
# Distributed under the terms of the GNU General Public License v2 |
443 |
# $Id$ |
444 |
|
445 |
-import os |
446 |
import sys |
447 |
import time |
448 |
import unittest |
449 |
|
450 |
+from portage import os |
451 |
+from portage import _fs_encoding |
452 |
+from portage import _unicode_encode |
453 |
+from portage import _unicode_decode |
454 |
+ |
455 |
def main(): |
456 |
|
457 |
- TEST_FILE = '__test__' |
458 |
+ TEST_FILE = _unicode_encode('__test__', |
459 |
+ encoding=_fs_encoding, errors='strict') |
460 |
+ svn_dirname = _unicode_encode('.svn', |
461 |
+ encoding=_fs_encoding, errors='strict') |
462 |
suite = unittest.TestSuite() |
463 |
basedir = os.path.dirname(os.path.realpath(__file__)) |
464 |
testDirs = [] |
465 |
@@ -19,8 +26,14 @@ |
466 |
# I was tired of adding dirs to the list, so now we add __test__ |
467 |
# to each dir we want tested. |
468 |
for root, dirs, files in os.walk(basedir): |
469 |
- if ".svn" in dirs: |
470 |
- dirs.remove('.svn') |
471 |
+ if svn_dirname in dirs: |
472 |
+ dirs.remove(svn_dirname) |
473 |
+ try: |
474 |
+ root = _unicode_decode(root, |
475 |
+ encoding=_fs_encoding, errors='strict') |
476 |
+ except UnicodeDecodeError: |
477 |
+ continue |
478 |
+ |
479 |
if TEST_FILE in files: |
480 |
testDirs.append(root) |
481 |
|
482 |
|
483 |
Modified: main/branches/prefix/pym/portage/tests/xpak/test_decodeint.py |
484 |
=================================================================== |
485 |
--- main/branches/prefix/pym/portage/tests/xpak/test_decodeint.py 2009-08-24 09:27:07 UTC (rev 14137) |
486 |
+++ main/branches/prefix/pym/portage/tests/xpak/test_decodeint.py 2009-08-24 09:28:51 UTC (rev 14138) |
487 |
@@ -12,3 +12,6 @@ |
488 |
|
489 |
for n in xrange(1000): |
490 |
self.assertEqual(decodeint(encodeint(n)), n) |
491 |
+ |
492 |
+ for n in (2 ** 32 - 1,): |
493 |
+ self.assertEqual(decodeint(encodeint(n)), n) |
494 |
|
495 |
Modified: main/branches/prefix/pym/portage/update.py |
496 |
=================================================================== |
497 |
--- main/branches/prefix/pym/portage/update.py 2009-08-24 09:27:07 UTC (rev 14137) |
498 |
+++ main/branches/prefix/pym/portage/update.py 2009-08-24 09:28:51 UTC (rev 14138) |
499 |
@@ -2,8 +2,16 @@ |
500 |
# Distributed under the terms of the GNU General Public License v2 |
501 |
# $Id$ |
502 |
|
503 |
-import errno, os, re, sys |
504 |
+import codecs |
505 |
+import errno |
506 |
+import re |
507 |
+import sys |
508 |
|
509 |
+from portage import os |
510 |
+from portage import _content_encoding |
511 |
+from portage import _fs_encoding |
512 |
+from portage import _unicode_decode |
513 |
+from portage import _unicode_encode |
514 |
import portage |
515 |
portage.proxy.lazyimport.lazyimport(globals(), |
516 |
'portage.dep:dep_getkey,get_operator,isvalidatom,isjustname,remove_slot', |
517 |
@@ -12,7 +20,7 @@ |
518 |
'portage.versions:ververify' |
519 |
) |
520 |
|
521 |
-from portage.const import USER_CONFIG_PATH, WORLD_FILE |
522 |
+from portage.const import USER_CONFIG_PATH |
523 |
from portage.exception import DirectoryNotFound, PortageException |
524 |
from portage.localization import _ |
525 |
|
526 |
@@ -63,9 +71,10 @@ |
527 |
mydata = {} |
528 |
for myfile in [f for f in os.listdir(dbdir) if f not in ignored_dbentries]: |
529 |
file_path = os.path.join(dbdir, myfile) |
530 |
- f = open(file_path, "r") |
531 |
- mydata[myfile] = f.read() |
532 |
- f.close() |
533 |
+ mydata[myfile] = codecs.open(_unicode_encode(file_path, |
534 |
+ encoding=_fs_encoding, errors='strict'), |
535 |
+ mode='r', encoding=_content_encoding, |
536 |
+ errors='replace').read() |
537 |
updated_items = update_dbentries(update_iter, mydata) |
538 |
for myfile, mycontent in updated_items.iteritems(): |
539 |
file_path = os.path.join(dbdir, myfile) |
540 |
@@ -100,9 +109,9 @@ |
541 |
mystat = os.stat(file_path) |
542 |
if file_path not in prev_mtimes or \ |
543 |
long(prev_mtimes[file_path]) != long(mystat.st_mtime): |
544 |
- f = open(file_path) |
545 |
- content = f.read() |
546 |
- f.close() |
547 |
+ content = codecs.open(_unicode_encode(file_path, |
548 |
+ encoding=_fs_encoding, errors='strict'), |
549 |
+ mode='r', encoding=_content_encoding, errors='replace').read() |
550 |
update_data.append((file_path, mystat, content)) |
551 |
return update_data |
552 |
|
553 |
@@ -142,17 +151,12 @@ |
554 |
return myupd, errors |
555 |
|
556 |
def update_config_files(config_root, protect, protect_mask, update_iter): |
557 |
- """Perform global updates on /etc/portage/package.* and the world file. |
558 |
+ """Perform global updates on /etc/portage/package.*. |
559 |
config_root - location of files to update |
560 |
protect - list of paths from CONFIG_PROTECT |
561 |
protect_mask - list of paths from CONFIG_PROTECT_MASK |
562 |
update_iter - list of update commands as returned from parse_updates()""" |
563 |
|
564 |
- if isinstance(config_root, unicode): |
565 |
- # Avoid UnicodeDecodeError raised from |
566 |
- # os.path.join when called by os.walk. |
567 |
- config_root = config_root.encode('utf_8', 'replace') |
568 |
- |
569 |
config_root = normalize_path(config_root) |
570 |
update_files = {} |
571 |
file_contents = {} |
572 |
@@ -166,9 +170,20 @@ |
573 |
if os.path.isdir(config_file): |
574 |
for parent, dirs, files in os.walk(config_file): |
575 |
for y in dirs: |
576 |
+ try: |
577 |
+ y = _unicode_decode(y, |
578 |
+ encoding=_fs_encoding, errors='strict') |
579 |
+ except UnicodeDecodeError: |
580 |
+ dirs.remove(y) |
581 |
+ continue |
582 |
if y.startswith("."): |
583 |
dirs.remove(y) |
584 |
for y in files: |
585 |
+ try: |
586 |
+ y = _unicode_decode(y, |
587 |
+ encoding=_fs_encoding, errors='strict') |
588 |
+ except UnicodeDecodeError: |
589 |
+ continue |
590 |
if y.startswith("."): |
591 |
continue |
592 |
recursivefiles.append( |
593 |
@@ -178,9 +193,11 @@ |
594 |
myxfiles = recursivefiles |
595 |
for x in myxfiles: |
596 |
try: |
597 |
- myfile = open(os.path.join(abs_user_config, x),"r") |
598 |
- file_contents[x] = myfile.readlines() |
599 |
- myfile.close() |
600 |
+ file_contents[x] = codecs.open( |
601 |
+ _unicode_encode(os.path.join(abs_user_config, x), |
602 |
+ encoding=_fs_encoding, errors='strict'), |
603 |
+ mode='r', encoding=_content_encoding, |
604 |
+ errors='replace').readlines() |
605 |
except IOError: |
606 |
if file_contents.has_key(x): |
607 |
del file_contents[x] |
608 |
|
609 |
Modified: main/branches/prefix/pym/portage/util.py |
610 |
=================================================================== |
611 |
--- main/branches/prefix/pym/portage/util.py 2009-08-24 09:27:07 UTC (rev 14137) |
612 |
+++ main/branches/prefix/pym/portage/util.py 2009-08-24 09:28:51 UTC (rev 14138) |
613 |
@@ -14,7 +14,6 @@ |
614 |
|
615 |
import commands |
616 |
import codecs |
617 |
-import os |
618 |
import errno |
619 |
import logging |
620 |
import shlex |
621 |
@@ -24,7 +23,8 @@ |
622 |
|
623 |
import portage |
624 |
from portage import os |
625 |
-from portage import _merge_encoding |
626 |
+from portage import _content_encoding |
627 |
+from portage import _fs_encoding |
628 |
from portage import _os_merge |
629 |
from portage import _unicode_encode |
630 |
from portage import _unicode_decode |
631 |
@@ -328,8 +328,9 @@ |
632 |
os.path.join(myfilename, f), recursive)) |
633 |
else: |
634 |
try: |
635 |
- myfile = codecs.open(_unicode_encode(myfilename), |
636 |
- mode='r', encoding='utf_8', errors='replace') |
637 |
+ myfile = codecs.open(_unicode_encode(myfilename, |
638 |
+ encoding=_fs_encoding, errors='strict'), |
639 |
+ mode='r', encoding=_content_encoding, errors='replace') |
640 |
mylines = myfile.readlines() |
641 |
myfile.close() |
642 |
except IOError, e: |
643 |
@@ -395,10 +396,12 @@ |
644 |
# NOTE: shex doesn't seem to support unicode objects |
645 |
# (produces spurious \0 characters with python-2.6.2) |
646 |
if sys.hexversion < 0x3000000: |
647 |
- content = open(_unicode_encode(mycfg), 'rb').read() |
648 |
+ content = open(_unicode_encode(mycfg, |
649 |
+ encoding=_fs_encoding, errors='strict'), 'rb').read() |
650 |
else: |
651 |
- content = open(_unicode_encode(mycfg), mode='r', |
652 |
- encoding='utf_8', errors='replace').read() |
653 |
+ content = open(_unicode_encode(mycfg, |
654 |
+ encoding=_fs_encoding, errors='strict'), mode='r', |
655 |
+ encoding=_content_encoding, errors='replace').read() |
656 |
if content and content[-1] != '\n': |
657 |
content += '\n' |
658 |
except IOError, e: |
659 |
@@ -590,7 +593,8 @@ |
660 |
return default |
661 |
data = None |
662 |
try: |
663 |
- myf = open(_unicode_encode(filename), 'rb') |
664 |
+ myf = open(_unicode_encode(filename, |
665 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
666 |
mypickle = pickle.Unpickler(myf) |
667 |
data = mypickle.load() |
668 |
myf.close() |
669 |
@@ -905,7 +909,7 @@ |
670 |
open_func = open |
671 |
else: |
672 |
open_func = codecs.open |
673 |
- kargs.setdefault('encoding', 'utf_8') |
674 |
+ kargs.setdefault('encoding', _content_encoding) |
675 |
kargs.setdefault('errors', 'replace') |
676 |
|
677 |
if follow_links: |
678 |
@@ -914,7 +918,9 @@ |
679 |
tmp_name = "%s.%i" % (canonical_path, os.getpid()) |
680 |
try: |
681 |
object.__setattr__(self, '_file', |
682 |
- open_func(_unicode_encode(tmp_name), mode=mode, **kargs)) |
683 |
+ open_func(_unicode_encode(tmp_name, |
684 |
+ encoding=_fs_encoding, errors='strict'), |
685 |
+ mode=mode, **kargs)) |
686 |
return |
687 |
except IOError, e: |
688 |
if canonical_path == filename: |
689 |
@@ -926,7 +932,9 @@ |
690 |
object.__setattr__(self, '_real_name', filename) |
691 |
tmp_name = "%s.%i" % (filename, os.getpid()) |
692 |
object.__setattr__(self, '_file', |
693 |
- open_func(_unicode_encode(tmp_name), mode=mode, **kargs)) |
694 |
+ open_func(_unicode_encode(tmp_name, |
695 |
+ encoding=_fs_encoding, errors='strict'), |
696 |
+ mode=mode, **kargs)) |
697 |
|
698 |
def _get_target(self): |
699 |
return object.__getattribute__(self, '_file') |
700 |
|
701 |
Modified: main/branches/prefix/pym/portage/xpak.py |
702 |
=================================================================== |
703 |
--- main/branches/prefix/pym/portage/xpak.py 2009-08-24 09:27:07 UTC (rev 14137) |
704 |
+++ main/branches/prefix/pym/portage/xpak.py 2009-08-24 09:28:51 UTC (rev 14138) |
705 |
@@ -16,29 +16,50 @@ |
706 |
# (integer) == encodeint(integer) ===> 4 characters (big-endian copy) |
707 |
# '+' means concatenate the fields ===> All chunks are strings |
708 |
|
709 |
-import sys,os,shutil,errno |
710 |
-from stat import * |
711 |
+import array |
712 |
+import errno |
713 |
+import shutil |
714 |
|
715 |
-def addtolist(mylist,curdir): |
716 |
+from portage import os |
717 |
+from portage import normalize_path |
718 |
+from portage import _fs_encoding |
719 |
+from portage import _unicode_decode |
720 |
+from portage import _unicode_encode |
721 |
+ |
722 |
+def addtolist(mylist, curdir): |
723 |
"""(list, dir) --- Takes an array(list) and appends all files from dir down |
724 |
the directory tree. Returns nothing. list is modified.""" |
725 |
- for x in os.listdir("."): |
726 |
- if os.path.isdir(x): |
727 |
- os.chdir(x) |
728 |
- addtolist(mylist,curdir+x+"/") |
729 |
- os.chdir("..") |
730 |
- else: |
731 |
- if curdir+x not in mylist: |
732 |
- mylist.append(curdir+x) |
733 |
+ curdir = normalize_path(_unicode_decode(curdir, |
734 |
+ encoding=_fs_encoding, errors='strict')) |
735 |
+ for parent, dirs, files in os.walk(curdir): |
736 |
|
737 |
+ parent = _unicode_decode(parent, |
738 |
+ encoding=_fs_encoding, errors='strict') |
739 |
+ if parent != curdir: |
740 |
+ mylist.append(parent[len(curdir) + 1:] + os.sep) |
741 |
+ |
742 |
+ for x in dirs: |
743 |
+ try: |
744 |
+ _unicode_decode(x, encoding=_fs_encoding, errors='strict') |
745 |
+ except UnicodeDecodeError: |
746 |
+ dirs.remove(x) |
747 |
+ |
748 |
+ for x in files: |
749 |
+ try: |
750 |
+ x = _unicode_decode(x, encoding=_fs_encoding, errors='strict') |
751 |
+ except UnicodeDecodeError: |
752 |
+ continue |
753 |
+ mylist.append(os.path.join(parent, x)[len(curdir) + 1:]) |
754 |
+ |
755 |
def encodeint(myint): |
756 |
"""Takes a 4 byte integer and converts it into a string of 4 characters. |
757 |
Returns the characters in a string.""" |
758 |
- part1=chr((myint >> 24 ) & 0x000000ff) |
759 |
- part2=chr((myint >> 16 ) & 0x000000ff) |
760 |
- part3=chr((myint >> 8 ) & 0x000000ff) |
761 |
- part4=chr(myint & 0x000000ff) |
762 |
- return part1+part2+part3+part4 |
763 |
+ a = array.array('B') |
764 |
+ a.append((myint >> 24 ) & 0xff) |
765 |
+ a.append((myint >> 16 ) & 0xff) |
766 |
+ a.append((myint >> 8 ) & 0xff) |
767 |
+ a.append(myint & 0xff) |
768 |
+ return a.tostring() |
769 |
|
770 |
def decodeint(mystring): |
771 |
"""Takes a 4 byte string and converts it into a 4 byte integer. |
772 |
@@ -54,28 +75,20 @@ |
773 |
"""(rootdir,outfile) -- creates an xpak segment of the directory 'rootdir' |
774 |
and under the name 'outfile' if it is specified. Otherwise it returns the |
775 |
xpak segment.""" |
776 |
- try: |
777 |
- origdir=os.getcwd() |
778 |
- except SystemExit, e: |
779 |
- raise |
780 |
- except: |
781 |
- os.chdir("/") |
782 |
- origdir="/" |
783 |
- os.chdir(rootdir) |
784 |
+ |
785 |
mylist=[] |
786 |
|
787 |
- addtolist(mylist,"") |
788 |
+ addtolist(mylist, rootdir) |
789 |
mylist.sort() |
790 |
mydata = {} |
791 |
for x in mylist: |
792 |
- a = open(x, 'rb') |
793 |
- mydata[x] = a.read() |
794 |
- a.close() |
795 |
- os.chdir(origdir) |
796 |
+ x = _unicode_encode(x, encoding=_fs_encoding, errors='strict') |
797 |
+ mydata[x] = open(os.path.join(rootdir, x), 'rb').read() |
798 |
|
799 |
xpak_segment = xpak_mem(mydata) |
800 |
if outfile: |
801 |
- outf = open(outfile, 'wb') |
802 |
+ outf = open(_unicode_encode(outfile, |
803 |
+ encoding=_fs_encoding, errors='strict'), 'wb') |
804 |
outf.write(xpak_segment) |
805 |
outf.close() |
806 |
else: |
807 |
@@ -83,9 +96,9 @@ |
808 |
|
809 |
def xpak_mem(mydata): |
810 |
"""Create an xpack segement from a map object.""" |
811 |
- indexglob="" |
812 |
+ indexglob = _unicode_encode('') |
813 |
indexpos=0 |
814 |
- dataglob="" |
815 |
+ dataglob = _unicode_encode('') |
816 |
datapos=0 |
817 |
for x, newglob in mydata.iteritems(): |
818 |
mydatasize=len(newglob) |
819 |
@@ -93,18 +106,21 @@ |
820 |
indexpos=indexpos+4+len(x)+4+4 |
821 |
dataglob=dataglob+newglob |
822 |
datapos=datapos+mydatasize |
823 |
- return "XPAKPACK" \ |
824 |
+ return _unicode_encode('XPAKPACK') \ |
825 |
+ encodeint(len(indexglob)) \ |
826 |
+ encodeint(len(dataglob)) \ |
827 |
+ indexglob \ |
828 |
+ dataglob \ |
829 |
- + "XPAKSTOP" |
830 |
+ + _unicode_encode('XPAKSTOP') |
831 |
|
832 |
def xsplit(infile): |
833 |
"""(infile) -- Splits the infile into two files. |
834 |
'infile.index' contains the index segment. |
835 |
'infile.dat' contails the data segment.""" |
836 |
- myfile = open(infile, 'rb') |
837 |
+ infile = _unicode_decode(infile, |
838 |
+ encoding=_fs_encoding, errors='strict') |
839 |
+ myfile = open(_unicode_encode(infile, |
840 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
841 |
mydat=myfile.read() |
842 |
myfile.close() |
843 |
|
844 |
@@ -112,27 +128,30 @@ |
845 |
if not splits: |
846 |
return False |
847 |
|
848 |
- myfile = open(infile + '.index', 'wb') |
849 |
+ myfile = open(_unicode_encode(infile + '.index', |
850 |
+ encoding=_fs_encoding, errors='strict'), 'wb') |
851 |
myfile.write(splits[0]) |
852 |
myfile.close() |
853 |
- myfile = open(infile + '.dat', 'wb') |
854 |
+ myfile = open(_unicode_encode(infile + '.dat', |
855 |
+ encoding=_fs_encoding, errors='strict'), 'wb') |
856 |
myfile.write(splits[1]) |
857 |
myfile.close() |
858 |
return True |
859 |
|
860 |
def xsplit_mem(mydat): |
861 |
- if mydat[0:8]!="XPAKPACK": |
862 |
+ if mydat[0:8] != _unicode_encode('XPAKPACK'): |
863 |
return None |
864 |
- if mydat[-8:]!="XPAKSTOP": |
865 |
+ if mydat[-8:] != _unicode_encode('XPAKSTOP'): |
866 |
return None |
867 |
indexsize=decodeint(mydat[8:12]) |
868 |
return (mydat[16:indexsize+16], mydat[indexsize+16:-8]) |
869 |
|
870 |
def getindex(infile): |
871 |
"""(infile) -- grabs the index segment from the infile and returns it.""" |
872 |
- myfile = open(infile, 'rb') |
873 |
+ myfile = open(_unicode_encode(infile, |
874 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
875 |
myheader=myfile.read(16) |
876 |
- if myheader[0:8]!="XPAKPACK": |
877 |
+ if myheader[0:8] != _unicode_encode('XPAKPACK'): |
878 |
myfile.close() |
879 |
return |
880 |
indexsize=decodeint(myheader[8:12]) |
881 |
@@ -143,9 +162,10 @@ |
882 |
def getboth(infile): |
883 |
"""(infile) -- grabs the index and data segments from the infile. |
884 |
Returns an array [indexSegment,dataSegment]""" |
885 |
- myfile = open(infile, 'rb') |
886 |
+ myfile = open(_unicode_encode(infile, |
887 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
888 |
myheader=myfile.read(16) |
889 |
- if myheader[0:8]!="XPAKPACK": |
890 |
+ if myheader[0:8] != _unicode_encode('XPAKPACK'): |
891 |
myfile.close() |
892 |
return |
893 |
indexsize=decodeint(myheader[8:12]) |
894 |
@@ -217,7 +237,8 @@ |
895 |
if dirname: |
896 |
if not os.path.exists(dirname): |
897 |
os.makedirs(dirname) |
898 |
- mydat = open(myname, 'wb') |
899 |
+ mydat = open(_unicode_encode(myname, |
900 |
+ encoding=_fs_encoding, errors='strict'), 'wb') |
901 |
mydat.write(mydata[datapos:datapos+datalen]) |
902 |
mydat.close() |
903 |
startpos=startpos+namelen+12 |
904 |
@@ -227,7 +248,7 @@ |
905 |
def __init__(self,myfile): |
906 |
self.file=myfile |
907 |
self.filestat=None |
908 |
- self.index="" |
909 |
+ self.index = _unicode_encode('') |
910 |
self.infosize=0 |
911 |
self.xpaksize=0 |
912 |
self.indexsize=None |
913 |
@@ -262,12 +283,13 @@ |
914 |
|
915 |
def recompose_mem(self, xpdata): |
916 |
self.scan() # Don't care about condition... We'll rewrite the data anyway. |
917 |
- myfile = open(self.file, 'ab+') |
918 |
+ myfile = open(_unicode_encode(self.file, |
919 |
+ encoding=_fs_encoding, errors='strict'), 'ab+') |
920 |
if not myfile: |
921 |
raise IOError |
922 |
myfile.seek(-self.xpaksize,2) # 0,2 or -0,2 just mean EOF. |
923 |
myfile.truncate() |
924 |
- myfile.write(xpdata+encodeint(len(xpdata))+"STOP") |
925 |
+ myfile.write(xpdata+encodeint(len(xpdata)) + _unicode_encode('STOP')) |
926 |
myfile.flush() |
927 |
myfile.close() |
928 |
return 1 |
929 |
@@ -292,28 +314,30 @@ |
930 |
mystat=os.stat(self.file) |
931 |
if self.filestat: |
932 |
changed=0 |
933 |
- for x in [ST_SIZE, ST_MTIME, ST_CTIME]: |
934 |
- if mystat[x] != self.filestat[x]: |
935 |
- changed=1 |
936 |
+ if mystat.st_size != self.filestat.st_size \ |
937 |
+ or mystat.st_mtime != self.filestat.st_mtime \ |
938 |
+ or mystat.st_ctime != self.filestat.st_ctime: |
939 |
+ changed = True |
940 |
if not changed: |
941 |
return 1 |
942 |
self.filestat=mystat |
943 |
- a = open(self.file, 'rb') |
944 |
+ a = open(_unicode_encode(self.file, |
945 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
946 |
a.seek(-16,2) |
947 |
trailer=a.read() |
948 |
self.infosize=0 |
949 |
self.xpaksize=0 |
950 |
- if trailer[-4:]!="STOP": |
951 |
+ if trailer[-4:] != _unicode_encode('STOP'): |
952 |
a.close() |
953 |
return 0 |
954 |
- if trailer[0:8]!="XPAKSTOP": |
955 |
+ if trailer[0:8] != _unicode_encode('XPAKSTOP'): |
956 |
a.close() |
957 |
return 0 |
958 |
self.infosize=decodeint(trailer[8:12]) |
959 |
self.xpaksize=self.infosize+8 |
960 |
a.seek(-(self.xpaksize),2) |
961 |
header=a.read(16) |
962 |
- if header[0:8]!="XPAKPACK": |
963 |
+ if header[0:8] != _unicode_encode('XPAKPACK'): |
964 |
a.close() |
965 |
return 0 |
966 |
self.indexsize=decodeint(header[8:12]) |
967 |
@@ -341,7 +365,8 @@ |
968 |
myresult=searchindex(self.index,myfile) |
969 |
if not myresult: |
970 |
return mydefault |
971 |
- a = open(self.file, 'rb') |
972 |
+ a = open(_unicode_encode(self.file, |
973 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
974 |
a.seek(self.datapos+myresult[0],0) |
975 |
myreturn=a.read(myresult[1]) |
976 |
a.close() |
977 |
@@ -365,7 +390,8 @@ |
978 |
except: |
979 |
os.chdir("/") |
980 |
origdir="/" |
981 |
- a = open(self.file, 'rb') |
982 |
+ a = open(_unicode_encode(self.file, |
983 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
984 |
if not os.path.exists(mydest): |
985 |
os.makedirs(mydest) |
986 |
os.chdir(mydest) |
987 |
@@ -379,7 +405,8 @@ |
988 |
if dirname: |
989 |
if not os.path.exists(dirname): |
990 |
os.makedirs(dirname) |
991 |
- mydat = open(myname, 'wb') |
992 |
+ mydat = open(_unicode_encode(myname, |
993 |
+ encoding=_fs_encoding, errors='strict'), 'wb') |
994 |
a.seek(self.datapos+datapos) |
995 |
mydat.write(a.read(datalen)) |
996 |
mydat.close() |
997 |
@@ -392,7 +419,8 @@ |
998 |
"""Returns all the files from the dataSegment as a map object.""" |
999 |
if not self.scan(): |
1000 |
return 0 |
1001 |
- a = open(self.file, 'rb') |
1002 |
+ a = open(_unicode_encode(self.file, |
1003 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
1004 |
mydata = {} |
1005 |
startpos=0 |
1006 |
while ((startpos+8)<self.indexsize): |
1007 |
@@ -411,7 +439,8 @@ |
1008 |
if not self.scan(): |
1009 |
return None |
1010 |
|
1011 |
- a = open(self.file, 'rb') |
1012 |
+ a = open(_unicode_encode(self.file, |
1013 |
+ encoding=_fs_encoding, errors='strict'), 'rb') |
1014 |
a.seek(self.datapos) |
1015 |
mydata =a.read(self.datasize) |
1016 |
a.close() |