1 |
commit: 807324e57f63918e0ee64837608caaa28a0e53a6 |
2 |
Author: Brian Dolbec <dolsen <AT> gentoo <DOT> org> |
3 |
AuthorDate: Fri May 30 18:36:11 2014 +0000 |
4 |
Commit: Brian Dolbec <brian.dolbec <AT> gmail <DOT> com> |
5 |
CommitDate: Fri May 30 19:22:51 2014 +0000 |
6 |
URL: http://git.overlays.gentoo.org/gitweb/?p=proj/portage.git;a=commit;h=807324e5 |
7 |
|
8 |
Repoman: Create a new QATracker class |
9 |
|
10 |
Make use of the new QATracker class to track all fails. |
11 |
Use the length of each tracker for stats count. |
12 |
Only store trackers if they occurred. |
13 |
QATracker roughed in for tracking warnings as well as errors. |
14 |
|
15 |
--- |
16 |
pym/repoman/checks/ebuilds/thirdpartymirrors.py | 11 +- |
17 |
pym/repoman/main.py | 261 +++++++++--------------- |
18 |
pym/repoman/qa_data.py | 16 +- |
19 |
pym/repoman/qa_tracker.py | 46 +++++ |
20 |
pym/repoman/vcs/vcsstatus.py | 23 +-- |
21 |
5 files changed, 161 insertions(+), 196 deletions(-) |
22 |
|
23 |
diff --git a/pym/repoman/checks/ebuilds/thirdpartymirrors.py b/pym/repoman/checks/ebuilds/thirdpartymirrors.py |
24 |
index 976a62c..cce61f6 100644 |
25 |
--- a/pym/repoman/checks/ebuilds/thirdpartymirrors.py |
26 |
+++ b/pym/repoman/checks/ebuilds/thirdpartymirrors.py |
27 |
@@ -4,7 +4,7 @@ import portage |
28 |
|
29 |
class ThirdPartyMirrors(object): |
30 |
|
31 |
- def __init__(self, repoman_settings): |
32 |
+ def __init__(self, repoman_settings, qatracker): |
33 |
# Build a regex from thirdpartymirrors for the SRC_URI.mirror check. |
34 |
self.thirdpartymirrors = {} |
35 |
for k, v in repoman_settings.thirdpartymirrors().items(): |
36 |
@@ -12,14 +12,10 @@ class ThirdPartyMirrors(object): |
37 |
if not v.endswith("/"): |
38 |
v += "/" |
39 |
self.thirdpartymirrors[v] = k |
40 |
- self.stats = 0 |
41 |
- self.fails = [] |
42 |
+ self.qatracker = qatracker |
43 |
|
44 |
|
45 |
def check(self, myaux, relative_path): |
46 |
- # reset our stats in case this is a repeat run |
47 |
- self.stats = 0 |
48 |
- self.fails = [] |
49 |
# Check that URIs don't reference a server from thirdpartymirrors. |
50 |
for uri in portage.dep.use_reduce( |
51 |
myaux["SRC_URI"], matchall=True, is_src_uri=True, |
52 |
@@ -33,8 +29,7 @@ class ThirdPartyMirrors(object): |
53 |
continue |
54 |
|
55 |
new_uri = "mirror://%s/%s" % (mirror_alias, uri[len(mirror):]) |
56 |
- self.stats += 1 |
57 |
- self.fails.append( |
58 |
+ self.qatracker.add_error("SRC_URI.mirror", |
59 |
"%s: '%s' found in thirdpartymirrors, use '%s'" % |
60 |
(relative_path, mirror, new_uri)) |
61 |
return |
62 |
|
63 |
diff --git a/pym/repoman/main.py b/pym/repoman/main.py |
64 |
index f48c8ba..3a1ff7a 100755 |
65 |
--- a/pym/repoman/main.py |
66 |
+++ b/pym/repoman/main.py |
67 |
@@ -77,6 +77,7 @@ from repoman.profile import check_profiles, dev_keywords, setup_profile |
68 |
from repoman.qa_data import (format_qa_output, format_qa_output_column, qahelp, |
69 |
qawarnings, qacats, no_exec, allvars, max_desc_len, missingvars, |
70 |
ruby_deprecated, suspect_virtual, suspect_rdepend, valid_restrict) |
71 |
+from repoman.qa_tracker import QATracker |
72 |
from repoman.repos import has_global_mask, RepoSettings, repo_metadata |
73 |
from repoman.scan import Changes, scan |
74 |
from repoman._subprocess import repoman_popen, repoman_getstatusoutput |
75 |
@@ -258,12 +259,7 @@ scanlist = scan(repolevel, reposplit, startdir, categories, repo_settings) |
76 |
|
77 |
dev_keywords = dev_keywords(profiles) |
78 |
|
79 |
-stats = {} |
80 |
-fails = {} |
81 |
- |
82 |
-for x in qacats: |
83 |
- stats[x] = 0 |
84 |
- fails[x] = [] |
85 |
+qatracker = QATracker() |
86 |
|
87 |
#################### |
88 |
|
89 |
@@ -348,8 +344,7 @@ for xpkg in effective_scanlist: |
90 |
repoman_settings['O'] = checkdir |
91 |
repoman_settings['PORTAGE_QUIET'] = '1' |
92 |
if not portage.digestcheck([], repoman_settings, strict=1): |
93 |
- stats["manifest.bad"] += 1 |
94 |
- fails["manifest.bad"].append(os.path.join(xpkg, 'Manifest')) |
95 |
+ qatracker.add_error("manifest.bad", os.path.join(xpkg, 'Manifest')) |
96 |
repoman_settings.pop('PORTAGE_QUIET', None) |
97 |
|
98 |
if options.mode == 'manifest-check': |
99 |
@@ -368,8 +363,7 @@ for xpkg in effective_scanlist: |
100 |
os.stat(os.path.join(checkdir, y)).st_mode) & 0o111 |
101 |
|
102 |
if file_is_executable: |
103 |
- stats["file.executable"] += 1 |
104 |
- fails["file.executable"].append(os.path.join(checkdir, y)) |
105 |
+ qatracker.add_error("file.executable", os.path.join(checkdir, y)) |
106 |
if file_is_ebuild: |
107 |
pf = y[:-7] |
108 |
ebuildlist.append(pf) |
109 |
@@ -378,18 +372,15 @@ for xpkg in effective_scanlist: |
110 |
myaux = dict(zip(allvars, portdb.aux_get(cpv, allvars))) |
111 |
except KeyError: |
112 |
allvalid = False |
113 |
- stats["ebuild.syntax"] += 1 |
114 |
- fails["ebuild.syntax"].append(os.path.join(xpkg, y)) |
115 |
+ qatracker.add_error("ebuild.syntax", os.path.join(xpkg, y)) |
116 |
continue |
117 |
except IOError: |
118 |
allvalid = False |
119 |
- stats["ebuild.output"] += 1 |
120 |
- fails["ebuild.output"].append(os.path.join(xpkg, y)) |
121 |
+ qatracker.add_error("ebuild.output", os.path.join(xpkg, y)) |
122 |
continue |
123 |
if not portage.eapi_is_supported(myaux["EAPI"]): |
124 |
allvalid = False |
125 |
- stats["EAPI.unsupported"] += 1 |
126 |
- fails["EAPI.unsupported"].append(os.path.join(xpkg, y)) |
127 |
+ qatracker.add_error("EAPI.unsupported", os.path.join(xpkg, y)) |
128 |
continue |
129 |
pkgs[pf] = Package( |
130 |
cpv=cpv, metadata=myaux, root_config=root_config, |
131 |
@@ -421,8 +412,7 @@ for xpkg in effective_scanlist: |
132 |
# prohibited characters). See bug #406877. |
133 |
index = -1 |
134 |
if index != -1: |
135 |
- stats["file.name"] += 1 |
136 |
- fails["file.name"].append( |
137 |
+ qatracker.add_error("file.name", |
138 |
"%s/%s: char '%s'" % (checkdir, y, y[index])) |
139 |
|
140 |
if not (y in ("ChangeLog", "metadata.xml") or y.endswith(".ebuild")): |
141 |
@@ -438,25 +428,21 @@ for xpkg in effective_scanlist: |
142 |
for l in f: |
143 |
line += 1 |
144 |
except UnicodeDecodeError as ue: |
145 |
- stats["file.UTF8"] += 1 |
146 |
s = ue.object[:ue.start] |
147 |
l2 = s.count("\n") |
148 |
line += l2 |
149 |
if l2 != 0: |
150 |
s = s[s.rfind("\n") + 1:] |
151 |
- fails["file.UTF8"].append( |
152 |
+ qatracker.add_error("file.UTF8", |
153 |
"%s/%s: line %i, just after: '%s'" % (checkdir, y, line, s)) |
154 |
finally: |
155 |
if f is not None: |
156 |
f.close() |
157 |
|
158 |
############### |
159 |
- status_check = VCSStatus(vcs_settings, checkdir, checkdir_relative, xpkg) |
160 |
+ status_check = VCSStatus(vcs_settings, checkdir, checkdir_relative, xpkg, qatracker) |
161 |
status_check.check(check_ebuild_notadded) |
162 |
eadded.extend(status_check.eadded) |
163 |
- for key in list(status_check.stats): |
164 |
- stats[key] += status_check.stats[key] |
165 |
- fails[key].extend(status_check.fails[key]) |
166 |
############### |
167 |
|
168 |
mf = repoman_settings.repositories.get_repo_for_location( |
169 |
@@ -478,8 +464,7 @@ for xpkg in effective_scanlist: |
170 |
# This will be reported as an "ebuild.syntax" error. |
171 |
pass |
172 |
else: |
173 |
- stats["SRC_URI.syntax"] += 1 |
174 |
- fails["SRC_URI.syntax"].append( |
175 |
+ qatracker.add_error("SRC_URI.syntax", |
176 |
"%s.ebuild SRC_URI: %s" % (mykey, e)) |
177 |
del fetchlist_dict |
178 |
if not src_uri_error: |
179 |
@@ -491,12 +476,10 @@ for xpkg in effective_scanlist: |
180 |
myfiles_all = set(myfiles_all) |
181 |
for entry in mydigests: |
182 |
if entry not in myfiles_all: |
183 |
- stats["digest.unused"] += 1 |
184 |
- fails["digest.unused"].append(checkdir + "::" + entry) |
185 |
+ qatracker.add_error("digest.unused", checkdir + "::" + entry) |
186 |
for entry in myfiles_all: |
187 |
if entry not in mydigests: |
188 |
- stats["digest.missing"] += 1 |
189 |
- fails["digest.missing"].append(checkdir + "::" + entry) |
190 |
+ qatracker.add_error("digest.missing", checkdir + "::" + entry) |
191 |
del myfiles_all |
192 |
|
193 |
if os.path.exists(checkdir + "/files"): |
194 |
@@ -529,12 +512,10 @@ for xpkg in effective_scanlist: |
195 |
# File size between 20 KiB and 60 KiB causes a warning, |
196 |
# while file size over 60 KiB causes an error. |
197 |
elif mystat.st_size > 61440: |
198 |
- stats["file.size.fatal"] += 1 |
199 |
- fails["file.size.fatal"].append( |
200 |
+ qatracker.add_error("file.size.fatal", |
201 |
"(%d KiB) %s/files/%s" % (mystat.st_size // 1024, xpkg, y)) |
202 |
elif mystat.st_size > 20480: |
203 |
- stats["file.size"] += 1 |
204 |
- fails["file.size"].append( |
205 |
+ qatracker.add_error("file.size", |
206 |
"(%d KiB) %s/files/%s" % (mystat.st_size // 1024, xpkg, y)) |
207 |
|
208 |
index = repo_settings.repo_config.find_invalid_path_char(y) |
209 |
@@ -547,20 +528,17 @@ for xpkg in effective_scanlist: |
210 |
# prohibited characters). See bug #406877. |
211 |
index = -1 |
212 |
if index != -1: |
213 |
- stats["file.name"] += 1 |
214 |
- fails["file.name"].append( |
215 |
+ qatracker.add_error("file.name", |
216 |
"%s/files/%s: char '%s'" % (checkdir, y, y[index])) |
217 |
del mydigests |
218 |
|
219 |
if check_changelog and "ChangeLog" not in checkdirlist: |
220 |
- stats["changelog.missing"] += 1 |
221 |
- fails["changelog.missing"].append(xpkg + "/ChangeLog") |
222 |
+ qatracker.add_error("changelog.missing", xpkg + "/ChangeLog") |
223 |
|
224 |
musedict = {} |
225 |
# metadata.xml file check |
226 |
if "metadata.xml" not in checkdirlist: |
227 |
- stats["metadata.missing"] += 1 |
228 |
- fails["metadata.missing"].append(xpkg + "/metadata.xml") |
229 |
+ qatracker.add_error("metadata.missing", xpkg + "/metadata.xml") |
230 |
# metadata.xml parse check |
231 |
else: |
232 |
metadata_bad = False |
233 |
@@ -576,8 +554,7 @@ for xpkg in effective_scanlist: |
234 |
parser=xml_parser) |
235 |
except (ExpatError, SyntaxError, EnvironmentError) as e: |
236 |
metadata_bad = True |
237 |
- stats["metadata.bad"] += 1 |
238 |
- fails["metadata.bad"].append("%s/metadata.xml: %s" % (xpkg, e)) |
239 |
+ qatracker.add_error("metadata.bad", "%s/metadata.xml: %s" % (xpkg, e)) |
240 |
del e |
241 |
else: |
242 |
if not hasattr(xml_parser, 'parser') or \ |
243 |
@@ -587,8 +564,7 @@ for xpkg in effective_scanlist: |
244 |
pass |
245 |
else: |
246 |
if "XML_DECLARATION" not in xml_info: |
247 |
- stats["metadata.bad"] += 1 |
248 |
- fails["metadata.bad"].append( |
249 |
+ qatracker.add_error("metadata.bad", |
250 |
"%s/metadata.xml: " |
251 |
"xml declaration is missing on first line, " |
252 |
"should be '%s'" % (xpkg, metadata_xml_declaration)) |
253 |
@@ -597,38 +573,34 @@ for xpkg in effective_scanlist: |
254 |
xml_info["XML_DECLARATION"] |
255 |
if xml_encoding is None or \ |
256 |
xml_encoding.upper() != metadata_xml_encoding: |
257 |
- stats["metadata.bad"] += 1 |
258 |
if xml_encoding is None: |
259 |
encoding_problem = "but it is undefined" |
260 |
else: |
261 |
encoding_problem = "not '%s'" % xml_encoding |
262 |
- fails["metadata.bad"].append( |
263 |
+ qatracker.add_error("metadata.bad", |
264 |
"%s/metadata.xml: " |
265 |
"xml declaration encoding should be '%s', %s" % |
266 |
(xpkg, metadata_xml_encoding, encoding_problem)) |
267 |
|
268 |
if "DOCTYPE" not in xml_info: |
269 |
metadata_bad = True |
270 |
- stats["metadata.bad"] += 1 |
271 |
- fails["metadata.bad"].append( |
272 |
+ qatracker.add_error("metadata.bad", |
273 |
"%s/metadata.xml: %s" % (xpkg, "DOCTYPE is missing")) |
274 |
else: |
275 |
doctype_name, doctype_system, doctype_pubid = \ |
276 |
xml_info["DOCTYPE"] |
277 |
if doctype_system != metadata_dtd_uri: |
278 |
- stats["metadata.bad"] += 1 |
279 |
if doctype_system is None: |
280 |
system_problem = "but it is undefined" |
281 |
else: |
282 |
system_problem = "not '%s'" % doctype_system |
283 |
- fails["metadata.bad"].append( |
284 |
+ qatracker.add_error("metadata.bad", |
285 |
"%s/metadata.xml: " |
286 |
"DOCTYPE: SYSTEM should refer to '%s', %s" % |
287 |
(xpkg, metadata_dtd_uri, system_problem)) |
288 |
|
289 |
if doctype_name != metadata_doctype_name: |
290 |
- stats["metadata.bad"] += 1 |
291 |
- fails["metadata.bad"].append( |
292 |
+ qatracker.add_error("metadata.bad", |
293 |
"%s/metadata.xml: " |
294 |
"DOCTYPE: name should be '%s', not '%s'" % |
295 |
(xpkg, metadata_doctype_name, doctype_name)) |
296 |
@@ -638,8 +610,8 @@ for xpkg in effective_scanlist: |
297 |
musedict = utilities.parse_metadata_use(_metadata_xml) |
298 |
except portage.exception.ParseError as e: |
299 |
metadata_bad = True |
300 |
- stats["metadata.bad"] += 1 |
301 |
- fails["metadata.bad"].append("%s/metadata.xml: %s" % (xpkg, e)) |
302 |
+ qatracker.add_error("metadata.bad", |
303 |
+ "%s/metadata.xml: %s" % (xpkg, e)) |
304 |
else: |
305 |
for atom in chain(*musedict.values()): |
306 |
if atom is None: |
307 |
@@ -647,13 +619,11 @@ for xpkg in effective_scanlist: |
308 |
try: |
309 |
atom = Atom(atom) |
310 |
except InvalidAtom as e: |
311 |
- stats["metadata.bad"] += 1 |
312 |
- fails["metadata.bad"].append( |
313 |
+ qatracker.add_error("metadata.bad", |
314 |
"%s/metadata.xml: Invalid atom: %s" % (xpkg, e)) |
315 |
else: |
316 |
if atom.cp != xpkg: |
317 |
- stats["metadata.bad"] += 1 |
318 |
- fails["metadata.bad"].append( |
319 |
+ qatracker.add_error("metadata.bad", |
320 |
"%s/metadata.xml: Atom contains " |
321 |
"unexpected cat/pn: %s" % (xpkg, atom)) |
322 |
|
323 |
@@ -662,16 +632,15 @@ for xpkg in effective_scanlist: |
324 |
utilities.check_metadata(_metadata_xml, herd_base) |
325 |
except (utilities.UnknownHerdsError, ) as e: |
326 |
metadata_bad = True |
327 |
- stats["metadata.bad"] += 1 |
328 |
- fails["metadata.bad"].append("%s/metadata.xml: %s" % (xpkg, e)) |
329 |
+ qatracker.add_error("metadata.bad", |
330 |
+ "%s/metadata.xml: %s" % (xpkg, e)) |
331 |
del e |
332 |
|
333 |
################# |
334 |
# Only carry out if in package directory or check forced |
335 |
if not metadata_bad: |
336 |
if not xmllint.check(checkdir): |
337 |
- stats["metadata.bad"] += 1 |
338 |
- fails["metadata.bad"].append(xpkg + "/metadata.xml") |
339 |
+ qatracker.add_error("metadata.bad", xpkg + "/metadata.xml") |
340 |
|
341 |
################# |
342 |
del metadata_bad |
343 |
@@ -690,13 +659,12 @@ for xpkg in effective_scanlist: |
344 |
|
345 |
if check_changelog and not changelog_modified \ |
346 |
and ebuild.ebuild_path in changed.new_ebuilds: |
347 |
- stats['changelog.ebuildadded'] += 1 |
348 |
- fails['changelog.ebuildadded'].append(ebuild.relative_path) |
349 |
+ qatracker.add_error('changelog.ebuildadded', ebuild.relative_path) |
350 |
|
351 |
if ebuild.untracked(check_ebuild_notadded, y_ebuild, eadded): |
352 |
# ebuild not added to vcs |
353 |
- stats["ebuild.notadded"] += 1 |
354 |
- fails["ebuild.notadded"].append(xpkg + "/" + y_ebuild + ".ebuild") |
355 |
+ qatracker.add_error("ebuild.notadded", |
356 |
+ xpkg + "/" + y_ebuild + ".ebuild") |
357 |
myesplit = portage.pkgsplit(y_ebuild) |
358 |
|
359 |
is_bad_split = myesplit is None or myesplit[0] != xpkg.split("/")[-1] |
360 |
@@ -706,13 +674,13 @@ for xpkg in effective_scanlist: |
361 |
is_pv_toolong2 = pv_toolong_re.search(myesplit[2]) |
362 |
|
363 |
if is_pv_toolong or is_pv_toolong2: |
364 |
- stats["ebuild.invalidname"] += 1 |
365 |
- fails["ebuild.invalidname"].append(xpkg + "/" + y_ebuild + ".ebuild") |
366 |
+ qatracker.add_error("ebuild.invalidname", |
367 |
+ xpkg + "/" + y_ebuild + ".ebuild") |
368 |
continue |
369 |
elif myesplit[0] != pkgdir: |
370 |
print(pkgdir, myesplit[0]) |
371 |
- stats["ebuild.namenomatch"] += 1 |
372 |
- fails["ebuild.namenomatch"].append(xpkg + "/" + y_ebuild + ".ebuild") |
373 |
+ qatracker.add_error("ebuild.namenomatch", |
374 |
+ xpkg + "/" + y_ebuild + ".ebuild") |
375 |
continue |
376 |
|
377 |
pkg = pkgs[y_ebuild] |
378 |
@@ -721,8 +689,8 @@ for xpkg in effective_scanlist: |
379 |
allvalid = False |
380 |
for k, msgs in pkg.invalid.items(): |
381 |
for msg in msgs: |
382 |
- stats[k] += 1 |
383 |
- fails[k].append("%s: %s" % (ebuild.relative_path, msg)) |
384 |
+ qatracker.add_error(k, |
385 |
+ "%s: %s" % (ebuild.relative_path, msg)) |
386 |
continue |
387 |
|
388 |
myaux = pkg._metadata |
389 |
@@ -731,13 +699,11 @@ for xpkg in effective_scanlist: |
390 |
live_ebuild = live_eclasses.intersection(inherited) |
391 |
|
392 |
if repo_settings.repo_config.eapi_is_banned(eapi): |
393 |
- stats["repo.eapi.banned"] += 1 |
394 |
- fails["repo.eapi.banned"].append( |
395 |
+ qatracker.add_error("repo.eapi.banned", |
396 |
"%s: %s" % (ebuild.relative_path, eapi)) |
397 |
|
398 |
elif repo_settings.repo_config.eapi_is_deprecated(eapi): |
399 |
- stats["repo.eapi.deprecated"] += 1 |
400 |
- fails["repo.eapi.deprecated"].append( |
401 |
+ qatracker.add_error("repo.eapi.deprecated", |
402 |
"%s: %s" % (ebuild.relative_path, eapi)) |
403 |
|
404 |
for k, v in myaux.items(): |
405 |
@@ -745,22 +711,18 @@ for xpkg in effective_scanlist: |
406 |
continue |
407 |
m = non_ascii_re.search(v) |
408 |
if m is not None: |
409 |
- stats["variable.invalidchar"] += 1 |
410 |
- fails["variable.invalidchar"].append( |
411 |
+ qatracker.add_error("variable.invalidchar", |
412 |
"%s: %s variable contains non-ASCII " |
413 |
"character at position %s" % |
414 |
(ebuild.relative_path, k, m.start() + 1)) |
415 |
|
416 |
if not src_uri_error: |
417 |
####################### |
418 |
- thirdparty = ThirdPartyMirrors(repoman_settings) |
419 |
+ thirdparty = ThirdPartyMirrors(repoman_settings, qatracker) |
420 |
thirdparty.check(myaux, ebuild.relative_path) |
421 |
- stats["SRC_URI.mirror"] = thirdparty.stats |
422 |
- fails["SRC_URI.mirror"] = thirdparty.fails |
423 |
####################### |
424 |
if myaux.get("PROVIDE"): |
425 |
- stats["virtual.oldstyle"] += 1 |
426 |
- fails["virtual.oldstyle"].append(ebuild.relative_path) |
427 |
+ qatracker.add_error("virtual.oldstyle", ebuild.relative_path) |
428 |
|
429 |
for pos, missing_var in enumerate(missingvars): |
430 |
if not myaux.get(missing_var): |
431 |
@@ -770,26 +732,22 @@ for xpkg in effective_scanlist: |
432 |
if live_ebuild and missing_var == "KEYWORDS": |
433 |
continue |
434 |
myqakey = missingvars[pos] + ".missing" |
435 |
- stats[myqakey] += 1 |
436 |
- fails[myqakey].append(xpkg + "/" + y_ebuild + ".ebuild") |
437 |
+ qatracker.add_error(myqakey, xpkg + "/" + y_ebuild + ".ebuild") |
438 |
|
439 |
if catdir == "virtual": |
440 |
for var in ("HOMEPAGE", "LICENSE"): |
441 |
if myaux.get(var): |
442 |
myqakey = var + ".virtual" |
443 |
- stats[myqakey] += 1 |
444 |
- fails[myqakey].append(ebuild.relative_path) |
445 |
+ qatracker.add_error(myqakey, ebuild.relative_path) |
446 |
|
447 |
if myaux['DESCRIPTION'][-1:] in ['.']: |
448 |
- stats['DESCRIPTION.punctuation'] += 1 |
449 |
- fails['DESCRIPTION.punctuation'].append( |
450 |
+ qatracker.add_error('DESCRIPTION.punctuation', |
451 |
"%s: DESCRIPTION ends with a '%s' character" |
452 |
% (ebuild.relative_path, myaux['DESCRIPTION'][-1:])) |
453 |
|
454 |
# 14 is the length of DESCRIPTION="" |
455 |
if len(myaux['DESCRIPTION']) > max_desc_len: |
456 |
- stats['DESCRIPTION.toolong'] += 1 |
457 |
- fails['DESCRIPTION.toolong'].append( |
458 |
+ qatracker.add_error('DESCRIPTION.toolong', |
459 |
"%s: DESCRIPTION is %d characters (max %d)" % |
460 |
(ebuild.relative_path, len(myaux['DESCRIPTION']), max_desc_len)) |
461 |
|
462 |
@@ -802,8 +760,7 @@ for xpkg in effective_scanlist: |
463 |
if stable_keywords: |
464 |
if ebuild.ebuild_path in changed.new_ebuilds and catdir != "virtual": |
465 |
stable_keywords.sort() |
466 |
- stats["KEYWORDS.stable"] += 1 |
467 |
- fails["KEYWORDS.stable"].append( |
468 |
+ qatracker.add_error("KEYWORDS.stable", |
469 |
"%s/%s.ebuild added with stable keywords: %s" % |
470 |
(xpkg, y_ebuild, " ".join(stable_keywords))) |
471 |
|
472 |
@@ -816,8 +773,7 @@ for xpkg in effective_scanlist: |
473 |
elif ebuild_archs and "*" not in ebuild_archs and not live_ebuild: |
474 |
dropped_keywords = previous_keywords.difference(ebuild_archs) |
475 |
if dropped_keywords: |
476 |
- stats["KEYWORDS.dropped"] += 1 |
477 |
- fails["KEYWORDS.dropped"].append( |
478 |
+ qatracker.add_error("KEYWORDS.dropped", |
479 |
"%s: %s" % |
480 |
(ebuild.relative_path, " ".join(sorted(dropped_keywords)))) |
481 |
|
482 |
@@ -833,8 +789,8 @@ for xpkg in effective_scanlist: |
483 |
if kw in kwlist: |
484 |
haskeyword = True |
485 |
if not haskeyword: |
486 |
- stats["KEYWORDS.stupid"] += 1 |
487 |
- fails["KEYWORDS.stupid"].append(xpkg + "/" + y_ebuild + ".ebuild") |
488 |
+ qatracker.add_error("KEYWORDS.stupid", |
489 |
+ xpkg + "/" + y_ebuild + ".ebuild") |
490 |
|
491 |
""" |
492 |
Ebuilds that inherit a "Live" eclass (darcs,subversion,git,cvs,etc..) should |
493 |
@@ -848,15 +804,13 @@ for xpkg in effective_scanlist: |
494 |
bad_stable_keywords.append(keyword) |
495 |
del keyword |
496 |
if bad_stable_keywords: |
497 |
- stats["LIVEVCS.stable"] += 1 |
498 |
- fails["LIVEVCS.stable"].append( |
499 |
+ qatracker.add_error("LIVEVCS.stable", |
500 |
"%s/%s.ebuild with stable keywords:%s " % |
501 |
(xpkg, y_ebuild, bad_stable_keywords)) |
502 |
del bad_stable_keywords |
503 |
|
504 |
if keywords and not has_global_mask(pkg): |
505 |
- stats["LIVEVCS.unmasked"] += 1 |
506 |
- fails["LIVEVCS.unmasked"].append(ebuild.relative_path) |
507 |
+ qatracker.add_error("LIVEVCS.unmasked", ebuild.relative_path) |
508 |
|
509 |
if options.ignore_arches: |
510 |
arches = [[ |
511 |
@@ -923,8 +877,7 @@ for xpkg in effective_scanlist: |
512 |
if atoms and mytype.endswith("DEPEND"): |
513 |
if runtime and \ |
514 |
"test?" in mydepstr.split(): |
515 |
- stats[mytype + '.suspect'] += 1 |
516 |
- fails[mytype + '.suspect'].append( |
517 |
+ qatracker.add_error(mytype + '.suspect', |
518 |
"%s: 'test?' USE conditional in %s" % |
519 |
(ebuild.relative_path, mytype)) |
520 |
|
521 |
@@ -945,8 +898,7 @@ for xpkg in effective_scanlist: |
522 |
if catdir != "virtual": |
523 |
if not is_blocker and \ |
524 |
atom.cp in suspect_virtual: |
525 |
- stats['virtual.suspect'] += 1 |
526 |
- fails['virtual.suspect'].append( |
527 |
+ qatracker.add_error('virtual.suspect', |
528 |
ebuild.relative_path + |
529 |
": %s: consider using '%s' instead of '%s'" % |
530 |
(mytype, suspect_virtual[atom.cp], atom)) |
531 |
@@ -955,28 +907,25 @@ for xpkg in effective_scanlist: |
532 |
not is_blocker and \ |
533 |
not inherited_java_eclass and \ |
534 |
atom.cp == "virtual/jdk": |
535 |
- stats['java.eclassesnotused'] += 1 |
536 |
- fails['java.eclassesnotused'].append(ebuild.relative_path) |
537 |
+ qatracker.add_error('java.eclassesnotused', |
538 |
+ ebuild.relative_path) |
539 |
elif buildtime and \ |
540 |
not is_blocker and \ |
541 |
not inherited_wxwidgets_eclass and \ |
542 |
atom.cp == "x11-libs/wxGTK": |
543 |
- stats['wxwidgets.eclassnotused'] += 1 |
544 |
- fails['wxwidgets.eclassnotused'].append( |
545 |
+ qatracker.add_error('wxwidgets.eclassnotused', |
546 |
"%s: %ss on x11-libs/wxGTK without inheriting" |
547 |
" wxwidgets.eclass" % (ebuild.relative_path, mytype)) |
548 |
elif runtime: |
549 |
if not is_blocker and \ |
550 |
atom.cp in suspect_rdepend: |
551 |
- stats[mytype + '.suspect'] += 1 |
552 |
- fails[mytype + '.suspect'].append( |
553 |
+ qatracker.add_error(mytype + '.suspect', |
554 |
ebuild.relative_path + ": '%s'" % atom) |
555 |
|
556 |
if atom.operator == "~" and \ |
557 |
portage.versions.catpkgsplit(atom.cpv)[3] != "r0": |
558 |
qacat = 'dependency.badtilde' |
559 |
- stats[qacat] += 1 |
560 |
- fails[qacat].append( |
561 |
+ qatracker.add_error(qacat, |
562 |
"%s: %s uses the ~ operator" |
563 |
" with a non-zero revision: '%s'" % |
564 |
(ebuild.relative_path, mytype, atom)) |
565 |
@@ -988,8 +937,8 @@ for xpkg in effective_scanlist: |
566 |
qacat = "dependency.syntax" |
567 |
else: |
568 |
qacat = m + ".syntax" |
569 |
- stats[qacat] += 1 |
570 |
- fails[qacat].append("%s: %s: %s" % (ebuild.relative_path, m, b)) |
571 |
+ qatracker.add_error(qacat, |
572 |
+ "%s: %s: %s" % (ebuild.relative_path, m, b)) |
573 |
|
574 |
badlicsyntax = len([z for z in type_list if z == "LICENSE"]) |
575 |
badprovsyntax = len([z for z in type_list if z == "PROVIDE"]) |
576 |
@@ -1015,15 +964,14 @@ for xpkg in effective_scanlist: |
577 |
|
578 |
if default_use and not eapi_has_iuse_defaults(eapi): |
579 |
for myflag in default_use: |
580 |
- stats['EAPI.incompatible'] += 1 |
581 |
- fails['EAPI.incompatible'].append( |
582 |
+ qatracker.add_error('EAPI.incompatible', |
583 |
"%s: IUSE defaults" |
584 |
" not supported with EAPI='%s': '%s'" % |
585 |
(ebuild.relative_path, eapi, myflag)) |
586 |
|
587 |
for mypos in range(len(myuse)): |
588 |
- stats["IUSE.invalid"] += 1 |
589 |
- fails["IUSE.invalid"].append(xpkg + "/" + y_ebuild + ".ebuild: %s" % myuse[mypos]) |
590 |
+ qatracker.add_error("IUSE.invalid", |
591 |
+ xpkg + "/" + y_ebuild + ".ebuild: %s" % myuse[mypos]) |
592 |
|
593 |
# Check for outdated RUBY targets |
594 |
old_ruby_eclasses = ["ruby-ng", "ruby-fakegem", "ruby"] |
595 |
@@ -1033,9 +981,9 @@ for xpkg in effective_scanlist: |
596 |
ruby_intersection = pkg.iuse.all.intersection(ruby_deprecated) |
597 |
if ruby_intersection: |
598 |
for myruby in ruby_intersection: |
599 |
- stats["IUSE.rubydeprecated"] += 1 |
600 |
- fails["IUSE.rubydeprecated"].append( |
601 |
- (ebuild.relative_path + ": Deprecated ruby target: %s") % myruby) |
602 |
+ qatracker.add_error("IUSE.rubydeprecated", |
603 |
+ (ebuild.relative_path + ": Deprecated ruby target: %s") |
604 |
+ % myruby) |
605 |
|
606 |
# license checks |
607 |
if not badlicsyntax: |
608 |
@@ -1048,11 +996,11 @@ for xpkg in effective_scanlist: |
609 |
# Need to check for "||" manually as no portage |
610 |
# function will remove it without removing values. |
611 |
if lic not in liclist and lic != "||": |
612 |
- stats["LICENSE.invalid"] += 1 |
613 |
- fails["LICENSE.invalid"].append(xpkg + "/" + y_ebuild + ".ebuild: %s" % lic) |
614 |
+ qatracker.add_error("LICENSE.invalid", |
615 |
+ xpkg + "/" + y_ebuild + ".ebuild: %s" % lic) |
616 |
elif lic in liclist_deprecated: |
617 |
- stats["LICENSE.deprecated"] += 1 |
618 |
- fails["LICENSE.deprecated"].append("%s: %s" % (ebuild.relative_path, lic)) |
619 |
+ qatracker.add_error("LICENSE.deprecated", |
620 |
+ "%s: %s" % (ebuild.relative_path, lic)) |
621 |
|
622 |
# keyword checks |
623 |
myuse = myaux["KEYWORDS"].split() |
624 |
@@ -1064,13 +1012,12 @@ for xpkg in effective_scanlist: |
625 |
if myskey[:1] == "~": |
626 |
myskey = myskey[1:] |
627 |
if myskey not in kwlist: |
628 |
- stats["KEYWORDS.invalid"] += 1 |
629 |
- fails["KEYWORDS.invalid"].append( |
630 |
+ qatracker.add_error("KEYWORDS.invalid", |
631 |
"%s/%s.ebuild: %s" % (xpkg, y_ebuild, mykey)) |
632 |
elif myskey not in profiles: |
633 |
- stats["KEYWORDS.invalid"] += 1 |
634 |
- fails["KEYWORDS.invalid"].append( |
635 |
- "%s/%s.ebuild: %s (profile invalid)" % (xpkg, y_ebuild, mykey)) |
636 |
+ qatracker.add_error("KEYWORDS.invalid", |
637 |
+ "%s/%s.ebuild: %s (profile invalid)" |
638 |
+ % (xpkg, y_ebuild, mykey)) |
639 |
|
640 |
# restrict checks |
641 |
myrestrict = None |
642 |
@@ -1078,31 +1025,29 @@ for xpkg in effective_scanlist: |
643 |
myrestrict = portage.dep.use_reduce( |
644 |
myaux["RESTRICT"], matchall=1, flat=True) |
645 |
except portage.exception.InvalidDependString as e: |
646 |
- stats["RESTRICT.syntax"] += 1 |
647 |
- fails["RESTRICT.syntax"].append( |
648 |
+ qatracker.add_error("RESTRICT.syntax", |
649 |
"%s: RESTRICT: %s" % (ebuild.relative_path, e)) |
650 |
del e |
651 |
if myrestrict: |
652 |
myrestrict = set(myrestrict) |
653 |
mybadrestrict = myrestrict.difference(valid_restrict) |
654 |
if mybadrestrict: |
655 |
- stats["RESTRICT.invalid"] += len(mybadrestrict) |
656 |
for mybad in mybadrestrict: |
657 |
- fails["RESTRICT.invalid"].append(xpkg + "/" + y_ebuild + ".ebuild: %s" % mybad) |
658 |
+ qatracker.add_error("RESTRICT.invalid", |
659 |
+ xpkg + "/" + y_ebuild + ".ebuild: %s" % mybad) |
660 |
# REQUIRED_USE check |
661 |
required_use = myaux["REQUIRED_USE"] |
662 |
if required_use: |
663 |
if not eapi_has_required_use(eapi): |
664 |
- stats['EAPI.incompatible'] += 1 |
665 |
- fails['EAPI.incompatible'].append( |
666 |
+ qatracker.add_error('EAPI.incompatible', |
667 |
"%s: REQUIRED_USE" |
668 |
- " not supported with EAPI='%s'" % (ebuild.relative_path, eapi,)) |
669 |
+ " not supported with EAPI='%s'" |
670 |
+ % (ebuild.relative_path, eapi,)) |
671 |
try: |
672 |
portage.dep.check_required_use( |
673 |
required_use, (), pkg.iuse.is_valid_flag, eapi=eapi) |
674 |
except portage.exception.InvalidDependString as e: |
675 |
- stats["REQUIRED_USE.syntax"] += 1 |
676 |
- fails["REQUIRED_USE.syntax"].append( |
677 |
+ qatracker.add_error("REQUIRED_USE.syntax", |
678 |
"%s: REQUIRED_USE: %s" % (ebuild.relative_path, e)) |
679 |
del e |
680 |
|
681 |
@@ -1120,8 +1065,8 @@ for xpkg in effective_scanlist: |
682 |
mode='r', encoding=_encodings['repo.content']) |
683 |
try: |
684 |
for check_name, e in run_checks(f, pkg): |
685 |
- stats[check_name] += 1 |
686 |
- fails[check_name].append(ebuild.relative_path + ': %s' % e) |
687 |
+ qatracker.add_error(check_name, |
688 |
+ ebuild.relative_path + ': %s' % e) |
689 |
finally: |
690 |
f.close() |
691 |
except UnicodeDecodeError: |
692 |
@@ -1278,16 +1223,14 @@ for xpkg in effective_scanlist: |
693 |
# if we emptied out our list, continue: |
694 |
if not atoms: |
695 |
continue |
696 |
- stats[mykey] += 1 |
697 |
- fails[mykey].append( |
698 |
- "%s: %s: %s(%s) %s" % ( |
699 |
- ebuild.relative_path, mytype, keyword, prof, |
700 |
+ qatracker.add_error(mykey, |
701 |
+ "%s: %s: %s(%s) %s" |
702 |
+ % (ebuild.relative_path, mytype, keyword, prof, |
703 |
repr(atoms))) |
704 |
else: |
705 |
- stats[mykey] += 1 |
706 |
- fails[mykey].append( |
707 |
- "%s: %s: %s(%s) %s" % ( |
708 |
- ebuild.relative_path, mytype, keyword, prof, |
709 |
+ qatracker.add_error(mykey, |
710 |
+ "%s: %s: %s(%s) %s" |
711 |
+ % (ebuild.relative_path, mytype, keyword, prof, |
712 |
repr(atoms))) |
713 |
|
714 |
if not baddepsyntax and unknown_pkgs: |
715 |
@@ -1295,19 +1238,17 @@ for xpkg in effective_scanlist: |
716 |
for mytype, atom in unknown_pkgs: |
717 |
type_map.setdefault(mytype, set()).add(atom) |
718 |
for mytype, atoms in type_map.items(): |
719 |
- stats["dependency.unknown"] += 1 |
720 |
- fails["dependency.unknown"].append( |
721 |
- "%s: %s: %s" % ( |
722 |
- ebuild.relative_path, mytype, ", ".join(sorted(atoms)))) |
723 |
+ qatracker.add_error("dependency.unknown", |
724 |
+ "%s: %s: %s" |
725 |
+ % (ebuild.relative_path, mytype, ", ".join(sorted(atoms)))) |
726 |
|
727 |
# check if there are unused local USE-descriptions in metadata.xml |
728 |
# (unless there are any invalids, to avoid noise) |
729 |
if allvalid: |
730 |
for myflag in muselist.difference(used_useflags): |
731 |
- stats["metadata.warning"] += 1 |
732 |
- fails["metadata.warning"].append( |
733 |
- "%s/metadata.xml: unused local USE-description: '%s'" % |
734 |
- (xpkg, myflag)) |
735 |
+ qatracker.add_error("metadata.warning", |
736 |
+ "%s/metadata.xml: unused local USE-description: '%s'" |
737 |
+ % (xpkg, myflag)) |
738 |
|
739 |
if options.if_modified == "y" and len(effective_scanlist) < 1: |
740 |
logging.warn("--if-modified is enabled, but no modified packages were found!") |
741 |
@@ -1323,7 +1264,7 @@ dowarn = 0 |
742 |
dofull = options.mode != 'full' |
743 |
|
744 |
for x in qacats: |
745 |
- if not stats[x]: |
746 |
+ if x not in qatracker.fails: |
747 |
continue |
748 |
dowarn = 1 |
749 |
if x not in qawarnings: |
750 |
@@ -1354,7 +1295,7 @@ format_outputs = { |
751 |
|
752 |
format_output = format_outputs.get( |
753 |
options.output_style, format_outputs['default']) |
754 |
-format_output(f, stats, fails, dofull, dofail, options, qawarnings) |
755 |
+format_output(f, qatracker.fails, dofull, dofail, options, qawarnings) |
756 |
|
757 |
style_file.flush() |
758 |
del console_writer, f, style_file |
759 |
|
760 |
diff --git a/pym/repoman/qa_data.py b/pym/repoman/qa_data.py |
761 |
index 2929762..d7f0ca9 100644 |
762 |
--- a/pym/repoman/qa_data.py |
763 |
+++ b/pym/repoman/qa_data.py |
764 |
@@ -345,12 +345,11 @@ no_exec = frozenset(["Manifest", "ChangeLog", "metadata.xml"]) |
765 |
|
766 |
|
767 |
def format_qa_output( |
768 |
- formatter, stats, fails, dofull, dofail, options, qawarnings): |
769 |
+ formatter, fails, dofull, dofail, options, qawarnings): |
770 |
"""Helper function that formats output properly |
771 |
|
772 |
Args: |
773 |
formatter - a subclass of Formatter |
774 |
- stats - a dict of qa status items |
775 |
fails - a dict of qa status failures |
776 |
dofull - boolean to print full results or a summary |
777 |
dofail - boolean to decide if failure was hard or soft |
778 |
@@ -360,8 +359,8 @@ def format_qa_output( |
779 |
""" |
780 |
full = options.mode == 'full' |
781 |
# we only want key value pairs where value > 0 |
782 |
- for category, number in \ |
783 |
- filter(lambda myitem: myitem[1] > 0, sorted(stats.items())): |
784 |
+ for category in sorted(fails): |
785 |
+ number = len(fails[category]) |
786 |
formatter.add_literal_data(" " + category.ljust(30)) |
787 |
if category in qawarnings: |
788 |
formatter.push_style("WARN") |
789 |
@@ -383,7 +382,7 @@ def format_qa_output( |
790 |
|
791 |
|
792 |
def format_qa_output_column( |
793 |
- formatter, stats, fails, dofull, dofail, options, qawarnings): |
794 |
+ formatter, fails, dofull, dofail, options, qawarnings): |
795 |
"""Helper function that formats output in a machine-parseable column format |
796 |
|
797 |
@param formatter: an instance of Formatter |
798 |
@@ -403,11 +402,8 @@ def format_qa_output_column( |
799 |
@return: None (modifies formatter) |
800 |
""" |
801 |
full = options.mode == 'full' |
802 |
- for category, number in stats.items(): |
803 |
- # we only want key value pairs where value > 0 |
804 |
- if number < 1: |
805 |
- continue |
806 |
- |
807 |
+	for category in sorted(fails): |
808 |
+ number = len(fails[category]) |
809 |
formatter.add_literal_data("NumberOf " + category + " ") |
810 |
if category in qawarnings: |
811 |
formatter.push_style("WARN") |
812 |
|
813 |
diff --git a/pym/repoman/qa_tracker.py b/pym/repoman/qa_tracker.py |
814 |
new file mode 100644 |
815 |
index 0000000..48a7241 |
816 |
--- /dev/null |
817 |
+++ b/pym/repoman/qa_tracker.py |
818 |
@@ -0,0 +1,46 @@ |
819 |
+ |
820 |
+import logging |
821 |
+import sys |
822 |
+ |
823 |
+from repoman.qa_data import qacats, qawarnings |
824 |
+ |
825 |
+ |
826 |
+class QATracker(object): |
827 |
+	'''Track all occurrences of Q/A problems detected''' |
828 |
+ |
829 |
+ def __init__(self): |
830 |
+ self.fails = {} |
831 |
+ self.warns = {} |
832 |
+ |
833 |
+ |
834 |
+ def add_error(self, detected_qa, info): |
835 |
+ '''Add the Q/A error to the database of detected problems |
836 |
+ |
837 |
+ @param detected_qa: string, member of qa_data.qacats list |
838 |
+ @param info: string, details of the detected problem |
839 |
+ ''' |
840 |
+ if detected_qa not in qacats: |
841 |
+ logging.error('QATracker: Exiting on error. unknown detected_qa type passed in ' |
842 |
+ 'to add_error(): %s, %s' % (detected_qa, info)) |
843 |
+ sys.exit(1) |
844 |
+ try: |
845 |
+ self.fails[detected_qa].append(info) |
846 |
+ except KeyError: |
847 |
+ self.fails[detected_qa] = [info] |
848 |
+ |
849 |
+ |
850 |
+ def add_warning(self, detected_qa, info): |
851 |
+ '''Add the Q/A warning to the database of detected problems |
852 |
+ |
853 |
+ @param detected_qa: string, member of qa_data.qawarnings list |
854 |
+ @param info: string, details of the detected problem |
855 |
+ ''' |
856 |
+ if detected_qa not in qawarnings: |
857 |
+ logging.error('QATracker: Exiting on error. unknown detected_qa type passed in ' |
858 |
+ 'to add_warning(): %s, %s' % (detected_qa, info)) |
859 |
+ sys.exit(1) |
860 |
+ try: |
861 |
+ self.warns[detected_qa].append(info) |
862 |
+ except KeyError: |
863 |
+ self.warns[detected_qa] = [info] |
864 |
+ |
865 |
|
866 |
diff --git a/pym/repoman/vcs/vcsstatus.py b/pym/repoman/vcs/vcsstatus.py |
867 |
index 346c20e..eedf866 100644 |
868 |
--- a/pym/repoman/vcs/vcsstatus.py |
869 |
+++ b/pym/repoman/vcs/vcsstatus.py |
870 |
@@ -11,15 +11,14 @@ class VCSStatus(object): |
871 |
'''Determines the status of the vcs repositories |
872 |
to determine if files are not added''' |
873 |
|
874 |
- def __init__(self, vcs_settings, checkdir, checkdir_relative, xpkg): |
875 |
+ def __init__(self, vcs_settings, checkdir, checkdir_relative, xpkg, qatracker): |
876 |
self.vcs_settings = vcs_settings |
877 |
self.vcs = vcs_settings.vcs |
878 |
self.eadded = [] |
879 |
self.checkdir = checkdir |
880 |
self.checkdir_relative = checkdir_relative |
881 |
self.xpkg = xpkg |
882 |
- self.stats = {} |
883 |
- self.fails = {} |
884 |
+ self.qatracker = qatracker |
885 |
|
886 |
|
887 |
def check(self, check_not_added): |
888 |
@@ -31,14 +30,8 @@ class VCSStatus(object): |
889 |
def post_git_hg(self, myf): |
890 |
for l in myf: |
891 |
if l[:-1][-7:] == ".ebuild": |
892 |
- if "ebuild.notadded" in list(self.fails): |
893 |
- self.stats["ebuild.notadded"] += 1 |
894 |
- self.fails["ebuild.notadded"].append( |
895 |
+ self.qatracker.add_error("ebuild.notadded", |
896 |
os.path.join(self.xpkg, os.path.basename(l[:-1]))) |
897 |
- else: |
898 |
- self.stats["ebuild.notadded"] = 1 |
899 |
- self.fails["ebuild.notadded"] = [os.path.join( |
900 |
- self.xpkg, os.path.basename(l[:-1]))] |
901 |
myf.close() |
902 |
|
903 |
|
904 |
@@ -62,14 +55,8 @@ class VCSStatus(object): |
905 |
myl = myf.readlines() |
906 |
myf.close() |
907 |
except IOError: |
908 |
- if "CVS/Entries.IO_error" in list(self.fails): |
909 |
- self.stats["CVS/Entries.IO_error"] += 1 |
910 |
- self.fails["CVS/Entries.IO_error"].append( |
911 |
- self.checkdir + "/CVS/Entries") |
912 |
- else: |
913 |
- self.stats["CVS/Entries.IO_error"] = 1 |
914 |
- self.fails["CVS/Entries.IO_error"] = [ |
915 |
- self.checkdir + "/CVS/Entries"] |
916 |
+ self.qatracker.add_error("CVS/Entries.IO_error", |
917 |
+ self.checkdir + "/CVS/Entries") |
918 |
return True |
919 |
for l in myl: |
920 |
if l[0] != "/": |