1 |
Make the repoman check code significantly more consistent in generating |
2 |
messages (os.path.join() for paths, don't generate a new path when |
3 |
there's an existing variable, etc.) |
4 |
--- |
5 |
bin/repoman | 74 ++++++++++++++++++++++++++++++------------------------------- |
6 |
1 file changed, 37 insertions(+), 37 deletions(-) |
7 |
|
8 |
diff --git a/bin/repoman b/bin/repoman |
9 |
index 3d5dde4..d6d495c 100755 |
10 |
--- a/bin/repoman |
11 |
+++ b/bin/repoman |
12 |
@@ -1416,7 +1416,7 @@ for x in effective_scanlist: |
13 |
if (y in no_exec or y.endswith(".ebuild")) and \ |
14 |
stat.S_IMODE(os.stat(os.path.join(checkdir, y)).st_mode) & 0o111: |
15 |
stats["file.executable"] += 1 |
16 |
- fails["file.executable"].append([os.path.join(checkdir, y)]) |
17 |
+ fails["file.executable"].append([os.path.join(x, y)]) |
18 |
if y.endswith(".ebuild"): |
19 |
pf = y[:-7] |
20 |
ebuildlist.append(pf) |
21 |
@@ -1468,7 +1468,7 @@ for x in effective_scanlist: |
22 |
index = -1 |
23 |
if index != -1: |
24 |
stats["file.name"] += 1 |
25 |
- fails["file.name"].append(["%s/%s" % (checkdir, y), |
26 |
+ fails["file.name"].append([os.path.join(x, y), |
27 |
"char '%s'" % y[index]]) |
28 |
|
29 |
if not (y in ("ChangeLog", "metadata.xml") or y.endswith(".ebuild")): |
30 |
@@ -1488,7 +1488,7 @@ for x in effective_scanlist: |
31 |
line += l2 |
32 |
if l2 != 0: |
33 |
s = s[s.rfind("\n") + 1:] |
34 |
- fails["file.UTF8"].append(["%s/%s" % (checkdir, y), |
35 |
+ fails["file.UTF8"].append([os.path.join(x, y), |
36 |
"line %i, just after: '%s'" % (line, s)]) |
37 |
finally: |
38 |
if f is not None: |
39 |
@@ -1557,7 +1557,7 @@ for x in effective_scanlist: |
40 |
except IOError: |
41 |
if vcs == "cvs": |
42 |
stats["CVS/Entries.IO_error"] += 1 |
43 |
- fails["CVS/Entries.IO_error"].append([checkdir + "/CVS/Entries"]) |
44 |
+		fails["CVS/Entries.IO_error"].append([os.path.join(x, "CVS", "Entries")])
45 |
else: |
46 |
raise |
47 |
continue |
48 |
@@ -1595,11 +1595,11 @@ for x in effective_scanlist: |
49 |
for entry in mydigests: |
50 |
if entry not in myfiles_all: |
51 |
stats["digest.unused"] += 1 |
52 |
- fails["digest.unused"].append([checkdir + "::" + entry]) |
53 |
+ fails["digest.unused"].append([os.path.join(x, "Manifest"), entry]) |
54 |
for entry in myfiles_all: |
55 |
if entry not in mydigests: |
56 |
stats["digest.missing"] += 1 |
57 |
- fails["digest.missing"].append([checkdir + "::" + entry]) |
58 |
+ fails["digest.missing"].append([os.path.join(x, "Manifest"), entry]) |
59 |
del myfiles_all |
60 |
|
61 |
if os.path.exists(checkdir + "/files"): |
62 |
@@ -1631,12 +1631,12 @@ for x in effective_scanlist: |
63 |
# 20 KiB and 60 KiB causes a warning, while file size over 60 KiB causes an error. |
64 |
elif mystat.st_size > 61440: |
65 |
stats["file.size.fatal"] += 1 |
66 |
- fails["file.size.fatal"].append([x + "/files/" + y, |
67 |
- "(" + str(mystat.st_size//1024) + " KiB)"]) |
68 |
+ fails["file.size.fatal"].append([os.path.join(x, "files", y), |
69 |
+ str(mystat.st_size//1024) + " KiB"]) |
70 |
elif mystat.st_size > 20480: |
71 |
stats["file.size"] += 1 |
72 |
- fails["file.size"].append([x + "/files/" + y, |
73 |
- "(" + str(mystat.st_size//1024) + " KiB)"]) |
74 |
+ fails["file.size"].append([os.path.join(x, "files", y), |
75 |
+ str(mystat.st_size//1024) + " KiB"]) |
76 |
|
77 |
index = repo_config.find_invalid_path_char(y) |
78 |
if index != -1: |
79 |
@@ -1649,19 +1649,19 @@ for x in effective_scanlist: |
80 |
index = -1 |
81 |
if index != -1: |
82 |
stats["file.name"] += 1 |
83 |
- fails["file.name"].append(["%s/files/%s" % (checkdir, y), |
84 |
+ fails["file.name"].append([os.path.join(x, "files", y), |
85 |
"char '%s'" % y[index]]) |
86 |
del mydigests |
87 |
|
88 |
if check_changelog and "ChangeLog" not in checkdirlist: |
89 |
stats["changelog.missing"] += 1 |
90 |
- fails["changelog.missing"].append([x + "/ChangeLog"]) |
91 |
+ fails["changelog.missing"].append([os.path.join(x, "ChangeLog")]) |
92 |
|
93 |
musedict = {} |
94 |
# metadata.xml file check |
95 |
if "metadata.xml" not in checkdirlist: |
96 |
stats["metadata.missing"] += 1 |
97 |
- fails["metadata.missing"].append([x + "/metadata.xml"]) |
98 |
+ fails["metadata.missing"].append([os.path.join(x, "metadata.xml")]) |
99 |
# metadata.xml parse check |
100 |
else: |
101 |
metadata_bad = False |
102 |
@@ -1677,7 +1677,7 @@ for x in effective_scanlist: |
103 |
except (ExpatError, SyntaxError, EnvironmentError) as e: |
104 |
metadata_bad = True |
105 |
stats["metadata.bad"] += 1 |
106 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, e]) |
107 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), e]) |
108 |
del e |
109 |
else: |
110 |
if not hasattr(xml_parser, 'parser') or \ |
111 |
@@ -1688,7 +1688,7 @@ for x in effective_scanlist: |
112 |
else: |
113 |
if "XML_DECLARATION" not in xml_info: |
114 |
stats["metadata.bad"] += 1 |
115 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, |
116 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), |
117 |
"xml declaration is missing on first line, " |
118 |
"should be '%s'" % metadata_xml_declaration]) |
119 |
else: |
120 |
@@ -1701,14 +1701,14 @@ for x in effective_scanlist: |
121 |
encoding_problem = "but it is undefined" |
122 |
else: |
123 |
encoding_problem = "not '%s'" % xml_encoding |
124 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, |
125 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), |
126 |
"xml declaration encoding should be '%s', %s" % |
127 |
(metadata_xml_encoding, encoding_problem)]) |
128 |
|
129 |
if "DOCTYPE" not in xml_info: |
130 |
metadata_bad = True |
131 |
stats["metadata.bad"] += 1 |
132 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, |
133 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), |
134 |
"DOCTYPE is missing"]) |
135 |
else: |
136 |
doctype_name, doctype_system, doctype_pubid = \ |
137 |
@@ -1719,13 +1719,13 @@ for x in effective_scanlist: |
138 |
system_problem = "but it is undefined" |
139 |
else: |
140 |
system_problem = "not '%s'" % doctype_system |
141 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, |
142 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), |
143 |
"DOCTYPE: SYSTEM should refer to '%s', %s" % |
144 |
(metadata_dtd_uri, system_problem)]) |
145 |
|
146 |
if doctype_name != metadata_doctype_name: |
147 |
stats["metadata.bad"] += 1 |
148 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, |
149 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), |
150 |
"DOCTYPE: name should be '%s', not '%s'" % |
151 |
(metadata_doctype_name, doctype_name)]) |
152 |
|
153 |
@@ -1735,7 +1735,7 @@ for x in effective_scanlist: |
154 |
except portage.exception.ParseError as e: |
155 |
metadata_bad = True |
156 |
stats["metadata.bad"] += 1 |
157 |
- fails["metadata.bad"].append(["%s/metadata.xml" % x, e]) |
158 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), e]) |
159 |
else: |
160 |
for atom in chain(*musedict.values()): |
161 |
if atom is None: |
162 |
@@ -1745,12 +1745,12 @@ for x in effective_scanlist: |
163 |
except InvalidAtom as e: |
164 |
stats["metadata.bad"] += 1 |
165 |
fails["metadata.bad"].append([ |
166 |
- "%s/metadata.xml" % x, "Invalid atom: %s" % e]) |
167 |
+ os.path.join(x, "metadata.xml"), "Invalid atom: %s" % e]) |
168 |
else: |
169 |
if atom.cp != x: |
170 |
stats["metadata.bad"] += 1 |
171 |
fails["metadata.bad"].append([ |
172 |
- "%s/metadata.xml" % x, "Atom contains " |
173 |
+ os.path.join(x, "metadata.xml"), "Atom contains " |
174 |
"unexpected cat/pn: %s" % atom]) |
175 |
|
176 |
# Run other metadata.xml checkers |
177 |
@@ -1759,7 +1759,7 @@ for x in effective_scanlist: |
178 |
except (utilities.UnknownHerdsError, ) as e: |
179 |
metadata_bad = True |
180 |
stats["metadata.bad"] += 1 |
181 |
- fails["metadata.bad"].append(["%s/metadata.xml" %x, e]) |
182 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml"), e]) |
183 |
del e |
184 |
|
185 |
# Only carry out if in package directory or check forced |
186 |
@@ -1775,7 +1775,7 @@ for x in effective_scanlist: |
187 |
for z in out.splitlines(): |
188 |
print(red("!!! ") + z) |
189 |
stats["metadata.bad"] += 1 |
190 |
- fails["metadata.bad"].append([x + "/metadata.xml"]) |
191 |
+ fails["metadata.bad"].append([os.path.join(x, "metadata.xml")]) |
192 |
|
193 |
del metadata_bad |
194 |
muselist = frozenset(musedict) |
195 |
@@ -1803,18 +1803,18 @@ for x in effective_scanlist: |
196 |
if vcs in ("cvs", "svn", "bzr") and check_ebuild_notadded and y not in eadded: |
197 |
# ebuild not added to vcs |
198 |
stats["ebuild.notadded"] += 1 |
199 |
- fails["ebuild.notadded"].append([x + "/" + y + ".ebuild"]) |
200 |
+ fails["ebuild.notadded"].append([relative_path]) |
201 |
myesplit = portage.pkgsplit(y) |
202 |
if myesplit is None or myesplit[0] != x.split("/")[-1] \ |
203 |
or pv_toolong_re.search(myesplit[1]) \ |
204 |
or pv_toolong_re.search(myesplit[2]): |
205 |
stats["ebuild.invalidname"] += 1 |
206 |
- fails["ebuild.invalidname"].append([x + "/" + y + ".ebuild"]) |
207 |
+ fails["ebuild.invalidname"].append([relative_path]) |
208 |
continue |
209 |
elif myesplit[0] != pkgdir: |
210 |
print(pkgdir, myesplit[0]) |
211 |
stats["ebuild.namenomatch"] += 1 |
212 |
- fails["ebuild.namenomatch"].append([x + "/" + y + ".ebuild"]) |
213 |
+ fails["ebuild.namenomatch"].append([relative_path]) |
214 |
continue |
215 |
|
216 |
pkg = pkgs[y] |
217 |
@@ -1881,7 +1881,7 @@ for x in effective_scanlist: |
218 |
continue |
219 |
myqakey = missingvars[pos] + ".missing" |
220 |
stats[myqakey] += 1 |
221 |
- fails[myqakey].append([x + "/" + y + ".ebuild"]) |
222 |
+ fails[myqakey].append([relative_path]) |
223 |
|
224 |
if catdir == "virtual": |
225 |
for var in ("HOMEPAGE", "LICENSE"): |
226 |
@@ -1908,7 +1908,7 @@ for x in effective_scanlist: |
227 |
stable_keywords.sort() |
228 |
stats["KEYWORDS.stable"] += 1 |
229 |
fails["KEYWORDS.stable"].append( |
230 |
- [x + "/" + y + ".ebuild", "added with stable keywords: %s" % \ |
231 |
+ [relative_path, "added with stable keywords: %s" % \ |
232 |
" ".join(stable_keywords)]) |
233 |
|
234 |
ebuild_archs = set(kw.lstrip("~") for kw in keywords \ |
235 |
@@ -1936,7 +1936,7 @@ for x in effective_scanlist: |
236 |
haskeyword = True |
237 |
if not haskeyword: |
238 |
stats["KEYWORDS.stupid"] += 1 |
239 |
- fails["KEYWORDS.stupid"].append([x + "/" + y + ".ebuild"]) |
240 |
+ fails["KEYWORDS.stupid"].append([relative_path]) |
241 |
|
242 |
""" |
243 |
Ebuilds that inherit a "Live" eclass (darcs,subversion,git,cvs,etc..) should |
244 |
@@ -1952,7 +1952,7 @@ for x in effective_scanlist: |
245 |
if bad_stable_keywords: |
246 |
stats["LIVEVCS.stable"] += 1 |
247 |
fails["LIVEVCS.stable"].append([ |
248 |
- x + "/" + y + ".ebuild", "with stable keywords:%s " % \ |
249 |
+ relative_path, "with stable keywords:%s " % \ |
250 |
bad_stable_keywords]) |
251 |
del bad_stable_keywords |
252 |
|
253 |
@@ -2121,7 +2121,7 @@ for x in effective_scanlist: |
254 |
|
255 |
for mypos in range(len(myuse)): |
256 |
stats["IUSE.invalid"] += 1 |
257 |
- fails["IUSE.invalid"].append([x + "/" + y + ".ebuild", myuse[mypos]]) |
258 |
+ fails["IUSE.invalid"].append([relative_path, myuse[mypos]]) |
259 |
|
260 |
# Check for outdated RUBY targets |
261 |
if "ruby-ng" in inherited or "ruby-fakegem" in inherited or "ruby" in inherited: |
262 |
@@ -2144,7 +2144,7 @@ for x in effective_scanlist: |
263 |
# function will remove it without removing values. |
264 |
if lic not in liclist and lic != "||": |
265 |
stats["LICENSE.invalid"] += 1 |
266 |
- fails["LICENSE.invalid"].append([x + "/" + y + ".ebuild", lic]) |
267 |
+ fails["LICENSE.invalid"].append([relative_path, lic]) |
268 |
elif lic in liclist_deprecated: |
269 |
stats["LICENSE.deprecated"] += 1 |
270 |
fails["LICENSE.deprecated"].append([relative_path, lic]) |
271 |
@@ -2160,10 +2160,10 @@ for x in effective_scanlist: |
272 |
myskey = myskey[1:] |
273 |
if myskey not in kwlist: |
274 |
stats["KEYWORDS.invalid"] += 1 |
275 |
- fails["KEYWORDS.invalid"].append([x + "/" + y + ".ebuild", mykey]) |
276 |
+ fails["KEYWORDS.invalid"].append([relative_path, mykey]) |
277 |
elif myskey not in profiles: |
278 |
stats["KEYWORDS.invalid"] += 1 |
279 |
- fails["KEYWORDS.invalid"].append([x + "/" + y + ".ebuild", "%s (profile invalid)" % mykey]) |
280 |
+ fails["KEYWORDS.invalid"].append([relative_path, "%s (profile invalid)" % mykey]) |
281 |
|
282 |
# restrict checks |
283 |
myrestrict = None |
284 |
@@ -2180,7 +2180,7 @@ for x in effective_scanlist: |
285 |
if mybadrestrict: |
286 |
stats["RESTRICT.invalid"] += len(mybadrestrict) |
287 |
for mybad in mybadrestrict: |
288 |
- fails["RESTRICT.invalid"].append([x + "/" + y + ".ebuild", mybad]) |
289 |
+ fails["RESTRICT.invalid"].append([relative_path, mybad]) |
290 |
# REQUIRED_USE check |
291 |
required_use = myaux["REQUIRED_USE"] |
292 |
if required_use: |
293 |
@@ -2387,7 +2387,7 @@ for x in effective_scanlist: |
294 |
if allvalid: |
295 |
for myflag in muselist.difference(used_useflags): |
296 |
stats["metadata.warning"] += 1 |
297 |
- fails["metadata.warning"].append(["%s/metadata.xml" % x, |
298 |
+ fails["metadata.warning"].append([os.path.join(x, "metadata.xml"), |
299 |
"unused local USE-description: '%s'" % myflag]) |
300 |
|
301 |
if options.if_modified == "y" and len(effective_scanlist) < 1: |
302 |
-- |
303 |
1.8.5.3 |