1 |
commit: be66151836a778223804a22382975308859cd014 |
2 |
Author: Magnus Granberg <zorry <AT> gentoo <DOT> org> |
3 |
AuthorDate: Sat Jan 4 14:24:30 2014 +0000 |
4 |
Commit: Magnus Granberg <zorry <AT> gentoo <DOT> org> |
5 |
CommitDate: Sat Jan 4 14:24:30 2014 +0000 |
6 |
URL: http://git.overlays.gentoo.org/gitweb/?p=dev/zorry.git;a=commit;h=be661518 |
7 |
|
8 |
add package metadata and emerge --info to db |
9 |
|
10 |
--- |
11 |
.gitignore | 2 + |
12 |
.../gobs/files/zobcs_portage_Scheduler.patch | 10 +- |
13 |
gobs/pym/Scheduler.py | 8 +- |
14 |
gobs/pym/actions.py | 67 +++---- |
15 |
gobs/pym/build_depgraph.py | 2 +- |
16 |
gobs/pym/build_log.py | 212 ++++++++++++++------- |
17 |
gobs/pym/manifest.py | 5 +- |
18 |
gobs/pym/mysql_querys.py | 61 +++++- |
19 |
gobs/pym/package.py | 41 ++-- |
20 |
9 files changed, 271 insertions(+), 137 deletions(-) |
21 |
|
22 |
diff --git a/.gitignore b/.gitignore |
23 |
index ce3cbfc..88cd6f3 100644 |
24 |
--- a/.gitignore |
25 |
+++ b/.gitignore |
26 |
@@ -1,2 +1,4 @@ |
27 |
# Backup files |
28 |
*~ |
29 |
+# Python compiled files |
30 |
+*.pyc |
31 |
\ No newline at end of file |
32 |
|
33 |
diff --git a/ebuild/dev-python/gobs/files/zobcs_portage_Scheduler.patch b/ebuild/dev-python/gobs/files/zobcs_portage_Scheduler.patch |
34 |
index a4f8a97..df76a7b 100644 |
35 |
--- a/ebuild/dev-python/gobs/files/zobcs_portage_Scheduler.patch |
36 |
+++ b/ebuild/dev-python/gobs/files/zobcs_portage_Scheduler.patch |
37 |
@@ -24,7 +24,7 @@ |
38 |
if not self._terminated_tasks: |
39 |
self._failed_pkg_msg(self._failed_pkgs[-1], "install", "to") |
40 |
self._status_display.failed = len(self._failed_pkgs) |
41 |
-+ add_buildlog_main(settings, pkg) |
42 |
++ add_buildlog_main(settings, pkg, trees) |
43 |
return |
44 |
|
45 |
self._task_complete(pkg) |
46 |
@@ -32,7 +32,7 @@ |
47 |
self._pkg_cache.pop(pkg_to_replace, None) |
48 |
|
49 |
if pkg.installed: |
50 |
-+ add_buildlog_main(settings, pkg) |
51 |
++ add_buildlog_main(settings, pkg, trees) |
52 |
return |
53 |
|
54 |
# Call mtimedb.commit() after each merge so that |
55 |
@@ -40,7 +40,7 @@ |
56 |
if not mtimedb["resume"]["mergelist"]: |
57 |
del mtimedb["resume"] |
58 |
mtimedb.commit() |
59 |
-+ add_buildlog_main(settings, pkg) |
60 |
++ add_buildlog_main(settings, pkg, trees) |
61 |
|
62 |
def _build_exit(self, build): |
63 |
self._running_tasks.pop(id(build), None) |
64 |
@@ -49,7 +49,7 @@ |
65 |
else: |
66 |
settings = build.settings |
67 |
+ trees = self.trees |
68 |
-+ pkg=build.pkg |
69 |
++ pkg = build.pkg |
70 |
build_dir = settings.get("PORTAGE_BUILDDIR") |
71 |
build_log = settings.get("PORTAGE_LOG_FILE") |
72 |
|
73 |
@@ -57,7 +57,7 @@ |
74 |
self._failed_pkg_msg(self._failed_pkgs[-1], "emerge", "for") |
75 |
self._status_display.failed = len(self._failed_pkgs) |
76 |
self._deallocate_config(build.settings) |
77 |
-+ add_buildlog_main(settings, pkg) |
78 |
++ add_buildlog_main(settings, pkg, trees) |
79 |
self._jobs -= 1 |
80 |
self._status_display.running = self._jobs |
81 |
self._schedule() |
82 |
|
83 |
diff --git a/gobs/pym/Scheduler.py b/gobs/pym/Scheduler.py |
84 |
index 3aaf147..944dcf7 100644 |
85 |
--- a/gobs/pym/Scheduler.py |
86 |
+++ b/gobs/pym/Scheduler.py |
87 |
@@ -1269,7 +1269,7 @@ class Scheduler(PollScheduler): |
88 |
if not self._terminated_tasks: |
89 |
self._failed_pkg_msg(self._failed_pkgs[-1], "install", "to") |
90 |
self._status_display.failed = len(self._failed_pkgs) |
91 |
- add_buildlog_main(settings, pkg) |
92 |
+ add_buildlog_main(settings, pkg, trees) |
93 |
return |
94 |
|
95 |
self._task_complete(pkg) |
96 |
@@ -1288,7 +1288,7 @@ class Scheduler(PollScheduler): |
97 |
self._pkg_cache.pop(pkg_to_replace, None) |
98 |
|
99 |
if pkg.installed: |
100 |
- add_buildlog_main(settings, pkg) |
101 |
+ add_buildlog_main(settings, pkg, trees) |
102 |
return |
103 |
|
104 |
# Call mtimedb.commit() after each merge so that |
105 |
@@ -1299,7 +1299,7 @@ class Scheduler(PollScheduler): |
106 |
if not mtimedb["resume"]["mergelist"]: |
107 |
del mtimedb["resume"] |
108 |
mtimedb.commit() |
109 |
- add_buildlog_main(settings, pkg) |
110 |
+ add_buildlog_main(settings, pkg, trees) |
111 |
|
112 |
def _build_exit(self, build): |
113 |
self._running_tasks.pop(id(build), None) |
114 |
@@ -1337,7 +1337,7 @@ class Scheduler(PollScheduler): |
115 |
self._failed_pkg_msg(self._failed_pkgs[-1], "emerge", "for") |
116 |
self._status_display.failed = len(self._failed_pkgs) |
117 |
self._deallocate_config(build.settings) |
118 |
- add_buildlog_main(settings, pkg) |
119 |
+ add_buildlog_main(settings, pkg, trees) |
120 |
self._jobs -= 1 |
121 |
self._status_display.running = self._jobs |
122 |
self._schedule() |
123 |
|
124 |
diff --git a/gobs/pym/actions.py b/gobs/pym/actions.py |
125 |
index e29d8e0..9c38c1a 100644 |
126 |
--- a/gobs/pym/actions.py |
127 |
+++ b/gobs/pym/actions.py |
128 |
@@ -609,7 +609,7 @@ def action_depclean(settings, trees, ldpath_mtimes, |
129 |
# The calculation is done in a separate function so that depgraph |
130 |
# references go out of scope and the corresponding memory |
131 |
# is freed before we call unmerge(). |
132 |
- rval, cleanlist, ordered, req_pkg_count = \ |
133 |
+ rval, cleanlist, ordered, req_pkg_count, unresolvable = \ |
134 |
calc_depclean(settings, trees, ldpath_mtimes, |
135 |
myopts, action, args_set, spinner) |
136 |
|
137 |
@@ -812,7 +812,7 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
138 |
resolver.display_problems() |
139 |
|
140 |
if not success: |
141 |
- return 1, [], False, 0 |
142 |
+ return 1, [], False, 0, [] |
143 |
|
144 |
def unresolved_deps(): |
145 |
|
146 |
@@ -823,7 +823,7 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
147 |
unresolvable.add((dep.atom, dep.parent.cpv)) |
148 |
|
149 |
if not unresolvable: |
150 |
- return False |
151 |
+ return None |
152 |
|
153 |
if unresolvable and not allow_missing_deps: |
154 |
|
155 |
@@ -873,11 +873,12 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
156 |
"dependencies then use %s." % good("--nodeps")) |
157 |
writemsg_level("".join("%s%s\n" % (prefix, line) for line in msg), |
158 |
level=logging.ERROR, noiselevel=-1) |
159 |
- return True |
160 |
- return False |
161 |
+ return unresolvable |
162 |
+ return None |
163 |
|
164 |
- if unresolved_deps(): |
165 |
- return 1, [], False, 0 |
166 |
+ unresolvable = unresolved_deps(): |
167 |
+ if not unresolvable is None: |
168 |
+ return 1, [], False, 0, unresolvable |
169 |
|
170 |
graph = resolver._dynamic_config.digraph.copy() |
171 |
required_pkgs_total = 0 |
172 |
@@ -1156,7 +1157,7 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
173 |
priority=UnmergeDepPriority(runtime=True), |
174 |
root=pkg.root)): |
175 |
resolver.display_problems() |
176 |
- return 1, [], False, 0 |
177 |
+ return 1, [], False, 0, [] |
178 |
|
179 |
writemsg_level("\nCalculating dependencies ") |
180 |
success = resolver._complete_graph( |
181 |
@@ -1164,9 +1165,10 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
182 |
writemsg_level("\b\b... done!\n") |
183 |
resolver.display_problems() |
184 |
if not success: |
185 |
- return 1, [], False, 0 |
186 |
- if unresolved_deps(): |
187 |
- return 1, [], False, 0 |
188 |
+ return 1, [], False, 0, [] |
189 |
+ unresolvable = unresolved_deps(): |
190 |
+ if not unresolvable is None: |
191 |
+ return 1, [], False, 0, unresolvable |
192 |
|
193 |
graph = resolver._dynamic_config.digraph.copy() |
194 |
required_pkgs_total = 0 |
195 |
@@ -1175,7 +1177,7 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
196 |
required_pkgs_total += 1 |
197 |
cleanlist = create_cleanlist() |
198 |
if not cleanlist: |
199 |
- return 0, [], False, required_pkgs_total |
200 |
+ return 0, [], False, required_pkgs_total, [] |
201 |
clean_set = set(cleanlist) |
202 |
|
203 |
if clean_set: |
204 |
@@ -1285,8 +1287,8 @@ def calc_depclean(settings, trees, ldpath_mtimes, |
205 |
graph.remove(node) |
206 |
cleanlist.append(node.cpv) |
207 |
|
208 |
- return 0, cleanlist, ordered, required_pkgs_total |
209 |
- return 0, [], False, required_pkgs_total |
210 |
+ return 0, cleanlist, ordered, required_pkgs_total, [] |
211 |
+ return 0, [], False, required_pkgs_total, [] |
212 |
|
213 |
def action_deselect(settings, trees, opts, atoms): |
214 |
enter_invalid = '--ask-enter-invalid' in opts |
215 |
@@ -1396,6 +1398,7 @@ def action_info(settings, trees, myopts, myfiles): |
216 |
# See if we can find any packages installed matching the strings |
217 |
# passed on the command line |
218 |
mypkgs = [] |
219 |
+ output_buffer = [] |
220 |
eroot = settings['EROOT'] |
221 |
vardb = trees[eroot]["vartree"].dbapi |
222 |
portdb = trees[eroot]['porttree'].dbapi |
223 |
@@ -1471,9 +1474,8 @@ def action_info(settings, trees, myopts, myfiles): |
224 |
writemsg(" nothing similar found.\n" |
225 |
, noiselevel=-1) |
226 |
|
227 |
- return 1 |
228 |
+ return output_buffer |
229 |
|
230 |
- output_buffer = [] |
231 |
append = output_buffer.append |
232 |
root_config = trees[settings['EROOT']]['root_config'] |
233 |
running_eroot = trees._running_eroot |
234 |
@@ -1688,8 +1690,6 @@ def action_info(settings, trees, myopts, myfiles): |
235 |
append("Unset: "+", ".join(unset_vars)) |
236 |
append("") |
237 |
append("") |
238 |
- writemsg_stdout("\n".join(output_buffer), |
239 |
- noiselevel=-1) |
240 |
|
241 |
# If some packages were found... |
242 |
if mypkgs: |
243 |
@@ -1703,9 +1703,9 @@ def action_info(settings, trees, myopts, myfiles): |
244 |
# Loop through each package |
245 |
# Only print settings if they differ from global settings |
246 |
header_title = "Package Settings" |
247 |
- print(header_width * "=") |
248 |
- print(header_title.rjust(int(header_width/2 + len(header_title)/2))) |
249 |
- print(header_width * "=") |
250 |
+ append(header_width * "=") |
251 |
+ append(header_title.rjust(int(header_width/2 + len(header_title)/2))) |
252 |
+ append(header_width * "=") |
253 |
from portage.output import EOutput |
254 |
out = EOutput() |
255 |
for mypkg in mypkgs: |
256 |
@@ -1725,28 +1725,23 @@ def action_info(settings, trees, myopts, myfiles): |
257 |
root_config=root_config, type_name=pkg_type) |
258 |
|
259 |
if pkg_type == "installed": |
260 |
- print("\n%s was built with the following:" % \ |
261 |
- colorize("INFORM", str(pkg.cpv))) |
262 |
+ append("\n%s was built with the following:" % str(pkg.cpv)) |
263 |
elif pkg_type == "ebuild": |
264 |
- print("\n%s would be build with the following:" % \ |
265 |
- colorize("INFORM", str(pkg.cpv))) |
266 |
+ append("\n%s would be build with the following:" % str(pkg.cpv)) |
267 |
elif pkg_type == "binary": |
268 |
- print("\n%s (non-installed binary) was built with the following:" % \ |
269 |
- colorize("INFORM", str(pkg.cpv))) |
270 |
+ append("\n%s (non-installed binary) was built with the following:" % str(pkg.cpv)) |
271 |
|
272 |
- writemsg_stdout('%s\n' % pkg_use_display(pkg, myopts), |
273 |
- noiselevel=-1) |
274 |
+ append('%s\n' % pkg_use_display(pkg, myopts)) |
275 |
if pkg_type == "installed": |
276 |
for myvar in mydesiredvars: |
277 |
if metadata[myvar].split() != settings.get(myvar, '').split(): |
278 |
- print("%s=\"%s\"" % (myvar, metadata[myvar])) |
279 |
- print() |
280 |
+ append("%s=\"%s\"" % (myvar, metadata[myvar])) |
281 |
|
282 |
if metadata['DEFINED_PHASES']: |
283 |
if 'info' not in metadata['DEFINED_PHASES'].split(): |
284 |
continue |
285 |
|
286 |
- print(">>> Attempting to run pkg_info() for '%s'" % pkg.cpv) |
287 |
+ append(">>> Attempting to run pkg_info() for '%s'" % pkg.cpv) |
288 |
|
289 |
if pkg_type == "installed": |
290 |
ebuildpath = vardb.findname(pkg.cpv) |
291 |
@@ -1782,6 +1777,7 @@ def action_info(settings, trees, myopts, myfiles): |
292 |
mydbapi=trees[settings['EROOT']]["bintree"].dbapi, |
293 |
tree="bintree") |
294 |
shutil.rmtree(tmpdir) |
295 |
+ return output_buffer |
296 |
|
297 |
def action_metadata(settings, portdb, myopts, porttrees=None): |
298 |
if porttrees is None: |
299 |
@@ -3893,7 +3889,12 @@ def run_action(settings, trees, mtimedb, myaction, myopts, myfiles, build_dict, |
300 |
level=logging.ERROR, noiselevel=-1) |
301 |
return 1 |
302 |
|
303 |
- return action_info(settings, trees, myopts, valid_atoms) |
304 |
+ msg_list = action_info(settings, trees, myopts, valid_atoms) |
305 |
+ if msg_list == []: |
306 |
+ return 1 |
307 |
+ writemsg_stdout("\n".join(msg_list), |
308 |
+ noiselevel=-1) |
309 |
+ return 0 |
310 |
|
311 |
# "update", "system", or just process files: |
312 |
else: |
313 |
|
314 |
diff --git a/gobs/pym/build_depgraph.py b/gobs/pym/build_depgraph.py |
315 |
index 37b9722..e0002cd 100644 |
316 |
--- a/gobs/pym/build_depgraph.py |
317 |
+++ b/gobs/pym/build_depgraph.py |
318 |
@@ -27,7 +27,7 @@ def build_mydepgraph(settings, trees, mtimedb, myopts, myparams, myaction, myfil |
319 |
repeat_times = 0 |
320 |
while repeat: |
321 |
if mydepgraph._dynamic_config._needed_p_mask_changes: |
322 |
- build_dict['type_fail'] = "Mask packages" |
323 |
+ build_dict['type_fail'] = "Mask package or dep" |
324 |
build_dict['check_fail'] = True |
325 |
elif mydepgraph._dynamic_config._needed_use_config_changes: |
326 |
mydepgraph._display_autounmask() |
327 |
|
328 |
diff --git a/gobs/pym/build_log.py b/gobs/pym/build_log.py |
329 |
index 0c170e1..38e2e25 100644 |
330 |
--- a/gobs/pym/build_log.py |
331 |
+++ b/gobs/pym/build_log.py |
332 |
@@ -11,6 +11,12 @@ from portage.util import writemsg, \ |
333 |
from portage import _encodings |
334 |
from portage import _unicode_encode |
335 |
|
336 |
+from _emerge.main import parse_opts |
337 |
+ |
338 |
+portage.proxy.lazyimport.lazyimport(globals(), |
339 |
+ 'gobs.actions:action_info', |
340 |
+) |
341 |
+ |
342 |
from gobs.repoman_gobs import gobs_repoman |
343 |
from gobs.text import get_log_text_list |
344 |
from gobs.package import gobs_package |
345 |
@@ -19,7 +25,8 @@ from gobs.flags import gobs_use_flags |
346 |
from gobs.ConnectionManager import connectionManager |
347 |
from gobs.mysql_querys import add_gobs_logs, get_config_id, get_ebuild_id_db_checksum, add_new_buildlog, \ |
348 |
update_manifest_sql, get_package_id, get_build_job_id, get_use_id, get_fail_querue_dict, \ |
349 |
- add_fail_querue_dict, update_fail_times, get_config, get_hilight_info, get error_info_list |
350 |
+ add_fail_querue_dict, update_fail_times, get_config, get_hilight_info, get_error_info_list, \ |
351 |
+ add_old_ebuild, add_e_info |
352 |
|
353 |
def get_build_dict_db(conn, config_id, settings, pkg): |
354 |
myportdb = portage.portdbapi(mysettings=settings) |
355 |
@@ -60,7 +67,20 @@ def get_build_dict_db(conn, config_id, settings, pkg): |
356 |
pkgdir = myportdb.getRepositoryPath(repo) + "/" + categories + "/" + package |
357 |
ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir+ "/" + package + "-" + ebuild_version + ".ebuild")[0] |
358 |
build_dict['checksum'] = ebuild_version_checksum_tree |
359 |
- ebuild_id = get_ebuild_id_db_checksum(conn, build_dict) |
360 |
+ ebuild_id_list = get_ebuild_id_db_checksum(conn, build_dict) |
361 |
+ if ebuild_id_list is None: |
362 |
+ ebuild_id = None |
363 |
+ elif len(ebuild_id_list) >= 2: |
364 |
+ old_ebuild_id_list = [] |
365 |
+ for ebuild_id in ebuild_id_list: |
366 |
+ if ebuild_id != ebuild_id_list[0]: |
367 |
+ log_msg = "%s:%s:%s Dups of checksums" % (pkg.cpv, repo, ebuild_id,) |
368 |
+ add_gobs_logs(conn, log_msg, "error", config_id) |
369 |
+ old_ebuild_id_list.append(ebuild_id) |
370 |
+ add_old_ebuild(conn, package_id, old_ebuild_id_list) |
371 |
+ ebuild_id = ebuild_id_list[0] |
372 |
+ else: |
373 |
+ ebuild_id = ebuild_id_list[0] |
374 |
if ebuild_id is None: |
375 |
log_msg = "%s:%s Don't have any ebuild_id!" % (pkg.cpv, repo,) |
376 |
add_gobs_logs(conn, log_msg, "info", config_id) |
377 |
@@ -84,14 +104,15 @@ def get_build_dict_db(conn, config_id, settings, pkg): |
378 |
def search_buildlog(conn, logfile_text): |
379 |
log_search_list = get_hilight_info(conn) |
380 |
index = 0 |
381 |
- hilight_list = [] |
382 |
+ new_hilight_dict = {} |
383 |
for textline in logfile_text: |
384 |
index = index + 1 |
385 |
for search_pattern in log_search_list: |
386 |
if re.search(search_pattern['hilight_search'], textline): |
387 |
hilight_tmp = {} |
388 |
+ hilight_tmp['searchline'] = index |
389 |
hilight_tmp['startline'] = index - search_pattern['hilight_start'] |
390 |
- hilight_tmp['hilight'] =search_pattern ['hilight_css'] |
391 |
+ hilight_tmp['hilight'] = search_pattern ['hilight_css'] |
392 |
if search_pattern['hilight_search_end'] is None: |
393 |
hilight_tmp['endline'] = index + search_pattern['hilight_end'] |
394 |
else: |
395 |
@@ -105,47 +126,86 @@ def search_buildlog(conn, logfile_text): |
396 |
else: |
397 |
hilight_tmp['endline'] = i |
398 |
else: |
399 |
- i = i +1 |
400 |
- hilight_list.append(hilight_tmp) |
401 |
- new_hilight_dict = {} |
402 |
- for hilight_tmp in hilight_list: |
403 |
- add_new_hilight = True |
404 |
- add_new_hilight_middel = None |
405 |
- for k, v in sorted(new_hilight_dict.iteritems()): |
406 |
- if hilight_tmp['startline'] == hilight_tmp['endline']: |
407 |
- if v['endline'] == hilight_tmp['startline'] or v['startline'] == hilight_tmp['startline']: |
408 |
- add_new_hilight = False |
409 |
- if hilight_tmp['startline'] > v['startline'] and hilight_tmp['startline'] < v['endline']: |
410 |
- add_new_hilight = False |
411 |
- add_new_hilight_middel = k |
412 |
- else: |
413 |
- if v['endline'] == hilight_tmp['startline'] or v['startline'] == hilight_tmp['startline']: |
414 |
- add_new_hilight = False |
415 |
- if hilight_tmp['startline'] > v['startline'] and hilight_tmp['startline'] < v['endline']: |
416 |
- add_new_hilight = False |
417 |
- if add_new_hilight is True: |
418 |
- adict = {} |
419 |
- adict['startline'] = hilight_tmp['startline'] |
420 |
- adict['hilight'] = hilight_tmp['hilight'] |
421 |
- adict['endline'] = hilight_tmp['endline'] |
422 |
- new_hilight_dict[hilight_tmp['startline']] = adict |
423 |
- if not add_new_hilight_middel is None: |
424 |
- adict1 = {} |
425 |
- adict2 = {} |
426 |
- adict3 = {} |
427 |
- adict1['startline'] = new_hilight_dict[add_new_hilight_middel]['startline'] |
428 |
- adict1['endline'] = hilight_tmp['startline'] -1 |
429 |
- adict1['hilight'] = new_hilight_dict[add_new_hilight_middel]['hilight'] |
430 |
- adict2['startline'] = hilight_tmp['startline'] |
431 |
- adict2['hilight'] = hilight_tmp['hilight'] |
432 |
- adict2['endline'] = hilight_tmp['endline'] |
433 |
- adict3['startline'] = hilight_tmp['endline'] + 1 |
434 |
- adict3['hilight'] = new_hilight_dict[add_new_hilight_middel]['hilight'] |
435 |
- adict3['endline'] = new_hilight_dict[add_new_hilight_middel]['endline'] |
436 |
- del new_hilight_dict[add_new_hilight_middel] |
437 |
- new_hilight_dict[adict1['startline']] = adict1 |
438 |
- new_hilight_dict[adict2['startline']] = adict2 |
439 |
- new_hilight_dict[adict3['startline']] = adict3 |
440 |
+ i = i + 1 |
441 |
+ if not hilight_tmp['startline'] in new_hilight_dict: |
442 |
+ if hilight_tmp['startline'] != index and hilight_tmp['endline'] == index: |
443 |
+ i = hilight_tmp['startline'] |
444 |
+ while i == index: |
445 |
+ adict = {} |
446 |
+ adict['hilight'] = None |
447 |
+ new_hilight_dict[hilight_tmp[index]] = adict |
448 |
+ i = i + 1 |
449 |
+ adict = {} |
450 |
+ adict['hilight'] = hilight_tmp['hilight'] |
451 |
+ new_hilight_dict[hilight_tmp[index]] = adict |
452 |
+ elif hilight_tmp['startline'] == index and hilight_tmp['endline'] != index: |
453 |
+ i = index |
454 |
+ while i == hilight_tmp['endline']: |
455 |
+ adict = {} |
456 |
+ if hilight_tmp['startline'] == index: |
457 |
+ adict['hilight'] = hilight_tmp['hilight'] |
458 |
+ else: |
459 |
+ adict['hilight'] = None |
460 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
461 |
+ i = i + 1 |
462 |
+ elif hilight_tmp['startline'] != index and hilight_tmp['endline'] != index: |
463 |
+ i = hilight_tmp['startline'] |
464 |
+ while i == hilight_tmp['endline']: |
465 |
+ adict = {} |
466 |
+ if i == index: |
467 |
+ adict['hilight'] = hilight_tmp['hilight'] |
468 |
+ else: |
469 |
+ adict['hilight'] = None |
470 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
471 |
+ i = i + 1 |
472 |
+ adict = {} |
473 |
+ adict['hilight'] = hilight_tmp['hilight'] |
474 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
475 |
+ else: |
476 |
+ adict = {} |
477 |
+ adict['hilight'] = hilight_tmp['hilight'] |
478 |
+ new_hilight_dict[hilight_tmp[index]] = adict |
479 |
+ else: |
480 |
+ if not index in new_hilight_dict: |
481 |
+ i = index - 1 |
482 |
+ add_new_line = True |
483 |
+ while add_new_line is True: |
484 |
+ if i in new_hilight_dict: |
485 |
+ add_new_line = False |
486 |
+ else: |
487 |
+ adict = {} |
488 |
+ adict['hilight'] = None |
489 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
490 |
+ i = i - 1 |
491 |
+ adict = {} |
492 |
+ adict['hilight'] = hilight_tmp['hilight'] |
493 |
+ new_hilight_dict[hilight_tmp[index]] = adict |
494 |
+ i = index + 1 |
495 |
+ if hilight_tmp['endline'] != index: |
496 |
+ while i == hilight_tmp['endline']: |
497 |
+ adict = {} |
498 |
+ adict['hilight'] = None |
499 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
500 |
+ i = i + 1 |
501 |
+ adict = {} |
502 |
+ adict['hilight'] = hilight_tmp['hilight'] |
503 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
504 |
+ elif index in new_hilight_dict: |
505 |
+ if new_hilight_dict[hilight_tmp[index]]['hilight'] == None: |
506 |
+ new_hilight_dict[hilight_tmp[index]]['hilight'] = hilight_tmp['hilight'] |
507 |
+ if hilight_tmp['endline'] != index: |
508 |
+ i = index + 1 |
509 |
+ while i == hilight_tmp['endline']: |
510 |
+ if not i in new_hilight_dict: |
511 |
+ adict = {} |
512 |
+ adict['hilight'] = None |
513 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
514 |
+ i = i + 1 |
515 |
+ if not i in new_hilight_dict: |
516 |
+ adict = {} |
517 |
+ adict['hilight'] = hilight_tmp['hilight'] |
518 |
+ new_hilight_dict[hilight_tmp[i]] = adict |
519 |
+ |
520 |
return new_hilight_dict |
521 |
|
522 |
def get_buildlog_info(conn, settings, pkg, build_dict): |
523 |
@@ -158,17 +218,17 @@ def get_buildlog_info(conn, settings, pkg, build_dict): |
524 |
qa_error_list = [] |
525 |
repoman_error_list = [] |
526 |
sum_build_log_list = [] |
527 |
- error_info_list = get error_info_list(conn) |
528 |
+ error_info_list = get_error_info_list(conn) |
529 |
for k, v in sorted(hilight_dict.iteritems()): |
530 |
if v['startline'] == v['endline']: |
531 |
error_log_list.append(logfile_text[k -1]) |
532 |
- if v['hilight'] == "qa": |
533 |
+ if v['hilight'] == "3": # qa = 3 |
534 |
qa_error_list.append(logfile_text[k -1]) |
535 |
else: |
536 |
i = k |
537 |
while i != (v['endline'] + 1): |
538 |
error_log_list.append(logfile_text[i -1]) |
539 |
- if v['hilight'] == "qa": |
540 |
+ if v['hilight'] == "3": # qa = 3 |
541 |
qa_error_list.append(logfile_text[i -1]) |
542 |
i = i +1 |
543 |
|
544 |
@@ -178,8 +238,8 @@ def get_buildlog_info(conn, settings, pkg, build_dict): |
545 |
sum_build_log_list.append("1") # repoman = 1 |
546 |
if qa_error_list != []: |
547 |
sum_build_log_list.append("2") # qa = 2 |
548 |
- for sum_log_line in sum_build_log_list |
549 |
- if re.search(^ \\* ERROR: , sum_log_line): |
550 |
+ for sum_log_line in sum_build_log_list: |
551 |
+ if re.search("ERROR: ", sum_log_line): |
552 |
for error_info in error_info_list: |
553 |
if re.search(error_info['error_search'], sum_log_line): |
554 |
sum_build_log_list.append(error_info['error_id']) |
555 |
@@ -202,28 +262,28 @@ def write_msg_file(msg, log_path): |
556 |
if log_path is not None: |
557 |
try: |
558 |
f = open(_unicode_encode(log_path, |
559 |
- encoding=_encodings['fs'], errors='strict'), |
560 |
- mode='ab') |
561 |
+ encoding=_encodings['fs'], errors='strict'), |
562 |
+ mode='ab') |
563 |
f_real = f |
564 |
except IOError as e: |
565 |
if e.errno not in (errno.ENOENT, errno.ESTALE): |
566 |
raise |
567 |
if not msg_shown: |
568 |
writemsg_level(msg, level=level, noiselevel=noiselevel) |
569 |
- else: |
570 |
- if log_path.endswith('.gz'): |
571 |
- # NOTE: The empty filename argument prevents us from |
572 |
- # triggering a bug in python3 which causes GzipFile |
573 |
- # to raise AttributeError if fileobj.name is bytes |
574 |
- # instead of unicode. |
575 |
- f = gzip.GzipFile(filename='', mode='ab', fileobj=f) |
576 |
+ else: |
577 |
+ if log_path.endswith('.gz'): |
578 |
+ # NOTE: The empty filename argument prevents us from |
579 |
+ # triggering a bug in python3 which causes GzipFile |
580 |
+ # to raise AttributeError if fileobj.name is bytes |
581 |
+ # instead of unicode. |
582 |
+ f = gzip.GzipFile(filename='', mode='ab', fileobj=f) |
583 |
|
584 |
- f.write(_unicode_encode(msg)) |
585 |
- f.close() |
586 |
- if f_real is not f: |
587 |
- f_real.close() |
588 |
+ f.write(_unicode_encode(msg)) |
589 |
+ f.close() |
590 |
+ if f_real is not f: |
591 |
+ f_real.close() |
592 |
|
593 |
-def add_buildlog_main(settings, pkg): |
594 |
+def add_buildlog_main(settings, pkg, trees): |
595 |
CM = connectionManager() |
596 |
conn = CM.newConnection() |
597 |
if not conn.is_connected() is True: |
598 |
@@ -257,17 +317,25 @@ def add_buildlog_main(settings, pkg): |
599 |
add_gobs_logs(conn, log_msg, "info", config_id) |
600 |
log_id = add_new_buildlog(conn, build_dict, build_log_dict) |
601 |
|
602 |
- msg = "" |
603 |
- # emerge_info_logfilename = settings.get("PORTAGE_LOG_FILE")[:-3] + "emerge_log.log" |
604 |
if log_id is None: |
605 |
- # os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o664) |
606 |
log_msg = "Package %s:%s is NOT logged." % (pkg.cpv, pkg.repo,) |
607 |
add_gobs_logs(conn, log_msg, "info", config_id) |
608 |
else: |
609 |
- # for msg_line in msg: |
610 |
- # write_msg_file(msg_line, emerge_info_logfilename) |
611 |
+ args = [] |
612 |
+ args.append("--info") |
613 |
+ args.append("=%s" % pkg.cpv) |
614 |
+ myaction, myopts, myfiles = parse_opts(args, silent=True) |
615 |
+ emerge_info_list = action_info(settings, trees, myopts, myfiles) |
616 |
+ emerge_info = "" |
617 |
+ e_info_hash = hashlib.sha256() |
618 |
+ for e_info in emerge_info_list: |
619 |
+ emerge_info = emerge_info + e_info |
620 |
+ e_info_hash.update(e_info) |
621 |
+ e_info_id = add_e_info(conn, log_id, emerge_info, e_info_hash) |
622 |
+ if not e_info_id is None: |
623 |
+ log_msg = "New Emerge --info is logged." |
624 |
+ add_gobs_logs(conn, log_msg, "info", config_id) |
625 |
os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o664) |
626 |
- # os.chmod(emerge_info_logfilename, 0o664) |
627 |
log_msg = "Package: %s:%s is logged." % (pkg.cpv, pkg.repo,) |
628 |
add_gobs_logs(conn, log_msg, "info", config_id) |
629 |
print("\n>>> Logging %s:%s\n" % (pkg.cpv, pkg.repo,)) |
630 |
@@ -286,7 +354,7 @@ def log_fail_queru(conn, build_dict, settings): |
631 |
print('fail_querue_dict', fail_querue_dict) |
632 |
add_fail_querue_dict(conn, fail_querue_dict) |
633 |
else: |
634 |
- if fail_querue_dict['fail_times'] < 6: |
635 |
+ if fail_querue_dict['fail_times'] < 3: |
636 |
fail_querue_dict['fail_times'] = fail_querue_dict['fail_times']+ 1 |
637 |
fail_querue_dict['build_job_id'] = build_dict['build_job_id'] |
638 |
fail_querue_dict['fail_type'] = build_dict['type_fail'] |
639 |
@@ -298,7 +366,7 @@ def log_fail_queru(conn, build_dict, settings): |
640 |
qa_error_list = [] |
641 |
repoman_error_list = [] |
642 |
sum_build_log_list = [] |
643 |
- sum_build_log_list.append("True") |
644 |
+ sum_build_log_list.append("2") |
645 |
error_log_list.append(build_dict['type_fail']) |
646 |
build_log_dict['repoman_error_list'] = repoman_error_list |
647 |
build_log_dict['qa_error_list'] = qa_error_list |
648 |
|
649 |
diff --git a/gobs/pym/manifest.py b/gobs/pym/manifest.py |
650 |
index 20a4aeb..91a7c51 100644 |
651 |
--- a/gobs/pym/manifest.py |
652 |
+++ b/gobs/pym/manifest.py |
653 |
@@ -115,7 +115,8 @@ class gobs_manifest(object): |
654 |
except: |
655 |
self._mysettings.lock() |
656 |
return "Can't fetch the file." |
657 |
- self._mysettings.lock() |
658 |
+ finally: |
659 |
+ self._mysettings.lock() |
660 |
try: |
661 |
my_manifest.checkCpvHashes(cpv, checkDistfiles=True, onlyDistfiles=False, checkMiscfiles=True) |
662 |
except: |
663 |
@@ -124,4 +125,4 @@ class gobs_manifest(object): |
664 |
portdb.fetch_check(cpv, useflags=build_use_flags_list, mysettings=self._mysettings, all=False) |
665 |
except: |
666 |
return "Fetch check failed." |
667 |
- return None |
668 |
\ No newline at end of file |
669 |
+ return |
670 |
\ No newline at end of file |
671 |
|
672 |
diff --git a/gobs/pym/mysql_querys.py b/gobs/pym/mysql_querys.py |
673 |
index dfd423a..b5c2953 100644 |
674 |
--- a/gobs/pym/mysql_querys.py |
675 |
+++ b/gobs/pym/mysql_querys.py |
676 |
@@ -156,6 +156,31 @@ def add_new_manifest_sql(connection, cp, repo): |
677 |
cursor.close() |
678 |
return package_id |
679 |
|
680 |
+def get_package_metadata_sql(connection, package_id): |
681 |
+ cursor = connection.cursor() |
682 |
+ sqlQ ='SELECT checksum FROM packages_metadata WHERE package_id = %s' |
683 |
+ cursor.execute(sqlQ, (package_id,)) |
684 |
+ entries = cursor.fetchone() |
685 |
+ cursor.close() |
686 |
+ if not entries is None: |
687 |
+ return entries[0] |
688 |
+ return None |
689 |
+ |
690 |
+def update_package_metadata(connection, package_metadataDict): |
691 |
+ cursor = connection.cursor() |
692 |
+ sqlQ1 ='SELECT package_id FROM packages_metadata WHERE package_id = %s' |
693 |
+ sqlQ2 = 'UPDATE packages_metadata SET checksum = %s, email = %s, active = %s WHERE package_id = %s' |
694 |
+ sqlQ3 = 'INSERT INTO packages_metadata (checksum, email, package_id) VALUES ( %s, %s, %s )' |
695 |
+ for k, v in package_metadataDict.iteritems(): |
696 |
+ cursor.execute(sqlQ1, (k,)) |
697 |
+ entries = cursor.fetchone() |
698 |
+ if not entries is None: |
699 |
+ cursor.execute(sqlQ2, (v['metadata_xml_checksum'], v['metadata_xml_email'][0], k,)) |
700 |
+ else: |
701 |
+ cursor.execute(sqlQ3, (v['metadata_xml_checksum'], v['metadata_xml_email'][0], k,)) |
702 |
+ connection.commit() |
703 |
+ cursor.close() |
704 |
+ |
705 |
def get_restriction_id(connection, restriction): |
706 |
cursor = connection.cursor() |
707 |
sqlQ ='SELECT restriction_id FROM restrictions WHERE restriction = %s' |
708 |
@@ -499,10 +524,14 @@ def get_ebuild_id_db_checksum(connection, build_dict): |
709 |
cursor = connection.cursor() |
710 |
sqlQ = "SELECT ebuild_id FROM ebuilds WHERE version = %s AND checksum = %s AND package_id = %s AND active = 'True'" |
711 |
cursor.execute(sqlQ, (build_dict['ebuild_version'], build_dict['checksum'], build_dict['package_id'])) |
712 |
- ebuild_id = cursor.fetchone() |
713 |
+ ebuild_id_list = [] |
714 |
+ entries = cursor.fetchall() |
715 |
cursor.close() |
716 |
- if not ebuild_id is None: |
717 |
- return ebuild_id[0] |
718 |
+ if entries != []: |
719 |
+ for ebuild_id in entries: |
720 |
+ ebuild_id_list.append(ebuild_id[0]) |
721 |
+ return ebuild_id_list |
722 |
+ |
723 |
|
724 |
def get_build_job_id(connection, build_dict): |
725 |
cursor = connection.cursor() |
726 |
@@ -547,9 +576,9 @@ def get_hilight_info(connection): |
727 |
hilight.append(aadict) |
728 |
return hilight |
729 |
|
730 |
-def get error_info_list(connection): |
731 |
+def get_error_info_list(connection): |
732 |
cursor = connection.cursor() |
733 |
- sqlQ = 'SELECT error_id, error_name, error_search FROM error' |
734 |
+ sqlQ = 'SELECT error_id, error_name, error_search FROM errors_info' |
735 |
cursor.execute(sqlQ) |
736 |
entries = cursor.fetchall() |
737 |
cursor.close() |
738 |
@@ -650,3 +679,25 @@ def add_new_buildlog(connection, build_dict, build_log_dict): |
739 |
del_old_build_jobs(connection, build_dict['build_job_id']) |
740 |
cursor.close() |
741 |
return build_log_id |
742 |
+ |
743 |
+def add_e_info(connection, log_id, emerge_info, e_info_hash): |
744 |
+ cursor = connection.cursor() |
745 |
+ sqlQ1 = 'SELECT einfo_id FROM emerge_info WHERE checksum = %s' |
746 |
+ sqlQ2 = 'UPDATE build_logs SET einfo_id = %s WHERE build_log_id = %s' |
747 |
+ sqlQ3 ='INSERT INTO emerge_info (checksum, emerge_info_text) VALUES (%s, %s)' |
748 |
+ sqlQ4 = 'SELECT LAST_INSERT_ID()' |
749 |
+ cursor.execute(sqlQ1, (e_info_hash,)) |
750 |
+ entries = cursor.fetchall() |
751 |
+ if entries != []: |
752 |
+	cursor.execute(sqlQ2, (entries[0][0], log_id,)) |
753 |
+ connection.commit() |
754 |
+ cursor.close() |
755 |
+ return None |
756 |
+ cursor.execute(sqlQ3, (e_info_hash, emerge_info,)) |
757 |
+ cursor.execute(sqlQ4) |
758 |
+ entries = cursor.fetchall() |
759 |
+	cursor.execute(sqlQ2, (entries[0][0], log_id,)) |
760 |
+ connection.commit() |
761 |
+ cursor.close() |
762 |
+	return entries[0][0] |
763 |
+ |
764 |
|
765 |
diff --git a/gobs/pym/package.py b/gobs/pym/package.py |
766 |
index 7c37f7c..12f1afa 100644 |
767 |
--- a/gobs/pym/package.py |
768 |
+++ b/gobs/pym/package.py |
769 |
@@ -1,5 +1,6 @@ |
770 |
from __future__ import print_function |
771 |
import portage |
772 |
+from portage.xml.metadata import MetaDataXML |
773 |
from gobs.flags import gobs_use_flags |
774 |
from gobs.manifest import gobs_manifest |
775 |
from gobs.text import get_ebuild_cvs_revision |
776 |
@@ -153,22 +154,27 @@ class gobs_package(object): |
777 |
add_gobs_logs(self._conn, log_msg, "info", self._config_id) |
778 |
i = i +1 |
779 |
|
780 |
- def get_package_metadataDict(self, pkgdir, package): |
781 |
+ def get_package_metadataDict(self, pkgdir, package_id): |
782 |
# Make package_metadataDict |
783 |
attDict = {} |
784 |
package_metadataDict = {} |
785 |
- changelog_checksum_tree = portage.checksum.sha256hash(pkgdir + "/ChangeLog") |
786 |
- changelog_text_tree = get_file_text(pkgdir + "/ChangeLog") |
787 |
- metadata_xml_checksum_tree = portage.checksum.sha256hash(pkgdir + "/metadata.xml") |
788 |
- metadata_xml_text_tree = get_file_text(pkgdir + "/metadata.xml") |
789 |
- attDict['changelog_checksum'] = changelog_checksum_tree[0] |
790 |
- attDict['changelog_text'] = changelog_text_tree |
791 |
- attDict['metadata_xml_checksum'] = metadata_xml_checksum_tree[0] |
792 |
- attDict['metadata_xml_text'] = metadata_xml_text_tree |
793 |
- package_metadataDict[package] = attDict |
794 |
+ # changelog_checksum_tree = portage.checksum.sha256hash(pkgdir + "/ChangeLog") |
795 |
+ # changelog_text_tree = get_file_text(pkgdir + "/ChangeLog") |
796 |
+		pkg_md = MetaDataXML(pkgdir + "/metadata.xml") |
797 |
+ #metadata_xml_text_tree = get_file_text(pkgdir + "/metadata.xml") |
798 |
+ # attDict['changelog_checksum'] = changelog_checksum_tree[0] |
799 |
+ # attDict['changelog_text'] = changelog_text_tree |
800 |
+ # attDict['metadata_xml_herds'] = pkg_md.herds() |
801 |
+ md_email_list = [] |
802 |
+ for maint in pkg_md.maintainers(): |
803 |
+ md_email_list.append(maint.email) |
804 |
+ attDict['metadata_xml_email'] = md_email_list |
805 |
+ attDict['metadata_xml_checksum'] = portage.checksum.sha256hash(pkgdir + "/metadata.xml")[0] |
806 |
+ #attDict['metadata_xml_text'] = metadata_xml_text_tree |
807 |
+ package_metadataDict[package_id] = attDict |
808 |
return package_metadataDict |
809 |
|
810 |
- def add_package(self, packageDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree): |
811 |
+ def add_package(self, packageDict, package_metadataDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree): |
812 |
# Use packageDict to update the db |
813 |
ebuild_id_list = add_new_ebuild_sql(self._conn, package_id, packageDict) |
814 |
|
815 |
@@ -182,6 +188,10 @@ class gobs_package(object): |
816 |
if not old_ebuild_id_list == []: |
817 |
add_old_ebuild(self._conn, package_id, old_ebuild_id_list) |
818 |
|
819 |
+ package_metadata_checksum_sql = get_package_metadata_sql(self._conn, package_id) |
820 |
+		if package_metadata_checksum_sql is None or package_metadata_checksum_sql != package_metadataDict[package_id]['metadata_xml_checksum']: |
821 |
+ update_package_metadata(self._conn, package_metadataDict) |
822 |
+ |
823 |
# update the cp manifest checksum |
824 |
update_manifest_sql(self._conn, package_id, manifest_checksum_tree) |
825 |
|
826 |
@@ -215,7 +225,8 @@ class gobs_package(object): |
827 |
add_gobs_logs(self._conn, log_msg, "info", self._config_id) |
828 |
return |
829 |
package_id = add_new_manifest_sql(self._conn, cp, repo) |
830 |
- |
831 |
+ |
832 |
+		package_metadataDict = self.get_package_metadataDict(pkgdir, package_id) |
833 |
# Get the ebuild list for cp |
834 |
mytree = [] |
835 |
mytree.append(repodir) |
836 |
@@ -234,7 +245,7 @@ class gobs_package(object): |
837 |
for cpv in sorted(ebuild_list_tree): |
838 |
packageDict[cpv] = self.get_packageDict(pkgdir, cpv, repo) |
839 |
|
840 |
- self.add_package(packageDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree) |
841 |
+ self.add_package(packageDict, package_metadataDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree) |
842 |
log_msg = "C %s:%s ... Done." % (cp, repo) |
843 |
add_gobs_logs(self._conn, log_msg, "info", self._config_id) |
844 |
|
845 |
@@ -318,8 +329,8 @@ class gobs_package(object): |
846 |
# Remove cpv from packageDict and add ebuild to new ebuils list |
847 |
del packageDict[cpv] |
848 |
new_ebuild_id_list.append(get_ebuild_id_db(self._conn, ebuild_version_checksum_tree, package_id)[0]) |
849 |
- |
850 |
- self.add_package(packageDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree) |
851 |
+		package_metadataDict = self.get_package_metadataDict(pkgdir, package_id) |
852 |
+ self.add_package(packageDict, package_metadataDict, package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree) |
853 |
|
854 |
log_msg = "C %s:%s ... Done." % (cp, repo) |
855 |
add_gobs_logs(self._conn, log_msg, "info", self._config_id) |