#!/usr/bin/python

# A lot of the code comes from ftpadmin, see
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# version comparison:
import rpm

# opening tarballs:
import tarfile
import gzip
import bz2
import lzma # pyliblzma

# getting links from HTML document:
from sgmllib import SGMLParser
import urllib2
import urlparse

MEDIA = "Core Release Source"
URL = "http://download.gnome.org/sources/"
PKGROOT = '~/pkgs'

re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """

    return rpm.labelCompare(('1', a, '1'), ('1', b, '1'))

def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if ( latest is None or version_cmp(version, latest) > 0 ) \
           and ( max_version is None or version_cmp(version, max_version) < 0 ):
            latest = version
    return latest
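
# Note: version_cmp() uses RPM version ordering, so for example
# get_latest_version(['3.2.0', '3.2.1', '3.3.1'], max_version='3.3') == '3.2.1'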

def line_input(file):
    """Yield lines from file with the trailing newline stripped"""
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
    """Open filename in a text editor

    Tries $VISUAL, $EDITOR and a few common fallbacks; returns True as soon
    as one of them could be run."""

    editors = []

    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            if e.errno == errno.ENOENT:
                continue
            raise

        if ret == 127:
            continue

        return True

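# Collects the targets of all <a href=...> links found in an HTML page;
# used below to scrape the directory listing at URL.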
class urllister(SGMLParser):
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        href = [v for k, v in attrs if k == 'href']
        if href:
            self.urls.extend(href)

class XzTarFile(tarfile.TarFile):

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open xz compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            # Wrapping an existing file object would need an LZMA proxy class;
            # only opening by name is supported here.
            raise tarfile.CompressionError("xz archives can only be opened by name")

        fileobj = lzma.LZMAFile(name, mode)

        try:
            if mode == "r":
                # lzma doesn't immediately return an error; try to read a bit
                # of data to determine if it is a valid xz file
                fileobj.read(1024)
                fileobj.seek(0)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t

# Make tarfile.open() aware of .tar.xz archives on Python versions whose
# tarfile module does not handle xz itself
if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open

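# Wrapper around a package's .spec file (SPECS/<package>.spec in an mgarepo
# checkout); knows how to query the current version and bump Version/Release.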
class SpecFile(object):
    re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)

    def __init__(self, path):
        self.path = path
        self.cwd = os.path.dirname(path)

    @property
    def version(self):
        return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0]

    def update(self, version):
        """Update specfile (increase version)"""
        cur_version = self.version

        compare = version_cmp(version, cur_version)

        if compare == 0:
            print >>sys.stderr, "ERROR: Already at version %s!" % (cur_version)
            return False

        if compare != 1:
            print >>sys.stderr, "ERROR: Version %s is older than current version %s!" % (version, cur_version)
            return False

        # XXX - os.path.join is hackish
        if subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')]) != '':
            print >>sys.stderr, "ERROR: Package has uncommitted changes!"
            return False

        with open(self.path, "r") as f:
            data = f.read()

        if data.count("%mkrel") != 1:
            print >>sys.stderr, "ERROR: Expected exactly one %mkrel; don't know what to do!"
            return False

        data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not increase version!"
            return False

        data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not reset release!"
            return False

        # Overwrite file with new version number
        write_file(self.path, data)

        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print >>sys.stderr, "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
            return False

        try:
            # Download new tarball
            subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
            # Check patches still apply
            subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
        except subprocess.CalledProcessError:
            return False

        return True

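# Patch files are expected to carry DEP-3 style headers (Description/Subject,
# Origin/Author/From, Forwarded, Bug, ...) in comment lines at the top.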
class Patch(object):
    """Do things with patches"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file"""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)

    # Example of a filled in DEP-3 header block:
    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLs
                        if info['data'].startswith('//') and info['header'].lower() == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    last_header = None
        except IOError:
            pass

        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
                or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '')
                or ('Author' in headers and headers['Author'].strip() != '')
                or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author

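# Scrape the directory listing at URL (http://download.gnome.org/sources/);
# every subdirectory name is treated as an upstream GNOME module name.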
def get_upstream_names():
    urlopen = urllib2.build_opener()

    good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

    # Get the files
    usock = urlopen.open(URL)
    parser = urllister()
    parser.feed(usock.read())
    usock.close()
    parser.close()
    files = parser.urls

    tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])

    return tarballs

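# Map downstream (Mageia) source packages to the tarballs they ship.  Each
# urpmf line is expected to have the form "<srpm>:<filename>"; e.g. a
# hypothetical "gnome-shell:gnome-shell-3.2.1.tar.xz" yields module "gnome-shell".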
def get_downstream_names():
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

    contents = subprocess.check_output(['urpmf', '--files', '.', "--media", MEDIA], close_fds=True).strip("\n").splitlines()

    FILES = {}
    TARBALLS = {}

    for line in contents:
        try:
            srpm, filename = line.split(":")
        except ValueError:
            print >>sys.stderr, line
            continue

        if '.tar' in filename:
            r = re_file.match(filename)
            if r:
                fileinfo = r.groupdict()
                module = fileinfo['module']

                if module not in TARBALLS:
                    TARBALLS[module] = set()
                TARBALLS[module].add(srpm)

        if srpm not in FILES:
            FILES[srpm] = set()
        FILES[srpm].add(filename)

    return TARBALLS, FILES


def write_file(path, data):
    """Overwrite path safely: write to a temporary file in the same
    directory, then rename it over the original."""
    with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst:
        fdst.write(data)
        fdst.flush()
        os.rename(fdst.name, path)

def cmd_co(options, parser):
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    cwd = os.path.expanduser(PKGROOT)

    matches = upstream & set(downstream.keys())
    for module in matches:
        print module, "\t".join(downstream[module])
        for package in downstream[module]:
            subprocess.call(['mgarepo', 'co', package], cwd=cwd)

def cmd_ls(options, parser):
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    matches = upstream & set(downstream.keys())
    for module in matches:
        print "\n".join(sorted(downstream[module]))

def cmd_patches(options, parser):
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    path = os.path.expanduser(PKGROOT)

    matches = upstream & set(downstream.keys())
    for module in sorted(matches):
        for srpm in downstream[module]:
            for filename in downstream_files[srpm]:
                if '.patch' in filename or '.diff' in filename:

                    p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path)
                    valid = ""
                    forwarded = ""
                    if p.dep3['headers']:
                        forwarded = p.dep3['headers'].get('Forwarded', "no")
                        if p.dep3['valid']:
                            valid = "VALID"
                    print "\t".join((module, srpm, str(p), forwarded, valid))

def cmd_dep3(options, parser):
    p = Patch(options.patch)
    p.add_dep3()

def cmd_package_new_version(options, parser):
    # Determine the package name
    if options.upstream:
        downstream, downstream_files = get_downstream_names()

        if options.package not in downstream:
            print >>sys.stderr, "ERROR: No packages for upstream name: %s" % options.package
            sys.exit(1)

        if len(downstream[options.package]) != 1:
            # XXX - Make it more intelligent
            print >>sys.stderr, "ERROR: Multiple packages found for %s: %s" % (options.package, ", ".join(downstream[options.package]))
            sys.exit(1)

        package = list(downstream[options.package])[0]
    else:
        package = options.package

    # Directories packages are located in
    root = os.path.expanduser(PKGROOT)
    cwd = os.path.join(root, package)

    # Checkout package to ensure the checkout reflects the latest changes
    try:
        subprocess.check_call(['mgarepo', 'co', package], cwd=root)
    except subprocess.CalledProcessError:
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package))
    print "%s => %s" % (s.version, options.version)
    if not s.update(options.version):
        sys.exit(1)

    # We can even checkin and submit :-)
    if options.submit:
        try:
            # checkin changes
            subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd)
            # and submit
            subprocess.check_call(['mgarepo', 'submit'], cwd=cwd)
        except subprocess.CalledProcessError:
            sys.exit(1)


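# Example command lines (assuming the script is installed as "mga-gnome"):
#   mga-gnome packages                  list all GNOME packages
#   mga-gnome patches -p                list all GNOME patches with full paths
#   mga-gnome increase -u -s gnome-shell 3.2.1
#                                       bump a package to a new upstream version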
def main():
    description = """Mageia GNOME commands."""
    epilog = """Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description, epilog=epilog)

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')

    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(
        func=cmd_co
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.set_defaults(
        func=cmd_ls
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(
        func=cmd_dep3, path=False
    )

    subparser = subparsers.add_parser('increase', help='Increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=False, upstream=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    main()