Contents of /mga-gnome/trunk/mga-gnome

Revision 8389
Sat May 25 14:48:34 2013 UTC by ovitters
File size: 38252 byte(s)
checkout gnome modules 5 at a time
#!/usr/bin/python -u

# A lot of the code comes from ftpadmin, see
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# version comparison:
import rpm

# opening tarballs:
import tarfile
import gzip
import bz2
import lzma # pyliblzma

# getting links from HTML document:
from sgmllib import SGMLParser
import urllib2
import urlparse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
import email.utils
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# version freeze
import datetime

# packages --sort
import itertools

# check-latest
import requests

import multiprocessing

SLEEP_INITIAL=180
SLEEP_REPEAT=30
SLEEP_TIMES=30

re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

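# Illustrative note (the example version is an assumption): re_majmin captures the
# "major.minor" prefix of a version string, e.g. re_majmin.match('3.8.2').group(1)
# gives '3.8'. re_version is defined here but not referenced elsewhere in this script.
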
def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """

    return rpm.labelCompare(('1', a, '1'), ('1', b, '1'))

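# Illustrative examples (assumed inputs), relying on rpm's segment-wise label
# comparison where numeric segments compare numerically rather than lexically:
#   version_cmp('3.8.2', '3.8.10')  # -> -1 (3.8.10 is newer)
#   version_cmp('3.8.2', '3.8.2')   # ->  0
#   version_cmp('3.10.0', '3.9.9')  # ->  1
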
def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if ( latest is None or version_cmp(version, latest) > 0 ) \
           and ( max_version is None or version_cmp(version, max_version) < 0 ):
            latest = version
    return latest

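# Illustrative examples with assumed version lists:
#   get_latest_version(['3.6.2', '3.8.1', '3.9.1'])                     # -> '3.9.1'
#   get_latest_version(['3.6.2', '3.8.1', '3.9.1'], max_version='3.9')  # -> '3.8.1'
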
MAJOR_VERSIONS = {
    # NAMES MUST BE IN LOWERCASE!
    'networkmanager': set(('0.9',)),
    'networkmanager-applet': set(('0.9',)),
    'networkmanager-openconnect': set(('0.9',)),
    'networkmanager-openvpn': set(('0.9',)),
    'networkmanager-pptp': set(('0.9',)),
    'networkmanager-vpnc': set(('0.9',))
}

def get_majmin(version, module=None):
    nrs = version.split('.')

    if module and module.lower() in MAJOR_VERSIONS:
        module_versions = [version.split(".") for version in MAJOR_VERSIONS[module.lower()]]

        nrstest = nrs[:]

        while len(nrstest) >= 2:
            if nrstest in module_versions:
                return (".".join(nrs[:len(nrstest)]), nrs[len(nrstest)])

            nrstest.pop()

    return (nrs[0], nrs[1])


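# Illustrative examples (the version strings are assumptions):
#   get_majmin('3.8.2')                      # -> ('3', '8')
#   get_majmin('0.9.8.2', 'NetworkManager')  # -> ('0.9', '8'), via MAJOR_VERSIONS
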
def get_safe_max_version(version, module=None):
    if not re_majmin.match(version):
        return None

    majmin = get_majmin(version, module)

    min_nr = long(majmin[1])

    if min_nr % 2 == 0:
        return "%s.%d" % (majmin[0], min_nr + 1)
    else:
        return "%s.%d" % (majmin[0], min_nr + 2)

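# Illustrative examples (assumed inputs); "safe" means staying within the current
# stable series under GNOME's even-minor-is-stable convention:
#   get_safe_max_version('3.8.2')  # -> '3.9'  (only 3.8.x upgrades are safe)
#   get_safe_max_version('3.9.1')  # -> '3.11' (up to the next stable 3.10.x is safe)
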
def judge_version_increase(version_old, version_new, module=None):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [get_majmin(ver, module) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    # x = major
    # y = minor : even means stable
    # z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # Check/ensure major version number is the same
    if majmins[0][0] != majmins[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (long(majmins[0][1]) % 2 == 0, long(majmins[1][1]) % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (4, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (6, "Stable version increase")

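# Illustrative judgements for assumed version pairs:
#   judge_version_increase('3.8.1', '3.8.2')   # -> (10, None)
#   judge_version_increase('3.8.2', '3.10.0')  # -> (6, 'Stable version increase')
#   judge_version_increase('3.8.2', '3.9.1')   # -> (1, 'Stable to unstable increase')
#   judge_version_increase('3.8.2', '3.8.1')   # -> (-3, 'Version 3.8.1 is older than current version 3.8.2!')
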
def line_input (file):
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
204 """Return a sequence of possible editor binaries for the current platform"""

    editors = []

    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            if e.errno == 2:
                continue
            raise

        if ret == 127:
            continue

        return True

class urllister(SGMLParser):
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        href = [v for k, v in attrs if k=='href']
        if href:
            self.urls.extend(href)

class XzTarFile(tarfile.TarFile):

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
244 """Open gzip compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            # opening from an existing file object is not supported here
            raise tarfile.CompressionError("xz archives can only be opened by name")

        fileobj = lzma.LZMAFile(name, mode)

        try:
            # lzma doesn't immediately return an error;
            # try and read a bit of data to determine if it is a valid xz file
            fileobj.read(tarfile.BLOCKSIZE)
            fileobj.seek(0)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t

if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open

def is_valid_hash(path, algo, hexdigest):
    if algo not in hashlib.algorithms:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest

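# Illustrative use (the path and digest variable are placeholders, not real values):
#   is_valid_hash('SOURCES/foo-1.0.tar.xz', 'sha256', expected_hexdigest)
# returns True only when the file's sha256 digest equals expected_hexdigest.
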
class SpecFile(object):
    re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)

    def __init__(self, path, module=None):
        self.path = path
        self.cwd = os.path.dirname(path)
        self.module = module

    @property
    def version(self):
        return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0]
    @property
    def sources(self):
        ts = rpm.ts()
        spec = ts.parseSpec(self.path)
        srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
                      else spec.sources()
        return dict((os.path.basename(name), name) for name, no, flags in srclist)

    def update(self, version, force=False):
        """Update specfile (increase version)"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version, self.module)

        if judgement < 0:
            print >>sys.stderr, "ERROR: %s!" % (msg)
            return False

        if judgement < 5:
            print "WARNING: %s!" % (msg)
            if not force: return False

        # XXX - os.path.join is hackish
        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')])
        if svn_diff_output != '':
            print svn_diff_output
            print >>sys.stderr, "ERROR: Package has uncommitted changes!"
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print >>sys.stderr, "WARNING: Force used; reverting svn changes"
                subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')])
            except subprocess.CalledProcessError:
                return False

        with open(self.path, "r") as f:
            data = f.read()

        if data.count("%subrel") != 0:
            print >>sys.stderr, "ERROR: %subrel found; don't know what to do!"
            return False

        if data.count("%mkrel") != 1:
            print >>sys.stderr, "ERROR: Expected exactly one %mkrel; don't know what to do!"
            return False

        data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not increase version!"
            return False

        data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not reset release!"
            return False

        # Overwrite file with new version number
        write_file(self.path, data)


        # Verify that RPM also agrees that version number has changed
        if self.version != version:
361 print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
            return False


        # Try to download the new tarball various times and wait between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1: time.sleep(SLEEP_REPEAT * 2 ** (tries // 10))
            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError, e:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if e.returncode != 1:
                    subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')])
                    return False
        else:
            # failed to download tarball
            subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')])
            return False


        try:
            # Check patches still apply
            subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
        except subprocess.CalledProcessError:
            logfile = os.path.join(os.path.dirname(self.path), 'log.%s' % os.path.splitext(os.path.basename(self.path))[0])
            if os.path.exists(logfile):
                subprocess.call(['tail', '-n', '15', logfile])
            return False

        return True

class Patch(object):
    """Do things with patches"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file"""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)

    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLS
                        if info['data'].startswith('//') and info['header'].lower() == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    last_header = None
        except IOError:
            pass

        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
              or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '') \
              or ('Author' in headers and headers['Author'].strip() != '') \
              or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author


class Upstream(object):

    URL="http://download.gnome.org/sources/"
    limit = None
    _cache_versions = {}

    def __init__(self):
        urlopen = urllib2.build_opener()

        good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

        # Get the files
        usock = urlopen.open(self.URL)
        parser = urllister()
        parser.feed(usock.read())
        usock.close()
        parser.close()
        files = parser.urls

        tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])
        if self.limit is not None:
            tarballs.intersection_update(self.limit)

        self.names = tarballs

    @classmethod
    def versions(cls, module):
        # XXX - ugly
        if module not in cls._cache_versions:
            versions = None

            url = '%s%s/cache.json' % (cls.URL, module)
            r = requests.get(url)
            j = r.json
            if j is not None and len(j) > 2 and module in j[2]:
                versions = j[2][module]

            cls._cache_versions[module] = versions

        return cls._cache_versions[module]

class Downstream(object):
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

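    # Illustrative match (the filename is an assumed example):
    #   re_file.match('gnome-shell-3.8.2.tar.xz').groupdict() ->
    #   {'module': 'gnome-shell', 'oldversion': None, 'version': '3.8.2', 'format': 'tar.xz'}
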
582 MEDIA="Core Release Source"
583 PKGROOT='~/pkgs'
584 DISTRO=None
585
586 def __init__(self):
587 contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA], close_fds=True).strip("\n").splitlines()
588
589 FILES = {}
590 TARBALLS = {}
591
592 for line in contents:
593 try:
594 srpm, version, filename = line.split("|")
595 except ValueError:
596 print >>sys.stderr, line
597 continue
598
599 if '.tar' in filename:
600 r = self.re_file.match(filename)
601 if r:
602 fileinfo = r.groupdict()
603 module = fileinfo['module']
604
605 if module not in TARBALLS:
606 TARBALLS[module] = {}
607
608 if srpm in TARBALLS[module]:
609 # srpm seen before, check if version is newer
610 if version_cmp(TARBALLS[module][srpm], version) == 1:
611 TARBALLS[module][srpm] = version
612 else:
613 TARBALLS[module][srpm] = version
614
615 if srpm not in FILES:
616 FILES[srpm] = set()
617 FILES[srpm].add(filename)
618
619 self.tarballs = TARBALLS
620 self.files = FILES
621
622 @classmethod
623 def co(cls, package, cwd=None):
624 if cwd is None:
625 cwd = os.path.expanduser(cls.PKGROOT)
626
627 cmd = ['mgarepo', 'co']
628 if cls.DISTRO:
629 cmd.extend(('-d', cls.DISTRO))
630 cmd.append(package)
631 return subprocess.check_call(cmd, cwd=cwd)
632
633 def get_downstream_from_upstream(self, upstream, version):
634 if upstream not in self.tarballs:
635 raise ValueError("No packages for upstream name: %s" % upstream)
636
637 if len(self.tarballs[upstream]) == 1:
638 return self.tarballs[upstream].keys()
639
640 # Directories packages are located in
641 root = os.path.expanduser(self.PKGROOT)
642
643 packages = {}
644 for package in self.tarballs[upstream].keys():
645 cwd = os.path.join(root, package)
646
647 # Checkout package to ensure the checkout reflects the latest changes
648 try:
649 self.co(package, cwd=root)
650 except subprocess.CalledProcessError:
651 raise ValueError("Multiple packages found and cannot checkout %s" % package)
652
653 # Determine version from spec file
654 try:
655 packages[package] = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=upstream).version
656 except subprocess.CalledProcessError:
657 raise ValueError("Multiple packages found and cannot determine version of %s" % package)
658
659 # Return all packages reflecting the current version
660 matches = [package for package in packages if packages[package] == version]
661 if len(matches):
662 return matches
663
664 # Return all packages reflecting the version before the current version
665 latest_version = get_latest_version(packages.values(), max_version=version)
666 matches = [package for package in packages if packages[package] == latest_version]
667 if len(matches):
668 return matches
669
670 # Give up
671 raise ValueError("Multiple packages found and cannot determine package for version %s" % version)
672
673 def write_file(path, data):
674 with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst:
675 fdst.write(data)
676 fdst.flush()
677 os.rename(fdst.name, path)
678
679 def cmd_co_multi(l):
680 package, module, package_version, spec_version, downstream_files = l
681
682 print "%s => %s" % (module, package)
683 try:
684 Downstream.co(package)
685 except subprocess.CalledProcessError:
686 pass
687
688 def cmd_co(options, parser):
689 p = multiprocessing.Pool(5)
690 p.map(cmd_co_multi, sorted(join_streams()))
691
692 def join_streams(show_version=False, only_diff_version=False):
693 root = os.path.expanduser(Downstream.PKGROOT)
694
695 upstream = Upstream().names
696 downstream = Downstream()
697
698 matches = upstream & set(downstream.tarballs.keys())
699 for module in matches:
700 for package in downstream.tarballs[module].keys():
701 package_version = downstream.tarballs[module][package]
702 spec_version = None
703 if show_version or only_diff_version:
704 cwd = os.path.join(root, package)
705 try:
706 spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=module).version
707 except subprocess.CalledProcessError:
708 spec_version = 'N/A'
709
710 if only_diff_version and package_version == spec_version:
711 continue
712
713 yield (package, module, package_version, spec_version, downstream.files[package])
714
715 def cmd_group_owner(options, parser):
716 groups = set(options.group)
717
718 output = [pkg.split("\t") for pkg in subprocess.check_output(["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]).splitlines()]
719 if not output: return
720
721 # Filter by groups
722 output = [pkg for pkg in output if pkg[0] in groups]
723 if not output: return
724
725 packages = {}
726 for group, name, sourcerpm, version, release in output:
727 if group not in packages:
728 packages[group] = {}
729
730 source = sourcerpm if sourcerpm else name
731 end = ".src.rpm"
732 if source.endswith(end): source = source[:len(source) - len(end)]
733 end = "-%s-%s" %(version, release)
734 if source.endswith(end): source = source[:len(source) - len(end)]
735
736 if source not in packages[group]: packages[group][source] = set()
737
738 packages[group][source].add(name)
739
740
741 maints = dict([line.rpartition(" ")[::2] for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).splitlines()])
742
    def get_output(maints, packages):
        for source in packages.keys():
            maint = maints.get(source, "?")

            yield "\t".join((maint, source, ",".join(sorted(packages[source]))))

    first = True
    for group in packages.keys():
        if first:
            first = False
        else:
            print ""
            print ""
        print group
        print ""

        for line in sorted(get_output(maints, packages[group])):
            print line

def cmd_ls(options, parser):
    streams = join_streams(show_version=options.show_version, only_diff_version=options.diff)
    if options.sort:
        SORT=dict(zip(options.sort.read().splitlines(), itertools.count()))

        streams = sorted(streams, key=lambda a: (SORT.get(a[1], 9999), a[0]))
    else:
        streams = sorted(streams)

    for package, module, package_version, spec_version, downstream_files in streams:
        sys.stdout.write(package)
        if options.upstream: sys.stdout.write("\t%s" % module)
        if options.show_version: sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        print

def cmd_check_latest(options, parser):
    streams = join_streams(show_version=True)

    for package, module, package_version, spec_version, downstream_files in streams:
        upgrade=set()
        sys.stdout.write(package)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))

        safe_max_version = get_safe_max_version(spec_version, module=module)

        versions = Upstream.versions(module)
        if versions:
            latest_version = get_latest_version(versions)
            safe_version = get_latest_version(versions, safe_max_version)

            cmp_latest = version_cmp(latest_version, spec_version)
            if cmp_latest < 0:
                latest_version = 'N/A'
                upgrade.add('l')
            elif cmp_latest > 0:
                upgrade.add('L')

            cmp_safe = version_cmp(safe_version, spec_version)
            if cmp_safe < 0:
                safe_version = 'N/A'
                upgrade.add('s')
            elif cmp_safe > 0:
                upgrade.add('S')

            sys.stdout.write("\t%s" % latest_version)
            sys.stdout.write("\t%s" % safe_version)
            sys.stdout.write("\t%s" % "".join(sorted(upgrade)))

        print

def cmd_patches(options, parser):
    root = os.path.expanduser(Downstream.PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:

                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
                valid = ""
                forwarded = ""
                if p.dep3['headers']:
                    forwarded = p.dep3['headers'].get('Forwarded', "no")
                    if p.dep3['valid']:
                        valid="VALID"
                print "\t".join((module, package, str(p), forwarded, valid))

def cmd_dep3(options, parser):
    p = Patch(options.patch)
    p.add_dep3()

def cmd_package_new_version(options, parser):
    # Determine the package name
    if options.upstream:
        try:
            package = Downstream().get_downstream_from_upstream(options.package, options.version)[0]
        except ValueError, e:
            print >>sys.stderr, "ERROR: %s" % e
            sys.exit(1)
    else:
        package = options.package

    # Directories packages are located in
    root = os.path.expanduser(Downstream.PKGROOT)
    cwd = os.path.join(root, package)

    # Checkout package to ensure the checkout reflects the latest changes
    try:
        Downstream.co(package, cwd=root)
    except subprocess.CalledProcessError:
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    # XXX - module should reflect upstream name, this gives it the package name
    s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=package)
    print "%s => %s" % (s.version, options.version)
    if not s.update(options.version, force=options.force):
        sys.exit(1)

    # Check hash, if given
    if options.hexdigest is not None:
        sources = [name for name, origname in s.sources.iteritems() if '://' in origname]
        if not len(sources):
            print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!"
            sys.exit(1)

        # If there are multiple sources, try to see if there is a preferred name
        # --> needed for metacity hash check (multiple tarball sources)
        if len(sources) > 1:
            preferred_name = '%s-%s.tar.xz' % (package, options.version)
            if preferred_name in sources:
                sources = [preferred_name]

        for filename in sources:
            path = os.path.join(cwd, "SOURCES", filename)
            if not is_valid_hash(path, options.algo, options.hexdigest):
                print >>sys.stderr, "ERROR: Hash check failed for %s!" % path
                print >>sys.stderr, "ERROR: Reverting changes!"
                subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
                sys.exit(1)

    try:
        # If we made it this far, checkin the changes
        subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd)

        # Submit is optional
        if options.submit:
            cmd = ['mgarepo', 'submit']
            if Downstream.DISTRO:
                cmd.extend(('--define', 'section=core/updates_testing', '-t', Downstream.DISTRO))
            subprocess.check_call(cmd, cwd=cwd)
    except subprocess.CalledProcessError:
        sys.exit(1)

def cmd_parse_ftp_release_list(options, parser):
    def _send_reply_mail(contents, orig_msg, to, packages=[], error=False):
897 """Send an reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')

        if error:
            # XXX - ugly
            contents.seek(0)
            lastline = contents.read().rstrip().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            # Remove things like " - " (youri output from mgarepo submit)
            lastline = re.sub(r'^\s+-\s+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""

        if packages:
            subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror)
        else:
            subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror)

        msg['Subject'] = subject
        msg['To'] = to
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--']
        cmd.extend([to])
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        p.stdin.write(msg.as_string())
        p.stdin.flush()
        p.stdin.close()
        p.wait()


    msg = email.message_from_file(sys.stdin)

    if options.mail:
        stdout = tempfile.TemporaryFile()
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    try:
        module = msg['X-Module-Name']
        version = msg['X-Module-Version']
        hexdigest = msg['X-Module-SHA256-tar.xz']
    except KeyError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    try:
        packages = Downstream().get_downstream_from_upstream(module, version)
    except ValueError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    if options.wait:
        # maildrop aborts and will try to deliver after 5min
        # fork to avoid this
        if os.fork() != 0: sys.exit(0)
        # wait SLEEP_INITIAL after the message was sent
        secs = SLEEP_INITIAL
        t = email.utils.parsedate_tz(msg['Date'])
        if t is not None:
            msg_time = email.utils.mktime_tz(t)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0: time.sleep(secs)

    error = False
    for package in packages:
        cmd = ['mga-gnome', 'increase', '--hash', hexdigest]
        if options.submit:
            cmd.append('--submit')
        if options.force:
            cmd.append('--force')
        cmd.extend((package, version))
        if subprocess.call(cmd, stdout=stdout, stderr=stderr):
            error = True

    if options.mail: _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error)

def main():
    description = """Mageia GNOME commands."""
    epilog="""Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description,epilog=epilog)
    parser.add_argument("-l", "--limit", type=argparse.FileType('r', 0),
                        dest="limit_upstream", metavar="FILE",
                        help="File containing upstream names")
    parser.add_argument("-d", "--distro", action="store", dest="distro",
                        help="Distribution release")

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')
    # co
    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(
        func=cmd_co
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument("--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument("--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.add_argument("--sort", type=argparse.FileType('r', 0),
                           dest="sort", metavar="FILE",
                           help="Sort packages according to order in given FILE")

    subparser.set_defaults(
        func=cmd_ls, upstream=False, show_version=False, diff=False
    )

    subparser = subparsers.add_parser('group-owner', help='list packages by group')
    subparser.add_argument('group', metavar="GROUP", nargs='+')

    subparser.set_defaults(
        func=cmd_group_owner
    )

    subparser = subparsers.add_parser('check-latest', help='check for latest version of packages')
    subparser.set_defaults(
        func=cmd_check_latest
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(
        func=cmd_dep3, path=False
    )

    subparser = subparsers.add_parser('increase', help='Increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("--no-submit", action="store_false", dest="submit",
                           help="Do not commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest",
                           help="Hexdigest of the hash")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=argparse.SUPPRESS, upstream=False, hexdigest=None, algo="sha256",
        force=False
    )

    subparser = subparsers.add_parser('gnome-release-email', help='Submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-f", "--force", action="store_true",
                           help="Force submission")
    subparser.set_defaults(
        func=cmd_parse_ftp_release_list, force=False, wait=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()
    if options.limit_upstream:
        Upstream.limit = set(options.limit_upstream.read().strip("\n").splitlines())

    if not hasattr(options, 'submit'):
        options.submit = not options.distro

    if options.distro:
        Downstream.PKGROOT = os.path.join('~/pkgs', options.distro)
        Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source,Core {0} Updates Testing Source".format(options.distro)
        Downstream.DISTRO = options.distro

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    os.environ['PYTHONUNBUFFERED'] = '1'
    main()

Properties

svn:executable *
