/[soft]/mga-gnome/trunk/mga-gnome
ViewVC logotype

Contents of /mga-gnome/trunk/mga-gnome

Parent Directory Parent Directory | Revision Log Revision Log


Revision 3559 - (show annotations) (download)
Mon Mar 19 08:33:32 2012 UTC (12 years, 3 months ago) by ovitters
File size: 28489 byte(s)
determine downstream name in case of multiple possibilities
1 #!/usr/bin/python -u
2
3 # A lot of the code comes from ftpadmin, see
4 # http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
5 # Written by Olav Vitters
6
7 # basic modules:
8 import os
9 import os.path
10 import sys
11 import re
12 import subprocess
13
14 # command line parsing, error handling:
15 import argparse
16 import errno
17
18 # overwriting files by moving them (safer):
19 import tempfile
20 import shutil
21
22 # version comparison:
23 import rpm
24
25 # opening tarballs:
26 import tarfile
27 import gzip
28 import bz2
29 import lzma # pyliblzma
30
31 # getting links from HTML document:
32 from sgmllib import SGMLParser
33 import urllib2
34 import urlparse
35
36 # for checking hashes
37 import hashlib
38
39 # for parsing ftp-release-list emails
40 import email
41 from email.mime.text import MIMEText
42
43 # to be able to sleep for a while
44 import time
45
46 # version freeze
47 import datetime
48
MEDIA="Core Release Source"                   # urpmf media to query for downstream packages
URL="http://download.gnome.org/sources/"      # upstream GNOME tarball index
PKGROOT='~/pkgs'                              # root directory of the mgarepo package checkouts
SLEEP_INITIAL=180                             # seconds to wait after a release email before syncing
SLEEP_REPEAT=30                               # seconds between tarball download retries
SLEEP_TIMES=20                                # maximum number of tarball download attempts

# Extracts the "major.minor" prefix of a version string
re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
# Splits a version string into comparable tokens (not used in the visible code)
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')
58
def version_cmp(a, b):
    """Compare two version strings using RPM label comparison.

    Returns
    -1 if a < b
     0 if a == b
     1 if a > b
    """
    # Pin epoch and release to '1' so only the version part is compared
    label_a = ('1', a, '1')
    label_b = ('1', b, '1')
    return rpm.labelCompare(label_a, label_b)
69
def get_latest_version(versions, max_version=None):
    """Return the highest version from *versions*.

    If max_version is given, only versions strictly below max_version are
    considered. Returns None when nothing qualifies."""
    best = None
    for candidate in versions:
        # Skip anything at or above the cap
        if max_version is not None and version_cmp(candidate, max_version) >= 0:
            continue
        if best is None or version_cmp(candidate, best) > 0:
            best = candidate
    return best
81
def judge_version_increase(version_old, version_new):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # Same version, e.g. 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # Downgrade, e.g. 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [re_majmin.sub(r'\1', ver) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        # Only one of the two versions looks like x.y[.z]
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    # x = major
    # y = minor : even if stable
    # z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # e.g. 1.1.x -> 1.1.y
        return (10, None)

    # More detailed analysis needed, so figure out the numbers
    majmin_nrs = [map(long, ver.split('.')) for ver in majmins]

    # Check/ensure major version number is the same
    if majmin_nrs[0][0] != majmin_nrs[1][0]:
        # e.g. 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable (GNOME convention: even minor = stable)
    devstate = (majmin_nrs[0][1] % 2 == 0, majmin_nrs[1][1] % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # e.g. 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # e.g. 1.3.x -> 1.5.x
        return (4, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # e.g. 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # e.g. 1.0.x -> 1.2.x
    return (6, "Stable version increase")
153
def line_input(file):
    """Yield the lines of *file* with the trailing newline stripped."""
    for raw in file:
        yield raw[:-1] if raw.endswith('\n') else raw
160
def call_editor(filename):
    """Open *filename* in the user's preferred editor.

    Tries $VISUAL and $EDITOR first, then a list of common editors.
    Returns True as soon as one editor ran; returns None (falls off the
    end) if no editor could be started."""

    editors = []

    # Honour the user's configured editor(s) first
    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    # Common fallbacks when nothing is configured
    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            # errno 2 (ENOENT): binary not installed, try the next one
            if e.errno == 2:
                continue
            raise

        # 127 is the shell's "command not found" exit status
        if ret == 127:
            continue

        return True
184
class urllister(SGMLParser):
    """SGML parser that collects the href target of every <a> tag in self.urls"""
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        # Called by SGMLParser for each <a> tag encountered
        href = [v for k, v in attrs if k=='href']
        if href:
            self.urls.extend(href)
194
class XzTarFile(tarfile.TarFile):
    """tarfile.TarFile subclass adding support for xz (LZMA) compressed archives.

    Python 2's tarfile has no native xz support; this relies on the
    pyliblzma module imported as lzma."""

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open xz compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            # NOTE(review): no _LMZAProxy class is defined in this file or in
            # the py2 tarfile module, so this branch raises NameError if a
            # fileobj is ever passed -- confirm intended proxy class
            fileobj = _LMZAProxy(fileobj, mode)
        else:
            fileobj = lzma.LZMAFile(name, mode)

        try:
            # lzma doesn't immediately return an error, so try to read a bit
            # of data to determine whether it is a valid xz file.
            # BUGFIX: was _LZMAProxy.blocksize, an undefined name that made
            # every call raise NameError; use tarfile's block size instead.
            fileobj.read(tarfile.BLOCKSIZE)
            fileobj.seek(0)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t
225
# If the running tarfile module lacks xz support, replace tarfile.open with
# XzTarFile.open so .tar.xz archives can be opened transparently.
if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open
228
def is_valid_hash(path, algo, hexdigest):
    """Check that the file at *path* has the given hash.

    algo:      hash algorithm name (e.g. 'sha256')
    hexdigest: expected hex digest string

    Returns True if the digest matches, False otherwise.
    Raises ValueError for an unknown algorithm."""
    # hashlib.algorithms exists on Python 2.7; fall back to the py3 name so
    # the check keeps working either way
    valid_algos = getattr(hashlib, 'algorithms', None) or hashlib.algorithms_guaranteed
    if algo not in valid_algos:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    # hashlib.new is the documented constructor-by-name API (more general
    # than getattr, which only covers algorithms exposed as attributes)
    local_hash = hashlib.new(algo)

    # Hash the file in 32 KiB chunks to keep memory usage flat
    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest
242
243 class SpecFile(object):
244 re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
245 re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
246
247 def __init__(self, path):
248 self.path = path
249 self.cwd = os.path.dirname(path)
250
251 @property
252 def version(self):
253 return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0]
254 @property
255 def sources(self):
256 ts = rpm.ts()
257 spec = ts.parseSpec(self.path)
258 srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
259 else spec.sources()
260 return dict((os.path.basename(name), name) for name, no, flags in srclist)
261
262 def update(self, version, force=False):
263 """Update specfile (increase version)"""
264 cur_version = self.version
265
266 (judgement, msg) = judge_version_increase(cur_version, version)
267
268 if judgement < 0:
269 print >>sys.stderr, "ERROR: %s!" % (msg)
270 return False
271
272 if judgement < 5:
273 print "WARNING: %s!" % (msg)
274 if not force: return False
275
276 # XXX - os.path.join is hackish
277 if subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')]) != '':
278 print >>sys.stderr, "ERROR: Package has uncommitted changes!"
279 return False
280
281 with open(self.path, "rw") as f:
282 data = f.read()
283
284 if data.count("%mkrel") != 1:
285 print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!"
286 return False
287
288 data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
289 if nr != 1:
290 print >>sys.stderr, "ERROR: Could not increase version!"
291 return False
292
293 data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
294 if nr != 1:
295 print >>sys.stderr, "ERROR: Could not reset release!"
296 return False
297
298 # Overwrite file with new version number
299 write_file(self.path, data)
300
301
302 # Verify that RPM also agrees that version number has changed
303 if self.version != version:
304 print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
305 return False
306
307
308 # Try to download the new tarball various times and wait between attempts
309 tries = 0
310 while tries < SLEEP_TIMES:
311 tries += 1
312 if tries > 1: time.sleep(SLEEP_REPEAT)
313 try:
314 # Download new tarball
315 subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
316 # success, so exit loop
317 break
318 except subprocess.CalledProcessError, e:
319 # mgarepo sync returns 1 if the tarball cannot be downloaded
320 if e.returncode != 1:
321 return False
322 else:
323 return False
324
325
326 try:
327 # Check patches still apply
328 subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
329 except subprocess.CalledProcessError:
330 return False
331
332 return True
333
class Patch(object):
    """Do things with patches: show them, parse and add DEP-3 headers"""

    # DEP-3 header line, optionally prefixed with '#', e.g. "Author: foo"
    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    # Continuation line belonging to the previous header
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file

        Returns False if the patch already has valid DEP-3 headers;
        otherwise rewrites the patch in place and opens it in an editor."""
        if self.dep3['valid']:
            return False

        # Headers to add (in this order) when not yet present
        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                # BUGFIX: was fsrc.read(), which copied the entire file on the
                # first iteration and made the new headers land at the END of
                # the patch instead of after the existing headers
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)

    # Example of a filled-in DEP-3 header block:
    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None   # header a continuation line would extend
        last_nr = 0          # line number of the last header line seen
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLS ("http://..." looks like a header)
                        if info['data'].startswith('//') and info['header'].lower () == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    # Any other line ends the current header
                    last_header = None
        except IOError:
            pass

        # Valid DEP-3 needs a description (Description/Subject) AND an
        # origin (Origin/Author/From)
        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
                or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '') \
                or ('Author' in headers and headers['Author'].strip() != '') \
                or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        """Cached result of _read_dep3()"""
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        """svn username taken from 'svn log' for this patch, or None

        NOTE(review): the loop keeps overwriting, so this ends up with the
        author of the LAST listed revision -- presumably the one that added
        the patch; confirm against svn log ordering."""
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author
473
def get_upstream_names():
    """Return the set of module names listed in the upstream tarball index at URL."""
    opener = urllib2.build_opener()

    # Subdirectory links look like "modulename/"
    re_good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

    # Fetch the index page and pull out every link
    usock = opener.open(URL)
    parser = urllister()
    parser.feed(usock.read())
    usock.close()
    parser.close()

    # Keep only directory links; the name is the link minus its slash
    return set(link.replace('/', '') for link in parser.urls if re_good_dir.search(link))
490
def get_downstream_names():
    """Query urpmf for the downstream packages in MEDIA.

    Returns (TARBALLS, FILES):
      TARBALLS: module name -> {srpm name: version}
      FILES:    srpm name -> set of source file names"""
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

    output = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", MEDIA], close_fds=True).strip("\n").splitlines()

    tarballs = {}
    files = {}

    for entry in output:
        try:
            srpm, version, filename = entry.split("|")
        except ValueError:
            # Line didn't have the expected three fields; show it and move on
            print >>sys.stderr, entry
            continue

        if '.tar' in filename:
            m = re_file.match(filename)
            if m:
                # Module name is derived from the tarball file name
                module = m.groupdict()['module']
                tarballs.setdefault(module, {})[srpm] = version

        files.setdefault(srpm, set()).add(filename)

    return tarballs, files
521
def get_downstream_from_upstream(upstream, version):
    """Map an upstream module name to its downstream package name(s).

    When several source packages provide tarballs for the module, each
    checkout is refreshed and the spec versions are used to pick the
    package(s) currently at *version*, falling back to the latest
    version below it. Raises ValueError when no answer can be found."""
    # Determine the package name
    downstream, downstream_files = get_downstream_names()

    if upstream not in downstream:
        raise ValueError("No packages for upstream name: %s" % upstream)

    if len(downstream[upstream]) == 1:
        # Unambiguous: exactly one package provides this module
        return downstream[upstream].keys()

    # Directories packages are located in
    root = os.path.expanduser(PKGROOT)

    packages = {}
    for package in downstream[upstream].keys():
        cwd = os.path.join(root, package)

        # Checkout package to ensure the checkout reflects the latest changes
        try:
            subprocess.check_call(['mgarepo', 'co', package], cwd=root)
        except subprocess.CalledProcessError:
            raise ValueError("Multiple packages found and cannot checkout %s" % package)

        # Determine version from spec file
        try:
            packages[package] = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
        except subprocess.CalledProcessError:
            raise ValueError("Multiple packages found and cannot determine version of %s" % package)

    # Return all packages reflecting the current version
    matches = [package for package in packages if packages[package] == version]
    if len(matches):
        return matches

    # Return all packages reflecting the version before the current version
    latest_version = get_latest_version(packages.values(), max_version=version)
    matches = [package for package in packages if packages[package] == latest_version]
    if len(matches):
        return matches

    # Give up
    raise ValueError("Multiple packages found and cannot determine package for version %s" % version)
564
def write_file(path, data):
    """Atomically overwrite *path* with *data*.

    Writes to a temporary file in the same directory, then renames it over
    the destination so readers never see a partial file."""
    target_dir = os.path.dirname(path)
    with tempfile.NamedTemporaryFile(dir=target_dir, delete=False) as tmp:
        tmp.write(data)
        tmp.flush()
        os.rename(tmp.name, path)
570
def cmd_co(options, parser):
    """Handle the 'co' subcommand: check out every matched GNOME package."""
    root = os.path.expanduser(PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        print "%s => %s" % (module, package)
        # Best effort: failures are printed by mgarepo and ignored here
        subprocess.call(['mgarepo', 'co', package], cwd=root)
577
def join_streams(show_version=False, only_diff_version=False):
    """Yield (package, module, package_version, spec_version, files) for
    every downstream package whose module also exists upstream.

    When show_version or only_diff_version is set, the version is also read
    from the local spec checkout (spec_version stays None otherwise, or is
    'N/A' when the spec cannot be parsed). With only_diff_version, packages
    whose spec version matches the repository version are skipped."""
    root = os.path.expanduser(PKGROOT)

    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    # Modules known on both sides
    matches = upstream & set(downstream.keys())
    for module in matches:
        for package in downstream[module].keys():
            package_version = downstream[module][package]
            spec_version = None
            if show_version or only_diff_version:
                cwd = os.path.join(root, package)
                try:
                    spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
                except subprocess.CalledProcessError:
                    # Spec missing or rpm failed to parse it
                    spec_version = 'N/A'

                if only_diff_version and package_version == spec_version:
                    continue

            yield (package, module, package_version, spec_version, downstream_files[package])
600
def cmd_ls(options, parser):
    """Handle the 'packages' subcommand: list packages, optionally with
    upstream module name and version columns."""
    for package, module, package_version, spec_version, downstream_files in sorted(join_streams(show_version=options.show_version, only_diff_version=options.diff)):
        # Trailing commas keep everything on one line (py2 print)
        print package,"\t",
        if options.upstream: print module, "\t",
        if options.show_version: print spec_version, "\t", package_version, "\t",
        print
607
def cmd_patches(options, parser):
    """Handle the 'patches' subcommand: list every patch/diff carried by
    the GNOME packages together with its DEP-3 status."""
    root = os.path.expanduser(PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:

                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
                valid = ""
                forwarded = ""
                if p.dep3['headers']:
                    forwarded = p.dep3['headers'].get('Forwarded', "no")
                    if p.dep3['valid']:
                        valid="VALID"
                print "\t".join((module, package, str(p), forwarded, valid))
623
def cmd_dep3(options, parser):
    """Handle the 'dep3' subcommand: add DEP-3 headers to the given patch."""
    Patch(options.patch).add_dep3()
627
628 def cmd_package_new_version(options, parser):
629 # Determine the package name
630 if options.upstream:
631 try:
632 package = get_downstream_from_upstream(options.package, options.version)[0]
633 except ValueError, e:
634 print >>sys.stderr, "ERROR: %s" % e
635 sys.exit(1)
636 else:
637 package = options.package
638
639 # Directories packages are located in
640 root = os.path.expanduser(PKGROOT)
641 cwd = os.path.join(root, package)
642
643 # Checkout package to ensure the checkout reflects the latest changes
644 try:
645 subprocess.check_call(['mgarepo', 'co', package], cwd=root)
646 except subprocess.CalledProcessError:
647 sys.exit(1)
648
649 # SpecFile class handles the actual version+release change
650 s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package))
651 print "%s => %s" % (s.version, options.version)
652 if not s.update(options.version, force=options.force):
653 sys.exit(1)
654
655 # Check hash, if given
656 if options.hexdigest is not None:
657 sources = [name for name, origname in s.sources.iteritems() if '://' in origname]
658 if not len(sources):
659 print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!"
660 sys.stderr(1)
661
662 for filename in sources:
663 if not is_valid_hash(os.path.join(cwd, "SOURCES", filename), options.algo, options.hexdigest):
664 print >>sys.stderr, "ERROR: Hash file failed check for %s!" % path
665 print >>sys.stderr, "ERROR: Reverting changes!"
666 subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
667 sys.exit(1)
668
669 # We can even checkin and submit :-)
670 if options.submit:
671 try:
672 # checkin changes
673 subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd)
674 # and submit
675 subprocess.check_call(['mgarepo', 'submit'], cwd=cwd)
676 except subprocess.CalledProcessError:
677 sys.exit(1)
678
def cmd_parse_ftp_release_list(options, parser):
    """Handle the 'gnome-release-email' subcommand.

    Reads a GNOME ftp-release-list email from stdin, extracts the module,
    version and sha256 hash from its headers, and runs 'mga-gnome increase
    --submit' for every matching package. With --mail the output is mailed
    back; with --wait the process forks and sleeps before syncing."""
    def _send_reply_mail(contents, orig_msg, to, error=False):
        """Send a reply email containing *contents*, threading it onto orig_msg"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')
        if error:
            # XXX - ugly: re-read the output to put the last error line in the subject
            contents.seek(0)
            lastline = contents.read().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""
        msg['Subject'] = "Re: %s%s" % (orig_msg['Subject'], subjecterror)
        msg['To'] = to
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--']
        cmd.extend([to])
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        p.stdin.write(msg.as_string())
        p.stdin.flush()
        p.stdin.close()
        p.wait()


    msg = email.email.message_from_file(sys.stdin)

    # With --mail, capture all output in a temp file so it can be mailed back
    if options.mail:
        stdout = tempfile.TemporaryFile()
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    try:
        module = msg['X-Module-Name']
        version = msg['X-Module-Version']
        hexdigest = msg['X-Module-SHA256-tar.xz']
    except KeyError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    try:
        packages = get_downstream_from_upstream(module, version)
    except ValueError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    if options.wait:
        # maildrop aborts and will try to deliver after 5min
        # fork to avoid this
        if os.fork() != 0: sys.exit(0)
        # wait SLEEP_INITIAL after the message was sent
        secs = SLEEP_INITIAL
        t = email.utils.parsedate_tz(msg['Date'])
        if t is not None:
            # Subtract the time that already passed since the mail was sent
            msg_time = email.utils.mktime_tz(t)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0: time.sleep(secs)

    error = False
    for package in packages:
        # Re-invoke this script for each package; nonzero exit marks an error
        if subprocess.call(['mga-gnome', 'increase', '--submit', '--hash', hexdigest, package, version], stdout=stdout, stderr=stderr):
            error = True

    if options.mail: _send_reply_mail(stdout, msg, options.mail, error=error)
752
def main():
    """Parse command line arguments and dispatch to the subcommand handler."""
    description = """Mageia GNOME commands."""
    epilog="""Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description,epilog=epilog)

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')
    # co: checkout all packages
    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(
        func=cmd_co
    )

    # packages: list packages
    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument(      "--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument(      "--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.set_defaults(
        func=cmd_ls, upstream=False, show_version=False, diff=False
    )

    # patches: list patches
    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    # dep3: add DEP-3 headers to a patch
    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(
        func=cmd_dep3, path=False
    )

    # increase: bump a package to a new version
    subparser = subparsers.add_parser('increase', help='Increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest",
                           help="Hexdigest of the hash")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=False, upstream=False, hexdigest=None, algo="sha256",
        force=False
    )

    # gnome-release-email: act on a ftp-release-list mail from stdin
    subparser = subparsers.add_parser('gnome-release-email', help='Submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.set_defaults(
        func=cmd_parse_ftp_release_list
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        # Broken pipe (e.g. output piped to head) is not an error
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)
834
# Script entry point
if __name__ == "__main__":
    main()

Properties

Name Value
svn:executable *

  ViewVC Help
Powered by ViewVC 1.1.30