
Contents of /mga-gnome/trunk/mga-gnome



Revision 5289
Sat Aug 4 18:36:28 2012 UTC by ovitters
File size: 33859 byte(s)
Log message: make submit work for non-Cauldron
#!/usr/bin/python -u

# A lot of the code comes from ftpadmin, see
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters
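#
# Example invocations (illustrative; package names, versions and hashes are
# placeholders; see the subcommand definitions in main() below):
#   mga-gnome packages --version --diff
#   mga-gnome check-latest
#   mga-gnome increase --hash <sha256> gnome-shell 3.4.2
#   mga-gnome dep3 SOURCES/example.patch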

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# version comparison:
import rpm

# opening tarballs:
import tarfile
import gzip
import bz2
import lzma # pyliblzma

# getting links from HTML document:
from sgmllib import SGMLParser
import urllib2
import urlparse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
import email.utils
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# version freeze
import datetime

# packages --sort
import itertools

# check-latest
import requests

SLEEP_INITIAL=180
SLEEP_REPEAT=30
SLEEP_TIMES=20

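# re_majmin reduces a version string to its major.minor prefix (e.g. for '3.4.2'
# group 1 is '3.4'); re_version splits a version into separator, numeric and
# non-numeric tokens.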
re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

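# version_cmp() relies on rpm.labelCompare(), which compares (epoch, version,
# release) tuples; a dummy epoch and release of '1' is used on both sides so
# only the version strings decide the result.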
def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """

    return rpm.labelCompare(('1', a, '1'), ('1', b, '1'))

def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if ( latest is None or version_cmp(version, latest) > 0 ) \
           and ( max_version is None or version_cmp(version, max_version) < 0 ):
            latest = version
    return latest

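# GNOME convention: an even minor number is a stable series, an odd one is a
# development series. get_safe_max_version() returns an exclusive upper bound for
# a "safe" upgrade, e.g. '3.4.2' -> '3.5' (stay within stable 3.4.x) and
# '3.5.1' -> '3.7' (a development series may move up to the next stable series).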
def get_safe_max_version(version):
    if not re_majmin.match(version):
        return None

    majmin_nr = map(long, re_majmin.sub(r'\1', version).split('.'))

    if majmin_nr[1] % 2 == 0:
        return "%d.%d" % (majmin_nr[0], majmin_nr[1] + 1)
    else:
        return "%d.%d" % (majmin_nr[0], majmin_nr[1] + 2)

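# For example, judge_version_increase('3.4.1', '3.4.2') returns (10, None), while
# judge_version_increase('3.4.1', '3.5.0') returns (1, "Stable to unstable increase").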
def judge_version_increase(version_old, version_new):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [re_majmin.sub(r'\1', ver) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    #          x = major
    #          y = minor : even means stable
    #          z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # More detailed analysis needed, so figure out the numbers
    majmin_nrs = [map(long, ver.split('.')) for ver in majmins]

    # Check/ensure major version number is the same
    if majmin_nrs[0][0] != majmin_nrs[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (majmin_nrs[0][1] % 2 == 0, majmin_nrs[1][1] % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (4, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (6, "Stable version increase")

def line_input(file):
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
    """Open filename in a text editor ($VISUAL, $EDITOR or a common fallback);
    returns True once an editor ran successfully."""

    editors = []

    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            if e.errno == 2:
                continue
            raise

        if ret == 127:
            continue

        return True

class urllister(SGMLParser):
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        href = [v for k, v in attrs if k == 'href']
        if href:
            self.urls.extend(href)

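# Python 2's tarfile module has no xz support; XzTarFile registers an "xz" open
# method backed by the lzma (pyliblzma) module, and tarfile.open is monkey-patched
# below whenever TarFile lacks xzopen.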
class XzTarFile(tarfile.TarFile):

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open xz compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            # NOTE: _LZMAProxy (an LZMAFile-style wrapper around an existing file
            # object) is assumed to come from the ftpadmin code this is based on;
            # it is not defined in this file.
            fileobj = _LZMAProxy(fileobj, mode)
        else:
            fileobj = lzma.LZMAFile(name, mode)

        try:
            # lzma doesn't immediately return an error;
            # read a bit of data to determine if it is a valid xz file
            # (tarfile.BLOCKSIZE is used here as a small probe size)
            fileobj.read(tarfile.BLOCKSIZE)
            fileobj.seek(0)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t

if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open

def is_valid_hash(path, algo, hexdigest):
    if algo not in hashlib.algorithms:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest

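# SpecFile wraps a package's RPM spec file: it queries the current Version via rpm,
# lists the declared sources, and update() bumps Version/Release, re-downloads the
# tarball with "mgarepo sync -d" and checks that the patches still apply via
# "bm -p --nodeps".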
class SpecFile(object):
    re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)

    def __init__(self, path):
        self.path = path
        self.cwd = os.path.dirname(path)

    @property
    def version(self):
        return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0]

    @property
    def sources(self):
        ts = rpm.ts()
        spec = ts.parseSpec(self.path)
        srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
            else spec.sources()
        return dict((os.path.basename(name), name) for name, no, flags in srclist)

    def update(self, version, force=False):
        """Update specfile (increase version)"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version)

        if judgement < 0:
            print >>sys.stderr, "ERROR: %s!" % (msg)
            return False

        if judgement < 5:
            print "WARNING: %s!" % (msg)
            if not force: return False

        # XXX - os.path.join is hackish
        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')])
        if svn_diff_output != '':
            print svn_diff_output
            print >>sys.stderr, "ERROR: Package has uncommitted changes!"
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print >>sys.stderr, "WARNING: Force used; reverting svn changes"
                subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')])
            except subprocess.CalledProcessError:
                return False

        with open(self.path, "r") as f:
            data = f.read()

        if data.count("%mkrel") != 1:
            print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!"
            return False

        data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not increase version!"
            return False

        data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not reset release!"
            return False

        # Overwrite file with new version number
        write_file(self.path, data)

        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
            return False

        # Try to download the new tarball various times and wait between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1: time.sleep(SLEEP_REPEAT)
            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError, e:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if e.returncode != 1:
                    return False
        else:
            # all attempts failed to fetch the tarball
            return False

        try:
            # Check patches still apply
            subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
        except subprocess.CalledProcessError:
            logfile = os.path.join(os.path.dirname(self.path), 'log.%s' % os.path.splitext(os.path.basename(self.path))[0])
            if os.path.exists(logfile):
                subprocess.call(['tail', '-n', '15', logfile])
            return False

        return True

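# Patch handles DEP-3 metadata (the Debian patch tagging guidelines): _read_dep3()
# parses any existing headers from a patch file and add_dep3() inserts the missing
# ones, prefilling Author from the svn log.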
class Patch(object):
    """Do things with patches"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file"""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)

    # Example of the generated headers:
    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLS
                        if info['data'].startswith('//') and info['header'].lower() == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    last_header = None
        except IOError:
            pass

        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
             or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '')
                 or ('Author' in headers and headers['Author'].strip() != '')
                 or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author

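# Upstream scrapes the directory listing at download.gnome.org/sources/ for module
# names and reads each module's cache.json to obtain its released versions.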
class Upstream(object):

    URL="http://download.gnome.org/sources/"
    limit = None
    _cache_versions = {}

    def __init__(self):
        urlopen = urllib2.build_opener()

        good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

        # Get the files
        usock = urlopen.open(self.URL)
        parser = urllister()
        parser.feed(usock.read())
        usock.close()
        parser.close()
        files = parser.urls

        tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])
        if self.limit is not None:
            tarballs.intersection_update(self.limit)

        self.names = tarballs

    @classmethod
    def versions(cls, module):
        # XXX - ugly
        if module not in cls._cache_versions:
            versions = None

            url = '%s%s/cache.json' % (cls.URL, module)
            r = requests.get(url)
            j = r.json
            if j is not None and len(j) > 2 and module in j[2]:
                versions = j[2][module]

            cls._cache_versions[module] = versions

        return cls._cache_versions[module]

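# Downstream maps upstream tarball names to Mageia source packages by asking urpmf
# which files each source package in the configured media ships. As an illustration,
# re_file would split a name like 'gnome-shell-3.4.2.tar.xz' (example file name)
# into module 'gnome-shell', version '3.4.2' and format 'tar.xz'.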
class Downstream(object):
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

    MEDIA="Core Release Source"
    PKGROOT='~/pkgs'
    DISTRO=None

    def __init__(self):
        contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA], close_fds=True).strip("\n").splitlines()

        FILES = {}
        TARBALLS = {}

        for line in contents:
            try:
                srpm, version, filename = line.split("|")
            except ValueError:
                print >>sys.stderr, line
                continue

            if '.tar' in filename:
                r = self.re_file.match(filename)
                if r:
                    fileinfo = r.groupdict()
                    module = fileinfo['module']

                    if module not in TARBALLS:
                        TARBALLS[module] = {}
                    TARBALLS[module][srpm] = version

            if srpm not in FILES:
                FILES[srpm] = set()
            FILES[srpm].add(filename)

        self.tarballs = TARBALLS
        self.files = FILES

    @classmethod
    def co(cls, package, cwd=None):
        if cwd is None:
            cwd = os.path.expanduser(cls.PKGROOT)

        cmd = ['mgarepo', 'co']
        if cls.DISTRO:
            cmd.extend(('-d', cls.DISTRO))
        cmd.append(package)
        return subprocess.check_call(cmd, cwd=cwd)

    def get_downstream_from_upstream(self, upstream, version):
        if upstream not in self.tarballs:
            raise ValueError("No packages for upstream name: %s" % upstream)

        if len(self.tarballs[upstream]) == 1:
            return self.tarballs[upstream].keys()

        # Directories packages are located in
        root = os.path.expanduser(self.PKGROOT)

        packages = {}
        for package in self.tarballs[upstream].keys():
            cwd = os.path.join(root, package)

            # Checkout package to ensure the checkout reflects the latest changes
            try:
                self.co(package, cwd=root)
            except subprocess.CalledProcessError:
                raise ValueError("Multiple packages found and cannot checkout %s" % package)

            # Determine version from spec file
            try:
                packages[package] = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
            except subprocess.CalledProcessError:
                raise ValueError("Multiple packages found and cannot determine version of %s" % package)

        # Return all packages reflecting the current version
        matches = [package for package in packages if packages[package] == version]
        if len(matches):
            return matches

        # Return all packages reflecting the version before the current version
        latest_version = get_latest_version(packages.values(), max_version=version)
        matches = [package for package in packages if packages[package] == latest_version]
        if len(matches):
            return matches

        # Give up
        raise ValueError("Multiple packages found and cannot determine package for version %s" % version)

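# write_file() overwrites a file by writing a temporary file in the same directory
# and renaming it over the original, so a crash cannot leave a half-written file.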
def write_file(path, data):
    with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst:
        fdst.write(data)
        fdst.flush()
        os.rename(fdst.name, path)

def cmd_co(options, parser):
    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        print "%s => %s" % (module, package)
        try:
            Downstream.co(package)
        except subprocess.CalledProcessError:
            pass

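# join_streams() yields one tuple per downstream package whose module also exists
# upstream: (package, module, package_version, spec_version, downstream_files),
# where package_version comes from the repository metadata and spec_version from
# the checked-out spec file.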
def join_streams(show_version=False, only_diff_version=False):
    root = os.path.expanduser(Downstream.PKGROOT)

    upstream = Upstream().names
    downstream = Downstream()

    matches = upstream & set(downstream.tarballs.keys())
    for module in matches:
        for package in downstream.tarballs[module].keys():
            package_version = downstream.tarballs[module][package]
            spec_version = None
            if show_version or only_diff_version:
                cwd = os.path.join(root, package)
                try:
                    spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
                except subprocess.CalledProcessError:
                    spec_version = 'N/A'

            if only_diff_version and package_version == spec_version:
                continue

            yield (package, module, package_version, spec_version, downstream.files[package])

def cmd_ls(options, parser):
    streams = join_streams(show_version=options.show_version, only_diff_version=options.diff)
    if options.sort:
        SORT = dict(zip(options.sort.read().splitlines(), itertools.count()))
        streams = sorted(streams, key=lambda a: (SORT.get(a[1], 9999), a[0]))
    else:
        streams = sorted(streams)

    for package, module, package_version, spec_version, downstream_files in streams:
        sys.stdout.write(package)
        if options.upstream: sys.stdout.write("\t%s" % module)
        if options.show_version: sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        print

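# check-latest prints, per package: name, spec version, repository version, the
# latest upstream version and the latest "safe" upstream version (see
# get_safe_max_version); a column falls back to N/A when the packaged version is
# already newer than what upstream offers.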
def cmd_check_latest(options, parser):
    streams = join_streams(show_version=True)

    for package, module, package_version, spec_version, downstream_files in streams:
        sys.stdout.write(package)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))

        safe_max_version = get_safe_max_version(spec_version)

        versions = Upstream.versions(module)
        if versions:
            latest_version = get_latest_version(versions)
            safe_version = get_latest_version(versions, safe_max_version)

            if version_cmp(latest_version, spec_version) < 0: latest_version = 'N/A'
            if version_cmp(safe_version, spec_version) < 0: safe_version = 'N/A'

            sys.stdout.write("\t%s" % latest_version)
            sys.stdout.write("\t%s" % safe_version)
        print

def cmd_patches(options, parser):
    root = os.path.expanduser(Downstream.PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:
                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
                valid = ""
                forwarded = ""
                if p.dep3['headers']:
                    forwarded = p.dep3['headers'].get('Forwarded', "no")
                    if p.dep3['valid']:
                        valid = "VALID"
                print "\t".join((module, package, str(p), forwarded, valid))

def cmd_dep3(options, parser):
    p = Patch(options.patch)
    p.add_dep3()

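# The 'increase' subcommand: check out the package, bump the spec file to the new
# version, optionally verify the tarball hash, commit with "mgarepo ci" and, when
# submit is enabled (the default for this subcommand), submit the package with
# "mgarepo submit".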
def cmd_package_new_version(options, parser):
    # Determine the package name
    if options.upstream:
        try:
            package = Downstream().get_downstream_from_upstream(options.package, options.version)[0]
        except ValueError, e:
            print >>sys.stderr, "ERROR: %s" % e
            sys.exit(1)
    else:
        package = options.package

    # Directories packages are located in
    root = os.path.expanduser(Downstream.PKGROOT)
    cwd = os.path.join(root, package)

    # Checkout package to ensure the checkout reflects the latest changes
    try:
        Downstream.co(package, cwd=root)
    except subprocess.CalledProcessError:
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package))
    print "%s => %s" % (s.version, options.version)
    if not s.update(options.version, force=options.force):
        sys.exit(1)

    # Check hash, if given
    if options.hexdigest is not None:
        sources = [name for name, origname in s.sources.iteritems() if '://' in origname]
        if not len(sources):
            print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!"
            sys.exit(1)

        for filename in sources:
            path = os.path.join(cwd, "SOURCES", filename)
            if not is_valid_hash(path, options.algo, options.hexdigest):
                print >>sys.stderr, "ERROR: Hash check failed for %s!" % path
                print >>sys.stderr, "ERROR: Reverting changes!"
                subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
                sys.exit(1)

    try:
        # If we made it this far, checkin the changes
        subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd)

        # Submit is optional
        if options.submit:
            cmd = ['mgarepo', 'submit']
            if Downstream.DISTRO:
                cmd.extend(('--define', 'section=core/updates_testing', '-t', Downstream.DISTRO))
            subprocess.check_call(cmd, cwd=cwd)
    except subprocess.CalledProcessError:
        sys.exit(1)

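# The 'gnome-release-email' subcommand reads a GNOME ftp-release-list announcement
# from stdin, takes module, version and SHA256 from its X-Module-* headers,
# optionally waits before trying to retrieve the new tarball, runs
# "mga-gnome increase --hash ..." for every matching package and can mail a report
# of the result.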
def cmd_parse_ftp_release_list(options, parser):
    def _send_reply_mail(contents, orig_msg, to, packages=[], error=False):
        """Send a reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')

        if error:
            # XXX - ugly
            contents.seek(0)
            lastline = contents.read().rstrip().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            # Remove things like " - " (youri output from mgarepo submit)
            lastline = re.sub(r'^\s+-\s+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""

        if packages:
            subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror)
        else:
            subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror)

        msg['Subject'] = subject
        msg['To'] = to
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--']
        cmd.extend([to])
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        p.stdin.write(msg.as_string())
        p.stdin.flush()
        p.stdin.close()
        p.wait()

    msg = email.message_from_file(sys.stdin)

    if options.mail:
        stdout = tempfile.TemporaryFile()
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    try:
        module = msg['X-Module-Name']
        version = msg['X-Module-Version']
        hexdigest = msg['X-Module-SHA256-tar.xz']
    except KeyError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    try:
        packages = Downstream().get_downstream_from_upstream(module, version)
    except ValueError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    if options.wait:
        # maildrop aborts and will try to deliver after 5min
        # fork to avoid this
        if os.fork() != 0: sys.exit(0)

        # wait until SLEEP_INITIAL seconds after the message was sent
        secs = SLEEP_INITIAL
        t = email.utils.parsedate_tz(msg['Date'])
        if t is not None:
            msg_time = email.utils.mktime_tz(t)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0: time.sleep(secs)

    error = False
    for package in packages:
        cmd = ['mga-gnome', 'increase', '--hash', hexdigest]
        if options.submit:
            cmd.append('--submit')
        if options.force:
            cmd.append('--force')
        cmd.extend((package, version))
        if subprocess.call(cmd, stdout=stdout, stderr=stderr):
            error = True

    if options.mail: _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error)

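# main() wires up argparse; the global -l/--limit option restricts which upstream
# modules are considered, and -d/--distro points PKGROOT, the urpmi media and
# mgarepo checkouts/submissions at a non-Cauldron release.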
def main():
    description = """Mageia GNOME commands."""
    epilog = """Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description, epilog=epilog)
    parser.add_argument("-l", "--limit", type=argparse.FileType('r', 0),
                        dest="limit_upstream", metavar="FILE",
                        help="File containing upstream names")
    parser.add_argument("-d", "--distro", action="store", dest="distro",
                        help="Distribution release")

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')

    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(
        func=cmd_co
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument("--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument("--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.add_argument("--sort", type=argparse.FileType('r', 0),
                           dest="sort", metavar="FILE",
                           help="Sort packages according to order in given FILE")
    subparser.set_defaults(
        func=cmd_ls, upstream=False, show_version=False, diff=False
    )

    subparser = subparsers.add_parser('check-latest', help='check for latest version of packages')
    subparser.set_defaults(
        func=cmd_check_latest
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(
        func=cmd_dep3, path=False
    )

    subparser = subparsers.add_parser('increase', help='Increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest",
                           help="Hexdigest of the hash")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=True, upstream=False, hexdigest=None, algo="sha256",
        force=False
    )

    subparser = subparsers.add_parser('gnome-release-email', help='Submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-f", "--force", action="store_true",
                           help="Force submission")
    subparser.set_defaults(
        func=cmd_parse_ftp_release_list, force=False, wait=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()
    if options.limit_upstream:
        Upstream.limit = set(options.limit_upstream.read().strip("\n").splitlines())

    if options.distro:
        Downstream.PKGROOT = os.path.join('~/pkgs', options.distro)
        Downstream.MEDIA = "Core Release %s Source" % options.distro
        Downstream.DISTRO = options.distro

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    os.environ['PYTHONUNBUFFERED'] = '1'
    main()
