1 |
#!/usr/bin/python |
#!/usr/bin/python -u |
2 |
|
|
3 |
# A lot of the code comes from ftpadmin, see |
# A lot of the code comes from ftpadmin, see |
4 |
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin |
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin |
33 |
import urllib2 |
import urllib2 |
34 |
import urlparse |
import urlparse |
35 |
|
|
36 |
MEDIA="Core Release Source" |
# for checking hashes |
37 |
URL="http://download.gnome.org/sources/" |
import hashlib |
38 |
PKGROOT='~/pkgs' |
|
39 |
|
# for parsing ftp-release-list emails |
40 |
|
import email |
41 |
|
from email.mime.text import MIMEText |
42 |
|
|
43 |
|
# to be able to sleep for a while |
44 |
|
import time |
45 |
|
|
46 |
|
# version freeze |
47 |
|
import datetime |
48 |
|
|
49 |
|
# packages --sort |
50 |
|
import itertools |
51 |
|
|
52 |
|
# check-latest |
53 |
|
import requests |
54 |
|
|
55 |
|
SLEEP_INITIAL=180 |
56 |
|
SLEEP_REPEAT=30 |
57 |
|
SLEEP_TIMES=30 |
58 |
|
|
59 |
re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*') |
re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*') |
60 |
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)') |
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)') |
82 |
latest = version |
latest = version |
83 |
return latest |
return latest |
84 |
|
|
85 |
|
def get_safe_max_version(version):
    """Return the highest "safe" major.minor to upgrade to, or None.

    Follows the GNOME even/odd minor scheme (presumably: even minor =
    stable series, odd minor = unstable): for an even minor the ceiling
    is minor+1, for an odd minor it is minor+2.

    Returns None when version does not look like "major.minor...".
    """
    if not re_majmin.match(version):
        return None

    # int() is sufficient: the original py2-only long() only mattered for
    # huge numbers, and py2 ints promote to long automatically anyway.
    major, minor = [int(nr) for nr in re_majmin.sub(r'\1', version).split('.')]

    if minor % 2 == 0:
        return "%d.%d" % (major, minor + 1)
    return "%d.%d" % (major, minor + 2)
95 |
|
|
96 |
def judge_version_increase(version_old, version_new): |
def judge_version_increase(version_old, version_new): |
97 |
"""Judge quality of version increase: |
"""Judge quality of version increase: |
98 |
|
|
104 |
5+: Ok""" |
5+: Ok""" |
105 |
versions = (version_old, version_new) |
versions = (version_old, version_new) |
106 |
|
|
|
print " => ".join(versions) |
|
|
|
|
107 |
# First do a basic version comparison to ensure version_new is actually newer |
# First do a basic version comparison to ensure version_new is actually newer |
108 |
compare = version_cmp(version_new, version_old) |
compare = version_cmp(version_new, version_old) |
109 |
|
|
110 |
if compare == 0: |
if compare == 0: |
111 |
|
# 1.0.0 -> 1.0.1 |
112 |
return (-2, "Already at version %s!" % (version_old)) |
return (-2, "Already at version %s!" % (version_old)) |
113 |
|
|
114 |
if compare != 1: |
if compare != 1: |
115 |
|
# 1.0.1 -> 1.0.0 |
116 |
return (-3, "Version %s is older than current version %s!" % (version_new, version_old)) |
return (-3, "Version %s is older than current version %s!" % (version_new, version_old)) |
117 |
|
|
118 |
# Version is newer, but we don't want to see if it follows the GNOME versioning scheme |
# Version is newer, but we don't want to see if it follows the GNOME versioning scheme |
133 |
# Major+minor the same? Then go ahead and upgrade! |
# Major+minor the same? Then go ahead and upgrade! |
134 |
if majmins[0] == majmins[1]: |
if majmins[0] == majmins[1]: |
135 |
# Majmin of both versions are the same, looks good! |
# Majmin of both versions are the same, looks good! |
136 |
|
# 1.1.x -> 1.1.x or 1.0.x -> 1.0.x |
137 |
return (10, None) |
return (10, None) |
138 |
|
|
139 |
# More detailed analysis needed, so figure out the numbers |
# More detailed analysis needed, so figure out the numbers |
141 |
|
|
142 |
# Check/ensure major version number is the same |
# Check/ensure major version number is the same |
143 |
if majmin_nrs[0][0] != majmin_nrs[1][0]: |
if majmin_nrs[0][0] != majmin_nrs[1][0]: |
144 |
|
# 1.0.x -> 2.0.x |
145 |
return (1, "Major version number increase") |
return (1, "Major version number increase") |
146 |
|
|
147 |
# Minor indicates stable/unstable |
# Minor indicates stable/unstable |
150 |
# Upgrading to unstable is weird |
# Upgrading to unstable is weird |
151 |
if not devstate[1]: |
if not devstate[1]: |
152 |
if devstate[0]: |
if devstate[0]: |
153 |
|
# 1.2.x -> 1.3.x |
154 |
return (1, "Stable to unstable increase") |
return (1, "Stable to unstable increase") |
155 |
|
|
156 |
|
# 1.3.x -> 1.5.x |
157 |
return (4, "Unstable to unstable version increase") |
return (4, "Unstable to unstable version increase") |
158 |
|
|
159 |
# Unstable => stable is always ok |
# Unstable => stable is always ok |
160 |
if not devstate[0]: |
if not devstate[0]: |
161 |
|
# 1.1.x -> 1.2.x |
162 |
return (5, "Unstable to stable") |
return (5, "Unstable to stable") |
163 |
|
|
164 |
# Can only be increase of minors from one stable to the next |
# Can only be increase of minors from one stable to the next |
165 |
|
# 1.0.x -> 1.2.x |
166 |
return (6, "Stable version increase") |
return (6, "Stable version increase") |
167 |
|
|
168 |
def line_input (file): |
def line_input (file): |
240 |
# Older tarfile modules cannot read .tar.xz (no xzopen handler); in that
# case route tarfile.open through the XzTarFile fallback defined above.
if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open
242 |
|
|
243 |
|
def is_valid_hash(path, algo, hexdigest):
    """Return True if the file at *path* hashes to *hexdigest* under *algo*.

    path      -- file to check
    algo      -- hashlib algorithm name, e.g. "sha256"
    hexdigest -- expected hex digest string

    Raises ValueError for an unknown algorithm name.
    """
    # hashlib.algorithms exists only on Python 2.7; fall back to
    # algorithms_available so this also works on other Python versions.
    known_algos = getattr(hashlib, 'algorithms', None) or hashlib.algorithms_available
    if algo not in known_algos:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    # Hash in 32 KiB chunks so large tarballs are not read into memory at once
    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest
256 |
|
|
257 |
class SpecFile(object):
    """Wrapper around a downstream RPM spec file."""

    # Matches the "Version:" line so the version can be bumped in place
    re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    # Matches a "Release: %mkrel N" line so the release can be reset
    re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
265 |
@property |
@property |
266 |
def version(self): |
def version(self): |
267 |
return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0] |
return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0] |
268 |
|
@property |
269 |
|
def sources(self): |
270 |
|
ts = rpm.ts() |
271 |
|
spec = ts.parseSpec(self.path) |
272 |
|
srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \ |
273 |
|
else spec.sources() |
274 |
|
return dict((os.path.basename(name), name) for name, no, flags in srclist) |
275 |
|
|
276 |
def update(self, version): |
def update(self, version, force=False): |
277 |
"""Update specfile (increase version)""" |
"""Update specfile (increase version)""" |
278 |
cur_version = self.version |
cur_version = self.version |
279 |
|
|
285 |
|
|
286 |
if judgement < 5: |
if judgement < 5: |
287 |
print "WARNING: %s!" % (msg) |
print "WARNING: %s!" % (msg) |
288 |
return False |
if not force: return False |
289 |
|
|
290 |
# XXX - os.path.join is hackish |
# XXX - os.path.join is hackish |
291 |
if subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')]) != '': |
svn_diff_output = subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')]) |
292 |
|
if svn_diff_output != '': |
293 |
|
print svn_diff_output |
294 |
print >>sys.stderr, "ERROR: Package has uncommitted changes!" |
print >>sys.stderr, "ERROR: Package has uncommitted changes!" |
295 |
return False |
if not force: |
296 |
|
return False |
297 |
|
|
298 |
|
# Forcing package submission: revert changes |
299 |
|
try: |
300 |
|
print >>sys.stderr, "WARNING: Force used; reverting svn changes" |
301 |
|
subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')]) |
302 |
|
except subprocess.CalledProcessError: |
303 |
|
return False |
304 |
|
|
305 |
with open(self.path, "rw") as f: |
with open(self.path, "rw") as f: |
306 |
data = f.read() |
data = f.read() |
307 |
|
|
308 |
|
if data.count("%subrel") != 0: |
309 |
|
print >>sys.stderr, "ERROR: %subrel found; don't know what to do!" |
310 |
|
return False |
311 |
|
|
312 |
if data.count("%mkrel") != 1: |
if data.count("%mkrel") != 1: |
313 |
print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!" |
print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!" |
314 |
return False |
return False |
332 |
print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version |
print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version |
333 |
return False |
return False |
334 |
|
|
335 |
|
|
336 |
|
# Try to download the new tarball various times and wait between attempts |
337 |
|
tries = 0 |
338 |
|
while tries < SLEEP_TIMES: |
339 |
|
tries += 1 |
340 |
|
if tries > 1: time.sleep(SLEEP_REPEAT * 2 ** (tries // 10)) |
341 |
|
try: |
342 |
|
# Download new tarball |
343 |
|
subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd) |
344 |
|
# success, so exit loop |
345 |
|
break |
346 |
|
except subprocess.CalledProcessError, e: |
347 |
|
# mgarepo sync returns 1 if the tarball cannot be downloaded |
348 |
|
if e.returncode != 1: |
349 |
|
subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')]) |
350 |
|
return False |
351 |
|
else: |
352 |
|
# failed to download tarball |
353 |
|
subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')]) |
354 |
|
return False |
355 |
|
|
356 |
|
|
357 |
try: |
try: |
|
# Download new tarball |
|
|
subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd) |
|
358 |
# Check patches still apply |
# Check patches still apply |
359 |
subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd) |
subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd) |
360 |
except subprocess.CalledProcessError: |
except subprocess.CalledProcessError: |
361 |
|
logfile = os.path.join(os.path.dirname(self.path), 'log.%s' % os.path.splitext(os.path.basename(self.path))[0]) |
362 |
|
if os.path.exists(logfile): |
363 |
|
subprocess.call(['tail', '-n', '15', logfile]) |
364 |
return False |
return False |
365 |
|
|
366 |
return True |
return True |
505 |
|
|
506 |
return self._svn_author |
return self._svn_author |
507 |
|
|
|
class Upstream(object):
    """Scrapes download.gnome.org for upstream module names and versions."""

    URL = "http://download.gnome.org/sources/"
    # Optional set of module names to restrict to (set from --limit FILE)
    limit = None
    # module name -> version list (or None), memoized across calls
    _cache_versions = {}

    def __init__(self):
        urlopen = urllib2.build_opener()

        # Only directory entries like "gtk+/" correspond to modules
        good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

        # Fetch the index page and extract all link targets
        usock = urlopen.open(self.URL)
        parser = urllister()
        parser.feed(usock.read())
        usock.close()
        parser.close()
        files = parser.urls

        tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])
        if self.limit is not None:
            tarballs.intersection_update(self.limit)

        self.names = tarballs

    @classmethod
    def versions(cls, module):
        """Return the known versions for module, or None if unavailable."""
        # XXX - ugly
        if module not in cls._cache_versions:
            versions = None

            url = '%s%s/cache.json' % (cls.URL, module)
            r = requests.get(url)
            # BUGFIX: Response.json was a property in requests < 1.0 but is
            # a method from 1.0 on; bare attribute access would hand back a
            # bound method and crash on len(j) below.
            j = r.json() if callable(r.json) else r.json
            if j is not None and len(j) > 2 and module in j[2]:
                versions = j[2][module]

            cls._cache_versions[module] = versions

        return cls._cache_versions[module]
549 |
|
|
550 |
class Downstream(object):
    """View of the distribution packages built from GNOME tarballs."""

    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

    MEDIA = "Core Release Source"
    PKGROOT = '~/pkgs'
    DISTRO = None

    def __init__(self):
        contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA], close_fds=True).strip("\n").splitlines()

        files_by_srpm = {}
        pkgs_by_module = {}

        for line in contents:
            try:
                srpm, version, filename = line.split("|")
            except ValueError:
                # Unparseable urpmf output line; report it and keep going
                print >>sys.stderr, line
                continue

            if '.tar' in filename:
                match = self.re_file.match(filename)
                if match:
                    module = match.groupdict()['module']

                    pkgs = pkgs_by_module.setdefault(module, {})
                    if srpm in pkgs:
                        # srpm seen before
                        # NOTE(review): this keeps the LOWER of the two
                        # versions although the original comment said
                        # "check if version is newer" -- confirm intent
                        if version_cmp(pkgs[srpm], version) == 1:
                            pkgs[srpm] = version
                    else:
                        pkgs[srpm] = version

            files_by_srpm.setdefault(srpm, set()).add(filename)

        self.tarballs = pkgs_by_module
        self.files = files_by_srpm

    @classmethod
    def co(cls, package, cwd=None):
        """Check out (or update) a package working copy via mgarepo."""
        if cwd is None:
            cwd = os.path.expanduser(cls.PKGROOT)

        cmd = ['mgarepo', 'co']
        if cls.DISTRO:
            cmd.extend(('-d', cls.DISTRO))
        cmd.append(package)
        return subprocess.check_call(cmd, cwd=cwd)

    def get_downstream_from_upstream(self, upstream, version):
        """Map an upstream module name to the downstream package name(s).

        Raises ValueError when no package exists or when the right one
        among multiple candidates cannot be determined.
        """
        if upstream not in self.tarballs:
            raise ValueError("No packages for upstream name: %s" % upstream)

        # Single candidate: no disambiguation needed
        if len(self.tarballs[upstream]) == 1:
            return self.tarballs[upstream].keys()

        # Directories packages are located in
        pkgroot = os.path.expanduser(self.PKGROOT)

        candidates = {}
        for package in self.tarballs[upstream].keys():
            pkgdir = os.path.join(pkgroot, package)

            # Refresh the checkout so it reflects the latest changes
            try:
                self.co(package, cwd=pkgroot)
            except subprocess.CalledProcessError:
                raise ValueError("Multiple packages found and cannot checkout %s" % package)

            # Read the packaged version from the spec file
            try:
                candidates[package] = SpecFile(os.path.join(pkgdir, "SPECS", "%s.spec" % package)).version
            except subprocess.CalledProcessError:
                raise ValueError("Multiple packages found and cannot determine version of %s" % package)

        # Prefer packages already at exactly the requested version
        matches = [package for package in candidates if candidates[package] == version]
        if len(matches):
            return matches

        # Otherwise, packages at the latest version below the requested one
        previous = get_latest_version(candidates.values(), max_version=version)
        matches = [package for package in candidates if candidates[package] == previous]
        if len(matches):
            return matches

        # Give up
        raise ValueError("Multiple packages found and cannot determine package for version %s" % version)
644 |
def write_file(path, data): |
def write_file(path, data): |
645 |
with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst: |
with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst: |
648 |
os.rename(fdst.name, path) |
os.rename(fdst.name, path) |
649 |
|
|
650 |
def cmd_co(options, parser): |
def cmd_co(options, parser): |
651 |
upstream = get_upstream_names() |
for package, module, package_version, spec_version, downstream_files in sorted(join_streams()): |
652 |
downstream, downstream_files = get_downstream_names() |
print "%s => %s" % (module, package) |
653 |
|
try: |
654 |
|
Downstream.co(package) |
655 |
|
except subprocess.CalledProcessError: |
656 |
|
pass |
657 |
|
|
658 |
|
def join_streams(show_version=False, only_diff_version=False):
    """Yield (package, module, package_version, spec_version, files) for
    every downstream package whose module also exists upstream.

    show_version      -- also read the version from the spec file
    only_diff_version -- skip packages already at the packaged version
    """
    pkgroot = os.path.expanduser(Downstream.PKGROOT)

    upstream_names = Upstream().names
    downstream = Downstream()

    for module in upstream_names & set(downstream.tarballs.keys()):
        for package, package_version in downstream.tarballs[module].iteritems():
            spec_version = None

            if show_version or only_diff_version:
                specpath = os.path.join(pkgroot, package, "SPECS", "%s.spec" % package)
                try:
                    spec_version = SpecFile(specpath).version
                except subprocess.CalledProcessError:
                    spec_version = 'N/A'

            if only_diff_version and package_version == spec_version:
                continue

            yield (package, module, package_version, spec_version, downstream.files[package])
680 |
|
|
681 |
def cmd_ls(options, parser):
    """Print one line per GNOME package, optionally sorted and filtered."""
    entries = join_streams(show_version=options.show_version, only_diff_version=options.diff)

    if options.sort:
        # rank modules by their position in the --sort file
        rank = dict(zip(options.sort.read().splitlines(), itertools.count()))
        # unknown modules sort last (9999); ties broken by package name
        entries = sorted(entries, key=lambda entry: (rank.get(entry[1], 9999), entry[0]))
    else:
        entries = sorted(entries)

    for package, module, package_version, spec_version, downstream_files in entries:
        sys.stdout.write(package)
        if options.upstream:
            sys.stdout.write("\t%s" % module)
        if options.show_version:
            sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        print
696 |
|
def cmd_check_latest(options, parser):
    """Report, per package, the latest and the latest "safe" upstream version.

    Upgrade flags: L/l = newer/older than latest, S/s = newer/older than safe.
    """
    for package, module, package_version, spec_version, downstream_files in join_streams(show_version=True):
        upgrade = set()
        sys.stdout.write(package)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))

        safe_max_version = get_safe_max_version(spec_version)

        versions = Upstream.versions(module)
        if versions:
            latest_version = get_latest_version(versions)
            safe_version = get_latest_version(versions, safe_max_version)

            cmp_latest = version_cmp(latest_version, spec_version)
            if cmp_latest < 0:
                # packaged version is ahead of anything known upstream
                latest_version = 'N/A'
                upgrade.add('l')
            elif cmp_latest > 0:
                upgrade.add('L')

            cmp_safe = version_cmp(safe_version, spec_version)
            if cmp_safe < 0:
                safe_version = 'N/A'
                upgrade.add('s')
            elif cmp_safe > 0:
                upgrade.add('S')

            sys.stdout.write("\t%s" % latest_version)
            sys.stdout.write("\t%s" % safe_version)
            sys.stdout.write("\t%s" % "".join(sorted(upgrade)))

        print
730 |
|
|
731 |
import pprint |
def cmd_patches(options, parser): |
732 |
|
root = os.path.expanduser(Downstream.PKGROOT) |
733 |
|
|
734 |
matches = upstream & set(downstream.keys()) |
for package, module, package_version, spec_version, downstream_files in sorted(join_streams()): |
735 |
for module in sorted(matches): |
for filename in downstream_files: |
736 |
for srpm in downstream[module]: |
if '.patch' in filename or '.diff' in filename: |
737 |
for filename in downstream_files[srpm]: |
|
738 |
if '.patch' in filename or '.diff' in filename: |
p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path) |
739 |
|
valid = "" |
740 |
p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path) |
forwarded = "" |
741 |
valid = "" |
if p.dep3['headers']: |
742 |
forwarded = "" |
forwarded = p.dep3['headers'].get('Forwarded', "no") |
743 |
if p.dep3['headers']: |
if p.dep3['valid']: |
744 |
forwarded = p.dep3['headers'].get('Forwarded', "no") |
valid="VALID" |
745 |
if p.dep3['valid']: |
print "\t".join((module, package, str(p), forwarded, valid)) |
|
valid="VALID" |
|
|
print "\t".join((module, srpm, str(p), forwarded, valid)) |
|
746 |
|
|
747 |
def cmd_dep3(options, parser): |
def cmd_dep3(options, parser): |
748 |
p = Patch(options.patch) |
p = Patch(options.patch) |
751 |
def cmd_package_new_version(options, parser): |
def cmd_package_new_version(options, parser): |
752 |
# Determine the package name |
# Determine the package name |
753 |
if options.upstream: |
if options.upstream: |
754 |
downstream, downstream_files = get_downstream_names() |
try: |
755 |
|
package = Downstream().get_downstream_from_upstream(options.package, options.version)[0] |
756 |
if options.package not in downstream: |
except ValueError, e: |
757 |
print >>sys.stderr, "ERROR: No packages for upstream name: %s" % options.package |
print >>sys.stderr, "ERROR: %s" % e |
|
sys.exit(1) |
|
|
|
|
|
if len(downstream[options.package]) != 1: |
|
|
# XXX - Make it more intelligent |
|
|
print >>sys.stderr, "ERROR: Multiple packages found for %s: %s" % (options.package, ", ".join(downstream[options.package])) |
|
758 |
sys.exit(1) |
sys.exit(1) |
|
|
|
|
package = list(downstream[options.package])[0] |
|
759 |
else: |
else: |
760 |
package = options.package |
package = options.package |
761 |
|
|
762 |
# Directories packages are located in |
# Directories packages are located in |
763 |
root = os.path.expanduser(PKGROOT) |
root = os.path.expanduser(Downstream.PKGROOT) |
764 |
cwd = os.path.join(root, package) |
cwd = os.path.join(root, package) |
765 |
|
|
766 |
# Checkout package to ensure the checkout reflects the latest changes |
# Checkout package to ensure the checkout reflects the latest changes |
767 |
try: |
try: |
768 |
subprocess.check_call(['mgarepo', 'co', package], cwd=root) |
Downstream.co(package, cwd=root) |
769 |
except subprocess.CalledProcessError: |
except subprocess.CalledProcessError: |
770 |
sys.exit(1) |
sys.exit(1) |
771 |
|
|
772 |
# SpecFile class handles the actual version+release change |
# SpecFile class handles the actual version+release change |
773 |
s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)) |
s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)) |
774 |
print "%s => %s" % (s.version, options.version) |
print "%s => %s" % (s.version, options.version) |
775 |
if not s.update(options.version): |
if not s.update(options.version, force=options.force): |
776 |
sys.exit(1) |
sys.exit(1) |
777 |
|
|
778 |
# We can even checkin and submit :-) |
# Check hash, if given |
779 |
if options.submit: |
if options.hexdigest is not None: |
780 |
try: |
sources = [name for name, origname in s.sources.iteritems() if '://' in origname] |
781 |
# checkin changes |
if not len(sources): |
782 |
subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd) |
print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!" |
783 |
# and submit |
sys.stderr(1) |
784 |
subprocess.check_call(['mgarepo', 'submit'], cwd=cwd) |
|
785 |
except subprocess.CalledProcessError: |
# If there are multiple sources, try to see if there is a preferred name |
786 |
sys.exit(1) |
# --> needed for metacity hash check (multiple tarball sources) |
787 |
|
if len(sources) > 1: |
788 |
|
preferred_name = '%s-%s.tar.xz' % (package, options.version) |
789 |
|
if preferred_name in sources: |
790 |
|
sources = [preferred_name] |
791 |
|
|
792 |
|
for filename in sources: |
793 |
|
path = os.path.join(cwd, "SOURCES", filename) |
794 |
|
if not is_valid_hash(path, options.algo, options.hexdigest): |
795 |
|
print >>sys.stderr, "ERROR: Hash file failed check for %s!" % path |
796 |
|
print >>sys.stderr, "ERROR: Reverting changes!" |
797 |
|
subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd) |
798 |
|
sys.exit(1) |
799 |
|
|
800 |
|
try: |
801 |
|
# If we made it this far, checkin the changes |
802 |
|
subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd) |
803 |
|
|
804 |
|
# Submit is optional |
805 |
|
if options.submit: |
806 |
|
cmd = ['mgarepo', 'submit'] |
807 |
|
if Downstream.DISTRO: |
808 |
|
cmd.extend(('--define', 'section=core/updates_testing', '-t', Downstream.DISTRO)) |
809 |
|
subprocess.check_call(cmd, cwd=cwd) |
810 |
|
except subprocess.CalledProcessError: |
811 |
|
sys.exit(1) |
812 |
|
|
813 |
|
def cmd_parse_ftp_release_list(options, parser): |
814 |
|
def _send_reply_mail(contents, orig_msg, to, packages=[], error=False): |
815 |
|
"""Send an reply email""" |
816 |
|
contents.seek(0) |
817 |
|
msg = MIMEText(contents.read(), _charset='utf-8') |
818 |
|
|
819 |
|
if error: |
820 |
|
# XXX - ugly |
821 |
|
contents.seek(0) |
822 |
|
lastline = contents.read().rstrip().splitlines()[-1] |
823 |
|
# Remove things like "ERROR: " and so on from the last line |
824 |
|
lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline) |
825 |
|
# Remove things like " - " (youri output from mgarepo submit) |
826 |
|
lastline = re.sub(r'^\s+-\s+', '', lastline) |
827 |
|
subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)" |
828 |
|
else: |
829 |
|
subjecterror = "" |
830 |
|
|
831 |
|
if packages: |
832 |
|
subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror) |
833 |
|
else: |
834 |
|
subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror) |
835 |
|
|
836 |
|
msg['Subject'] = subject |
837 |
|
msg['To'] = to |
838 |
|
msg["In-Reply-To"] = orig_msg["Message-ID"] |
839 |
|
msg["References"] = orig_msg["Message-ID"] |
840 |
|
|
841 |
|
# Call sendmail program directly so it doesn't matter if the service is running |
842 |
|
cmd = ['/usr/sbin/sendmail', '-oi', '--'] |
843 |
|
cmd.extend([to]) |
844 |
|
p = subprocess.Popen(cmd, stdin=subprocess.PIPE) |
845 |
|
p.stdin.write(msg.as_string()) |
846 |
|
p.stdin.flush() |
847 |
|
p.stdin.close() |
848 |
|
p.wait() |
849 |
|
|
850 |
|
|
851 |
|
msg = email.email.message_from_file(sys.stdin) |
852 |
|
|
853 |
|
if options.mail: |
854 |
|
stdout = tempfile.TemporaryFile() |
855 |
|
stderr = stdout |
856 |
|
else: |
857 |
|
stdout = sys.stdout |
858 |
|
stderr = sys.stderr |
859 |
|
|
860 |
|
try: |
861 |
|
module = msg['X-Module-Name'] |
862 |
|
version = msg['X-Module-Version'] |
863 |
|
hexdigest = msg['X-Module-SHA256-tar.xz'] |
864 |
|
except KeyError, e: |
865 |
|
print >>stderr, "ERROR: %s" % e |
866 |
|
if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True) |
867 |
|
sys.exit(1) |
868 |
|
|
869 |
|
try: |
870 |
|
packages = Downstream().get_downstream_from_upstream(module, version) |
871 |
|
except ValueError, e: |
872 |
|
print >>stderr, "ERROR: %s" % e |
873 |
|
if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True) |
874 |
|
sys.exit(1) |
875 |
|
|
876 |
|
if options.wait: |
877 |
|
# maildrop aborts and will try to deliver after 5min |
878 |
|
# fork to avoid this |
879 |
|
if os.fork() != 0: sys.exit(0) |
880 |
|
# wait SLEEP_INITIAL after the message was sent |
881 |
|
secs = SLEEP_INITIAL |
882 |
|
t = email.utils.parsedate_tz(msg['Date']) |
883 |
|
if t is not None: |
884 |
|
msg_time = email.utils.mktime_tz(t) |
885 |
|
secs = SLEEP_INITIAL - (time.time() - msg_time) |
886 |
|
|
887 |
|
if secs > 0: time.sleep(secs) |
888 |
|
|
889 |
|
error = False |
890 |
|
for package in packages: |
891 |
|
cmd = ['mga-gnome', 'increase', '--hash', hexdigest] |
892 |
|
if options.submit: |
893 |
|
cmd.append('--submit') |
894 |
|
if options.force: |
895 |
|
cmd.append('--force') |
896 |
|
cmd.extend((package, version)) |
897 |
|
if subprocess.call(cmd, stdout=stdout, stderr=stderr): |
898 |
|
error = True |
899 |
|
|
900 |
|
if options.mail: _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error) |
901 |
|
|
902 |
def main(): |
def main(): |
903 |
description = """Mageia GNOME commands.""" |
description = """Mageia GNOME commands.""" |
904 |
epilog="""Report bugs to Olav Vitters""" |
epilog="""Report bugs to Olav Vitters""" |
905 |
parser = argparse.ArgumentParser(description=description,epilog=epilog) |
parser = argparse.ArgumentParser(description=description,epilog=epilog) |
906 |
|
parser.add_argument("-l", "--limit", type=argparse.FileType('r', 0), |
907 |
|
dest="limit_upstream", metavar="FILE", |
908 |
|
help="File containing upstream names") |
909 |
|
parser.add_argument("-d", "--distro", action="store", dest="distro", |
910 |
|
help="Distribution release") |
911 |
|
|
912 |
# SUBPARSERS |
# SUBPARSERS |
913 |
subparsers = parser.add_subparsers(title='subcommands') |
subparsers = parser.add_subparsers(title='subcommands') |
918 |
) |
) |
919 |
|
|
920 |
subparser = subparsers.add_parser('packages', help='list all GNOME packages') |
subparser = subparsers.add_parser('packages', help='list all GNOME packages') |
921 |
|
subparser.add_argument("-m", "--m", action="store_true", dest="upstream", |
922 |
|
help="Show upstream module") |
923 |
|
subparser.add_argument( "--version", action="store_true", dest="show_version", |
924 |
|
help="Show version numbers") |
925 |
|
subparser.add_argument( "--diff", action="store_true", dest="diff", |
926 |
|
help="Only show packages with different version") |
927 |
|
subparser.add_argument( "--sort", type=argparse.FileType('r', 0), |
928 |
|
dest="sort", metavar="FILE", |
929 |
|
help="Sort packages according to order in given FILE") |
930 |
|
|
931 |
|
subparser.set_defaults( |
932 |
|
func=cmd_ls, upstream=False, show_version=False, diff=False |
933 |
|
) |
934 |
|
|
935 |
|
subparser = subparsers.add_parser('check-latest', help='check for latest version of packages') |
936 |
subparser.set_defaults( |
subparser.set_defaults( |
937 |
func=cmd_ls |
func=cmd_check_latest |
938 |
) |
) |
939 |
|
|
940 |
subparser = subparsers.add_parser('patches', help='list all GNOME patches') |
subparser = subparsers.add_parser('patches', help='list all GNOME patches') |
953 |
subparser = subparsers.add_parser('increase', help='Increase version number') |
subparser = subparsers.add_parser('increase', help='Increase version number') |
954 |
subparser.add_argument("package", help="Package name") |
subparser.add_argument("package", help="Package name") |
955 |
subparser.add_argument("version", help="Version number") |
subparser.add_argument("version", help="Version number") |
956 |
|
subparser.add_argument("-f", "--force", action="store_true", dest="force", |
957 |
|
help="Override warnings, just do it") |
958 |
subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream", |
subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream", |
959 |
help="Package name reflects the upstream name") |
help="Package name reflects the upstream name") |
960 |
subparser.add_argument("-s", "--submit", action="store_true", dest="submit", |
subparser.add_argument("-s", "--submit", action="store_true", dest="submit", |
961 |
help="Commit changes and submit") |
help="Commit changes and submit") |
962 |
|
subparser.add_argument( "--no-submit", action="store_false", dest="submit", |
963 |
|
help="Do not commit changes and submit") |
964 |
|
subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo", |
965 |
|
help="Hash algorithm") |
966 |
|
subparser.add_argument("--hash", dest="hexdigest", |
967 |
|
help="Hexdigest of the hash") |
968 |
subparser.set_defaults( |
subparser.set_defaults( |
969 |
func=cmd_package_new_version, submit=False, upstream=False |
func=cmd_package_new_version, submit=argparse.SUPPRESS, upstream=False, hexdigest=None, algo="sha256", |
970 |
|
force=False |
971 |
|
) |
972 |
|
|
973 |
|
subparser = subparsers.add_parser('gnome-release-email', help='Submit packages based on GNOME ftp-release-list email') |
974 |
|
subparser.add_argument("-m", "--mail", help="Email address to send the progress to") |
975 |
|
subparser.add_argument("-w", "--wait", action="store_true", |
976 |
|
help="Wait before trying to retrieve the new version") |
977 |
|
subparser.add_argument("-s", "--submit", action="store_true", dest="submit", |
978 |
|
help="Commit changes and submit") |
979 |
|
subparser.add_argument("-f", "--force", action="store_true", |
980 |
|
help="Force submission") |
981 |
|
subparser.set_defaults( |
982 |
|
func=cmd_parse_ftp_release_list, force=False, wait=False |
983 |
) |
) |
984 |
|
|
985 |
if len(sys.argv) == 1: |
if len(sys.argv) == 1: |
987 |
sys.exit(2) |
sys.exit(2) |
988 |
|
|
989 |
options = parser.parse_args() |
options = parser.parse_args() |
990 |
|
if options.limit_upstream: |
991 |
|
Upstream.limit = set(options.limit_upstream.read().strip("\n").splitlines()) |
992 |
|
|
993 |
|
if not hasattr(options, 'submit'): |
994 |
|
options.submit = not options.distro |
995 |
|
|
996 |
|
if options.distro: |
997 |
|
Downstream.PKGROOT = os.path.join('~/pkgs', options.distro) |
998 |
|
Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source,Core {0} Updates Testing Source".format(options.distro) |
999 |
|
Downstream.DISTRO = options.distro |
1000 |
|
|
1001 |
try: |
try: |
1002 |
options.func(options, parser) |
options.func(options, parser) |
1012 |
sys.exit(0) |
sys.exit(0) |
1013 |
|
|
1014 |
if __name__ == "__main__": |
if __name__ == "__main__": |
1015 |
|
os.environ['PYTHONUNBUFFERED'] = '1' |
1016 |
main() |
main() |