#!/usr/bin/python -u
2 |
|
|
3 |
|
# A lot of the code comes from ftpadmin, see |
4 |
|
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin |
5 |
|
# Written by Olav Vitters |
6 |
|
|
7 |
|
# basic modules: |
8 |
import os |
import os |
9 |
import os.path |
import os.path |
10 |
import sys |
import sys |
11 |
import re |
import re |
12 |
import subprocess |
import subprocess |
13 |
import urllib2 |
|
14 |
import urlparse |
# command line parsing, error handling: |
15 |
import argparse |
import argparse |
16 |
import errno |
import errno |
17 |
|
|
18 |
|
# overwriting files by moving them (safer): |
19 |
import tempfile |
import tempfile |
20 |
import shutil |
import shutil |
21 |
|
|
22 |
|
# version comparison: |
23 |
|
import rpm |
24 |
|
|
25 |
|
# opening tarballs: |
26 |
|
import tarfile |
27 |
|
import gzip |
28 |
|
import bz2 |
29 |
|
import lzma # pyliblzma |
30 |
|
|
31 |
|
# getting links from HTML document: |
32 |
from sgmllib import SGMLParser |
from sgmllib import SGMLParser |
33 |
|
import urllib2 |
34 |
|
import urlparse |
35 |
|
|
36 |
|
# for checking hashes |
37 |
|
import hashlib |
38 |
|
|
39 |
|
# for parsing ftp-release-list emails |
40 |
|
import email |
41 |
|
from email.mime.text import MIMEText |
42 |
|
|
43 |
|
# to be able to sleep for a while |
44 |
|
import time |
45 |
|
|
46 |
MEDIA="Core Release Source" |
MEDIA="Core Release Source" |
47 |
URL="http://download.gnome.org/sources/" |
URL="http://download.gnome.org/sources/" |
48 |
PKGROOT='~/pkgs' |
PKGROOT='~/pkgs' |
49 |
|
SLEEP_INITIAL=300 |
50 |
|
|
51 |
|
re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*') |
52 |
|
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)') |
53 |
|
|
54 |
|
def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """
    # Epoch and release are pinned to the same value on both sides so that
    # only the version component decides the comparison.
    label_a = ('1', a, '1')
    label_b = ('1', b, '1')
    return rpm.labelCompare(label_a, label_b)
64 |
|
|
65 |
|
def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for candidate in versions:
        # Skip anything at or above the requested upper bound
        if max_version is not None and version_cmp(candidate, max_version) >= 0:
            continue
        # Keep the highest qualifying candidate seen so far
        if latest is None or version_cmp(candidate, latest) > 0:
            latest = candidate
    return latest
76 |
|
|
77 |
|
def judge_version_increase(version_old, version_new):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First make sure version_new actually sorts after version_old
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether both follow the GNOME
    # versioning scheme by extracting their major.minor prefixes
    majmins = [re_majmin.sub(r'\1', ver) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # GNOME scheme: x.y.z
    #   x = major
    #   y = minor: even means stable
    #   z = micro

    # Same major+minor? Micro-only bump, always fine.
    if majmins[0] == majmins[1]:
        return (10, None)

    # Different major.minor, so inspect the individual numbers
    majmin_nrs = [[long(part) for part in ver.split('.')] for ver in majmins]

    # Check/ensure major version number is the same
    if majmin_nrs[0][0] != majmin_nrs[1][0]:
        return (1, "Major version number increase")

    # Minor indicates stable (even) / unstable (odd)
    old_is_stable = majmin_nrs[0][1] % 2 == 0
    new_is_stable = majmin_nrs[1][1] % 2 == 0

    # Upgrading to unstable is weird
    if not new_is_stable:
        if old_is_stable:
            return (1, "Stable to unstable increase")

        return (4, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not old_is_stable:
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    return (6, "Stable version increase")
140 |
|
|
141 |
def line_input(file):
    """Yield the lines of file with any single trailing newline removed.

    The source text lost the newline-stripping branch; it is reconstructed
    here: lines ending in '\n' are yielded without it, a final unterminated
    line is yielded as-is.
    """
    for line in file:
        if line[-1:] == '\n':
            yield line[:-1]
        else:
            yield line
147 |
|
|
148 |
|
def call_editor(filename): |
149 |
|
"""Return a sequence of possible editor binaries for the current platform""" |
150 |
|
|
151 |
|
editors = [] |
152 |
|
|
153 |
|
for varname in 'VISUAL', 'EDITOR': |
154 |
|
if varname in os.environ: |
155 |
|
editors.append(os.environ[varname]) |
156 |
|
|
157 |
|
editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe')) |
158 |
|
|
159 |
|
for editor in editors: |
160 |
|
try: |
161 |
|
ret = subprocess.call([editor, filename]) |
162 |
|
except OSError, e: |
163 |
|
if e.errno == 2: |
164 |
|
continue |
165 |
|
raise |
166 |
|
|
167 |
|
if ret == 127: |
168 |
|
continue |
169 |
|
|
170 |
|
return True |
171 |
|
|
172 |
class urllister(SGMLParser): |
class urllister(SGMLParser): |
173 |
def reset(self): |
def reset(self): |
174 |
SGMLParser.reset(self) |
SGMLParser.reset(self) |
179 |
if href: |
if href: |
180 |
self.urls.extend(href) |
self.urls.extend(href) |
181 |
|
|
182 |
|
class XzTarFile(tarfile.TarFile):
    """TarFile subclass that adds support for xz (LZMA) compressed archives."""

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open xz compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            # BUGFIX: was "_LMZAProxy" (typo); the proxy class is spelled
            # _LZMAProxy, as used for .blocksize below
            fileobj = _LZMAProxy(fileobj, mode)
        else:
            fileobj = lzma.LZMAFile(name, mode)

        try:
            # lzma doesn't immediately return an error:
            # try and read a bit of data to determine if it is a valid xz file
            fileobj.read(_LZMAProxy.blocksize)
            fileobj.seek(0)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t

# Only install our xz-capable open if tarfile itself lacks xz support
if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open
215 |
|
|
216 |
|
def is_valid_hash(path, algo, hexdigest):
    """Return True when the file at path hashes to hexdigest using algo.

    Raises ValueError for an unknown hash algorithm name.
    """
    if algo not in hashlib.algorithms:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    hasher = getattr(hashlib, algo)()

    # Feed the file through the hash in fixed-size chunks so memory
    # use stays bounded for large tarballs
    with open(path, 'rb') as fp:
        while True:
            chunk = fp.read(32768)
            if not chunk:
                break
            hasher.update(chunk)

    return hasher.hexdigest() == hexdigest
229 |
|
|
230 |
|
class SpecFile(object): |
231 |
|
re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE) |
232 |
|
re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE) |
233 |
|
|
234 |
|
def __init__(self, path): |
235 |
|
self.path = path |
236 |
|
self.cwd = os.path.dirname(path) |
237 |
|
|
238 |
|
@property |
239 |
|
def version(self): |
240 |
|
return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0] |
241 |
|
@property |
242 |
|
def sources(self): |
243 |
|
ts = rpm.ts() |
244 |
|
spec = ts.parseSpec(self.path) |
245 |
|
srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \ |
246 |
|
else spec.sources() |
247 |
|
return dict((os.path.basename(name), name) for name, no, flags in srclist) |
248 |
|
|
249 |
|
def update(self, version): |
250 |
|
"""Update specfile (increase version)""" |
251 |
|
cur_version = self.version |
252 |
|
|
253 |
|
(judgement, msg) = judge_version_increase(cur_version, version) |
254 |
|
|
255 |
|
if judgement < 0: |
256 |
|
print >>sys.stderr, "ERROR: %s!" % (msg) |
257 |
|
return False |
258 |
|
|
259 |
|
if judgement < 5: |
260 |
|
print "WARNING: %s!" % (msg) |
261 |
|
return False |
262 |
|
|
263 |
|
# XXX - os.path.join is hackish |
264 |
|
if subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')]) != '': |
265 |
|
print >>sys.stderr, "ERROR: Package has uncommitted changes!" |
266 |
|
return False |
267 |
|
|
268 |
|
with open(self.path, "rw") as f: |
269 |
|
data = f.read() |
270 |
|
|
271 |
|
if data.count("%mkrel") != 1: |
272 |
|
print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!" |
273 |
|
return False |
274 |
|
|
275 |
|
data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1) |
276 |
|
if nr != 1: |
277 |
|
print >>sys.stderr, "ERROR: Could not increase version!" |
278 |
|
return False |
279 |
|
|
280 |
|
data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1) |
281 |
|
if nr != 1: |
282 |
|
print >>sys.stderr, "ERROR: Could not reset release!" |
283 |
|
return False |
284 |
|
|
285 |
|
# Overwrite file with new version number |
286 |
|
write_file(self.path, data) |
287 |
|
|
288 |
|
|
289 |
|
# Verify that RPM also agrees that version number has changed |
290 |
|
if self.version != version: |
291 |
|
print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version |
292 |
|
return False |
293 |
|
|
294 |
|
try: |
295 |
|
# Download new tarball |
296 |
|
subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd) |
297 |
|
# Check patches still apply |
298 |
|
subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd) |
299 |
|
except subprocess.CalledProcessError: |
300 |
|
return False |
301 |
|
|
302 |
|
return True |
303 |
|
|
304 |
class Patch(object): |
class Patch(object): |
305 |
"""Do things with patches""" |
"""Do things with patches""" |
306 |
|
|
316 |
return self.path if self.show_path else os.path.basename(self.path) |
return self.path if self.show_path else os.path.basename(self.path) |
317 |
|
|
318 |
def add_dep3(self): |
def add_dep3(self): |
319 |
|
"""Add DEP-3 headers to a patch file""" |
320 |
if self.dep3['valid']: |
if self.dep3['valid']: |
321 |
return False |
return False |
322 |
|
|
342 |
|
|
343 |
# XXX - wrap this at 80 chars |
# XXX - wrap this at 80 chars |
344 |
add_line = True |
add_line = True |
345 |
print >>fdst, "%s: %s" % (header, data) |
print >>fdst, "%s: %s" % (header, "" if data is None else data) |
346 |
|
|
347 |
if add_line: print >>fdst, "" |
if add_line: print >>fdst, "" |
348 |
# Now copy any other data and the patch |
# Now copy any other data and the patch |
351 |
fdst.flush() |
fdst.flush() |
352 |
os.rename(fdst.name, self.path) |
os.rename(fdst.name, self.path) |
353 |
|
|
354 |
|
call_editor(self.path) |
355 |
|
|
356 |
#Author: fwang |
#Author: fwang |
357 |
#Subject: Build fix: Fix glib header inclusion |
#Subject: Build fix: Fix glib header inclusion |
358 |
#Applied-Upstream: commit:30602 |
#Applied-Upstream: commit:30602 |
360 |
#Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247 |
#Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247 |
361 |
|
|
362 |
def _read_dep3(self): |
def _read_dep3(self): |
363 |
"""This will also parse git headers""" |
"""Read DEP-3 headers from an existing patch file |
364 |
|
|
365 |
|
This will also parse git headers""" |
366 |
dep3 = {} |
dep3 = {} |
367 |
headers = {} |
headers = {} |
368 |
|
|
380 |
r = self.re_dep3.match(line) |
r = self.re_dep3.match(line) |
381 |
if r: |
if r: |
382 |
info = r.groupdict() |
info = r.groupdict() |
383 |
|
|
384 |
|
# Avoid matching URLS |
385 |
|
if info['data'].startswith('//') and info['header'].lower () == info['header']: |
386 |
|
continue |
387 |
|
|
388 |
headers[info['header']] = info['data'] |
headers[info['header']] = info['data'] |
389 |
last_header = info['header'] |
last_header = info['header'] |
390 |
last_nr = nr |
last_nr = nr |
423 |
@property |
@property |
424 |
def svn_author(self): |
def svn_author(self): |
425 |
if not hasattr(self, '_svn_author'): |
if not hasattr(self, '_svn_author'): |
426 |
p = subprocess.Popen(['svn', 'log', '-q', "--", self.path], stdout=subprocess.PIPE, close_fds=True) |
try: |
427 |
contents = p.stdout.read().strip("\n").splitlines() |
contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines() |
428 |
ecode = p.wait() |
|
|
if ecode == 0: |
|
429 |
for line in contents: |
for line in contents: |
430 |
if ' | ' not in line: |
if ' | ' not in line: |
431 |
continue |
continue |
433 |
fields = line.split(' | ') |
fields = line.split(' | ') |
434 |
if len(fields) >= 3: |
if len(fields) >= 3: |
435 |
self._svn_author = fields[1] |
self._svn_author = fields[1] |
436 |
|
except subprocess.CalledProcessError: |
437 |
|
pass |
438 |
|
|
439 |
|
if not hasattr(self, '_svn_author'): |
440 |
|
return None |
441 |
|
|
442 |
return self._svn_author |
return self._svn_author |
443 |
|
|
461 |
def get_downstream_names(): |
def get_downstream_names(): |
462 |
re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$') |
re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$') |
463 |
|
|
464 |
p = subprocess.Popen(['urpmf', '--files', '.', "--media", MEDIA], stdout=subprocess.PIPE, close_fds=True) |
contents = subprocess.check_output(['urpmf', '--files', '.', "--media", MEDIA], close_fds=True).strip("\n").splitlines() |
|
contents = p.stdout.read().strip("\n").splitlines() |
|
|
ecode = p.wait() |
|
|
if ecode != 0: |
|
|
sys.exit(1) |
|
465 |
|
|
466 |
FILES = {} |
FILES = {} |
467 |
TARBALLS = {} |
TARBALLS = {} |
489 |
|
|
490 |
return TARBALLS, FILES |
return TARBALLS, FILES |
491 |
|
|
492 |
|
def get_downstream_from_upstream(upstream, version):
    """Resolve an upstream module name to its downstream package name(s).

    Raises ValueError when the module is unknown or maps to more than
    one package."""
    # Determine the package name
    downstream, downstream_files = get_downstream_names()

    if upstream not in downstream:
        raise ValueError("No packages for upstream name: %s" % upstream)

    packages = downstream[upstream]
    if len(packages) != 1:
        # XXX - Make it more intelligent
        raise ValueError("Multiple packages found for %s: %s" % (upstream, ", ".join(packages)))

    return list(packages)
504 |
|
|
505 |
|
def write_file(path, data):
    """Atomically overwrite the file at path with data.

    The data is written to a temporary file in the same directory and
    renamed over the target, so readers never observe a half-written
    file.  On failure the temporary file is removed instead of being
    left behind (the original leaked it, since delete=False)."""
    with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst:
        try:
            fdst.write(data)
            fdst.flush()
            os.rename(fdst.name, path)
        except Exception:
            # Don't litter the target directory with partial temp files
            os.unlink(fdst.name)
            raise
510 |
|
|
511 |
def cmd_co(options, parser): |
def cmd_co(options, parser): |
512 |
upstream = get_upstream_names() |
upstream = get_upstream_names() |
513 |
downstream, downstream_files = get_downstream_names() |
downstream, downstream_files = get_downstream_names() |
520 |
for package in downstream[module]: |
for package in downstream[module]: |
521 |
subprocess.call(['mgarepo', 'co', package], cwd=cwd) |
subprocess.call(['mgarepo', 'co', package], cwd=cwd) |
522 |
|
|
523 |
def join_streams():
    """Yield (package, module) pairs for every downstream package whose
    upstream module is known."""
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    # Only modules that exist both upstream and downstream
    for module in upstream & set(downstream.keys()):
        for package in downstream[module]:
            yield (package, module)
531 |
|
|
532 |
|
def cmd_ls(options, parser): |
533 |
|
for package, module in sorted(join_streams()): |
534 |
|
print "\t".join((package, module)) if options.upstream else package |
535 |
|
|
536 |
def cmd_patches(options, parser): |
def cmd_patches(options, parser): |
537 |
upstream = get_upstream_names() |
upstream = get_upstream_names() |
546 |
for srpm in downstream[module]: |
for srpm in downstream[module]: |
547 |
for filename in downstream_files[srpm]: |
for filename in downstream_files[srpm]: |
548 |
if '.patch' in filename or '.diff' in filename: |
if '.patch' in filename or '.diff' in filename: |
549 |
|
|
550 |
p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path) |
p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path) |
551 |
print "\t".join((module, srpm, str(p))) |
valid = "" |
552 |
|
forwarded = "" |
553 |
if p.dep3['headers']: |
if p.dep3['headers']: |
554 |
pprint.pprint(p.dep3['headers']) |
forwarded = p.dep3['headers'].get('Forwarded', "no") |
555 |
if p.dep3['valid']: |
if p.dep3['valid']: |
556 |
print "VALID" |
valid="VALID" |
557 |
|
print "\t".join((module, srpm, str(p), forwarded, valid)) |
558 |
|
|
559 |
def cmd_dep3(options, parser):
    """Add DEP-3 headers to the patch file given on the command line."""
    Patch(options.patch).add_dep3()
562 |
|
|
563 |
|
def cmd_package_new_version(options, parser): |
564 |
|
# Determine the package name |
565 |
|
if options.upstream: |
566 |
|
try: |
567 |
|
package = get_downstream_from_upstream(options.package, options.version)[0] |
568 |
|
except ValueError, e: |
569 |
|
print >>sys.stderr, "ERROR: %s" % e |
570 |
|
sys.exit(1) |
571 |
|
else: |
572 |
|
package = options.package |
573 |
|
|
574 |
|
# Directories packages are located in |
575 |
|
root = os.path.expanduser(PKGROOT) |
576 |
|
cwd = os.path.join(root, package) |
577 |
|
|
578 |
|
# Checkout package to ensure the checkout reflects the latest changes |
579 |
|
try: |
580 |
|
subprocess.check_call(['mgarepo', 'co', package], cwd=root) |
581 |
|
except subprocess.CalledProcessError: |
582 |
|
sys.exit(1) |
583 |
|
|
584 |
|
# SpecFile class handles the actual version+release change |
585 |
|
s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)) |
586 |
|
print "%s => %s" % (s.version, options.version) |
587 |
|
if not s.update(options.version): |
588 |
|
sys.exit(1) |
589 |
|
|
590 |
|
# Check hash, if given |
591 |
|
if options.hexdigest is not None: |
592 |
|
sources = [name for name, origname in s.sources.iteritems() if '://' in origname] |
593 |
|
if not len(sources): |
594 |
|
print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!" |
595 |
|
sys.stderr(1) |
596 |
|
|
597 |
|
for filename in sources: |
598 |
|
if not is_valid_hash(os.path.join(cwd, "SOURCES", filename), options.algo, options.hexdigest): |
599 |
|
print >>sys.stderr, "ERROR: Hash file failed check for %s!" % path |
600 |
|
print >>sys.stderr, "ERROR: Reverting changes!" |
601 |
|
subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd) |
602 |
|
sys.exit(1) |
603 |
|
|
604 |
|
# We can even checkin and submit :-) |
605 |
|
if options.submit: |
606 |
|
try: |
607 |
|
# checkin changes |
608 |
|
subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd) |
609 |
|
# and submit |
610 |
|
subprocess.check_call(['mgarepo', 'submit'], cwd=cwd) |
611 |
|
except subprocess.CalledProcessError: |
612 |
|
sys.exit(1) |
613 |
|
|
614 |
|
def cmd_parse_ftp_release_list(options, parser): |
615 |
|
# XXX - not working yet |
616 |
|
def _send_reply_mail(contents, orig_msg, to): |
617 |
|
"""Send an reply email""" |
618 |
|
contents.seek(0) |
619 |
|
msg = MIMEText(contents.read(), _charset='utf-8') |
620 |
|
msg['Subject'] = "Re: %s" % orig_msg['Subject'] |
621 |
|
msg['To'] = to |
622 |
|
msg["In-Reply-To"] = orig_msg["Message-ID"] |
623 |
|
msg["References"] = orig_msg["Message-ID"] |
624 |
|
|
625 |
|
# Call sendmail program directly so it doesn't matter if the service is running |
626 |
|
cmd = ['/usr/sbin/sendmail', '-oi', '--'] |
627 |
|
cmd.extend([to]) |
628 |
|
p = subprocess.Popen(cmd, stdin=subprocess.PIPE) |
629 |
|
p.stdin.write(msg.as_string()) |
630 |
|
p.stdin.flush() |
631 |
|
p.stdin.close() |
632 |
|
p.wait() |
633 |
|
|
634 |
|
|
635 |
|
msg = email.email.message_from_file(sys.stdin) |
636 |
|
|
637 |
|
if options.mail: |
638 |
|
stdout = tempfile.NamedTemporaryFile() |
639 |
|
stderr = stdout |
640 |
|
else: |
641 |
|
stdout = sys.stdout |
642 |
|
stderr = sys.stderr |
643 |
|
|
644 |
|
try: |
645 |
|
module = msg['X-Module-Name'] |
646 |
|
version = msg['X-Module-Version'] |
647 |
|
hexdigest = msg['X-Module-SHA256-tar.xz'] |
648 |
|
except KeyError, e: |
649 |
|
print >>stderr, "ERROR: %s" % e |
650 |
|
if options.mail: _send_reply_mail(stdout, msg, options.mail) |
651 |
|
sys.exit(1) |
652 |
|
|
653 |
|
try: |
654 |
|
packages = get_downstream_from_upstream(module, version) |
655 |
|
except ValueError, e: |
656 |
|
print >>stderr, "ERROR: %s" % e |
657 |
|
if options.mail: _send_reply_mail(stdout, msg, options.mail) |
658 |
|
sys.exit(1) |
659 |
|
|
660 |
|
if options.wait: |
661 |
|
# maildrop aborts and will try to deliver after 5min |
662 |
|
# fork to avoid this |
663 |
|
if os.fork() != 0: sys.exit(0) |
664 |
|
time.sleep(SLEEP_INITIAL) |
665 |
|
|
666 |
|
for package in packages: |
667 |
|
subprocess.call(['mga-gnome', 'increase', '--submit', '--hash', hexdigest, package, version], stdout=stdout, stderr=stderr) |
668 |
|
|
669 |
|
if options.mail: _send_reply_mail(stdout, msg, options.mail) |
670 |
|
|
671 |
def main(): |
def main(): |
672 |
description = """Mageia GNOME commands.""" |
description = """Mageia GNOME commands.""" |
673 |
epilog="""Report bugs to Olav Vitters""" |
epilog="""Report bugs to Olav Vitters""" |
682 |
) |
) |
683 |
|
|
684 |
subparser = subparsers.add_parser('packages', help='list all GNOME packages') |
subparser = subparsers.add_parser('packages', help='list all GNOME packages') |
685 |
|
subparser.add_argument("-m", "--m", action="store_true", dest="upstream", |
686 |
|
help="Show upstream module") |
687 |
subparser.set_defaults( |
subparser.set_defaults( |
688 |
func=cmd_ls |
func=cmd_ls, upstream=False |
689 |
) |
) |
690 |
|
|
691 |
subparser = subparsers.add_parser('patches', help='list all GNOME patches') |
subparser = subparsers.add_parser('patches', help='list all GNOME patches') |
701 |
func=cmd_dep3, path=False |
func=cmd_dep3, path=False |
702 |
) |
) |
703 |
|
|
704 |
|
subparser = subparsers.add_parser('increase', help='Increase version number') |
705 |
|
subparser.add_argument("package", help="Package name") |
706 |
|
subparser.add_argument("version", help="Version number") |
707 |
|
subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream", |
708 |
|
help="Package name reflects the upstream name") |
709 |
|
subparser.add_argument("-s", "--submit", action="store_true", dest="submit", |
710 |
|
help="Commit changes and submit") |
711 |
|
subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo", |
712 |
|
help="Hash algorithm") |
713 |
|
subparser.add_argument("--hash", dest="hexdigest", |
714 |
|
help="Hexdigest of the hash") |
715 |
|
subparser.set_defaults( |
716 |
|
func=cmd_package_new_version, submit=False, upstream=False, hexdigest=None, algo="sha256" |
717 |
|
) |
718 |
|
|
719 |
|
subparser = subparsers.add_parser('gnome-release-email', help='Submit packages based on GNOME ftp-release-list email') |
720 |
|
subparser.add_argument("-m", "--mail", help="Email address to send the progress to") |
721 |
|
subparser.add_argument("-w", "--wait", action="store_true", |
722 |
|
help="Wait before trying to retrieve the new version") |
723 |
|
subparser.set_defaults( |
724 |
|
func=cmd_parse_ftp_release_list |
725 |
|
) |
726 |
|
|
727 |
if len(sys.argv) == 1: |
if len(sys.argv) == 1: |
728 |
parser.print_help() |
parser.print_help() |
729 |
sys.exit(2) |
sys.exit(2) |