#!/usr/bin/python -u

# A lot of the code comes from ftpadmin, see
# http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# version comparison:
import rpm

# opening tarballs:
import tarfile
import gzip
import bz2
import lzma # pyliblzma

# getting links from HTML document:
from sgmllib import SGMLParser
import urllib2
import urlparse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
import email.utils
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# version freeze
import datetime

MEDIA="Core Release Source"
URL="http://download.gnome.org/sources/"
PKGROOT='~/pkgs'
SLEEP_INITIAL=180
SLEEP_REPEAT=30
SLEEP_TIMES=20

re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """

    return rpm.labelCompare(('1', a, '1'), ('1', b, '1'))

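# Illustrative sketch of the comparison semantics (assuming rpm's labelCompare,
# which compares (epoch, version, release) tuples; epoch and release are pinned
# above, so only the version part matters). Example values are made up:
#   version_cmp('3.4.1', '3.4.0')  ->  1
#   version_cmp('3.4.0', '3.4.0')  ->  0
#   version_cmp('3.3.92', '3.4.0') -> -1
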
def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if ( latest is None or version_cmp(version, latest) > 0 ) \
           and ( max_version is None or version_cmp(version, max_version) < 0 ):
            latest = version
    return latest

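# For example (illustrative values, not taken from any real package list):
#   get_latest_version(['3.4.0', '3.4.1', '3.5.1'])                      -> '3.5.1'
#   get_latest_version(['3.4.0', '3.4.1', '3.5.1'], max_version='3.5.0') -> '3.4.1'
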
def judge_version_increase(version_old, version_new):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [re_majmin.sub(r'\1', ver) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    # x = major
    # y = minor : even means stable
    # z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # More detailed analysis needed, so figure out the numbers
    majmin_nrs = [map(long, ver.split('.')) for ver in majmins]

    # Check/ensure major version number is the same
    if majmin_nrs[0][0] != majmin_nrs[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (majmin_nrs[0][1] % 2 == 0, majmin_nrs[1][1] % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (4, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (6, "Stable version increase")

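# A few concrete expectations for the judgement scale above (illustrative only):
#   judge_version_increase('3.4.0', '3.4.1')  -> (10, None)
#   judge_version_increase('3.4.1', '3.5.1')  -> (1, "Stable to unstable increase")
#   judge_version_increase('3.5.92', '3.6.0') -> (5, "Unstable to stable")
#   judge_version_increase('3.4.1', '3.4.1')  -> (-2, "Already at version 3.4.1!")
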
def line_input (file):
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
    """Open filename in a text editor, trying $VISUAL, $EDITOR and some common fallbacks"""

    editors = []

    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            if e.errno == 2:
                continue
            raise

        if ret == 127:
            continue

        return True

class urllister(SGMLParser):
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        href = [v for k, v in attrs if k=='href']
        if href:
            self.urls.extend(href)

class XzTarFile(tarfile.TarFile):

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open xz compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            fileobj = _LMZAProxy(fileobj, mode)
        else:
            fileobj = lzma.LZMAFile(name, mode)

        try:
            # lzma doesn't immediately return an error
            # try and read a bit of data to determine if it is a valid xz file
            fileobj.read(tarfile.BLOCKSIZE)
            fileobj.seek(0)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t

if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open

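# With the fallback registered above (only when the running tarfile module has
# no native xz support), callers can open any GNOME tarball the same way,
# e.g. something like:
#   tarfile.open('gnome-shell-3.4.1.tar.xz').getnames()
# (hypothetical filename, shown only to illustrate the transparent xz handling)
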
def is_valid_hash(path, algo, hexdigest):
    if algo not in hashlib.algorithms:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest

class SpecFile(object):
    re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)

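    # These rewrite spec lines of the shape below (typical Mageia spec headers,
    # values illustrative), keeping the "Version:"/"Release:" prefix intact and
    # only swapping the value:
    #   Version:  3.4.0      ->  Version:  <new version>
    #   Release:  %mkrel 2   ->  Release:  %mkrel 1
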
    def __init__(self, path):
        self.path = path
        self.cwd = os.path.dirname(path)

    @property
    def version(self):
        return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0]
    @property
    def sources(self):
        ts = rpm.ts()
        spec = ts.parseSpec(self.path)
        srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
                  else spec.sources()
        return dict((os.path.basename(name), name) for name, no, flags in srclist)

    def update(self, version, force=False):
        """Update specfile (increase version)"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version)

        if judgement < 0:
            print >>sys.stderr, "ERROR: %s!" % (msg)
            return False

        if judgement < 5:
            print "WARNING: %s!" % (msg)
            if not force: return False

        # XXX - os.path.join is hackish
        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')])
        if svn_diff_output != '':
            print svn_diff_output
            print >>sys.stderr, "ERROR: Package has uncommitted changes!"
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print >>sys.stderr, "WARNING: Force used; reverting svn changes"
                subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')])
            except subprocess.CalledProcessError:
                return False

        with open(self.path, "r") as f:
            data = f.read()

        if data.count("%mkrel") != 1:
            print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!"
            return False

        data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not increase version!"
            return False

        data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if nr != 1:
            print >>sys.stderr, "ERROR: Could not reset release!"
            return False

        # Overwrite file with new version number
        write_file(self.path, data)


        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
            return False


        # Try to download the new tarball various times and wait between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1: time.sleep(SLEEP_REPEAT)
            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError, e:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if e.returncode != 1:
                    return False
        else:
            return False


        try:
            # Check patches still apply
            subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
        except subprocess.CalledProcessError:
            logfile = os.path.join(os.path.dirname(self.path), 'log.%s' % os.path.splitext(os.path.basename(self.path))[0])
            if os.path.exists(logfile):
                subprocess.call(['tail', '-n', '15', logfile])
            return False

        return True

class Patch(object):
    """Do things with patches"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file"""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers: copy them line by line
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)

#Author: fwang
#Subject: Build fix: Fix glib header inclusion
#Applied-Upstream: commit:30602
#Forwarded: yes
#Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLs
                        if info['data'].startswith('//') and info['header'].lower() == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                            continue

                    last_header = None
        except IOError:
            pass

        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
                or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '') \
                or ('Author' in headers and headers['Author'].strip() != '') \
                or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author

def get_upstream_names():
    urlopen = urllib2.build_opener()

    good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

    # Get the files
    usock = urlopen.open(URL)
    parser = urllister()
    parser.feed(usock.read())
    usock.close()
    parser.close()
    files = parser.urls

    tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])

    return tarballs

def get_downstream_names():
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

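    # Roughly, for a tarball name such as 'gnome-shell-3.4.1.tar.xz' (example
    # only) this yields module='gnome-shell', version='3.4.1', format='tar.xz'.
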
    contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", MEDIA], close_fds=True).strip("\n").splitlines()

    FILES = {}
    TARBALLS = {}

    for line in contents:
        try:
            srpm, version, filename = line.split("|")
        except ValueError:
            print >>sys.stderr, line
            continue

        if '.tar' in filename:
            r = re_file.match(filename)
            if r:
                fileinfo = r.groupdict()
                module = fileinfo['module']

                if module not in TARBALLS:
                    TARBALLS[module] = {}
                TARBALLS[module][srpm] = version

        if srpm not in FILES:
            FILES[srpm] = set()
        FILES[srpm].add(filename)

    return TARBALLS, FILES

def get_downstream_from_upstream(upstream, version):
    # Determine the package name
    downstream, downstream_files = get_downstream_names()

    if upstream not in downstream:
        raise ValueError("No packages for upstream name: %s" % upstream)

    if len(downstream[upstream]) == 1:
        return downstream[upstream].keys()

    # Directories packages are located in
    root = os.path.expanduser(PKGROOT)

    packages = {}
    for package in downstream[upstream].keys():
        cwd = os.path.join(root, package)

        # Checkout package to ensure the checkout reflects the latest changes
        try:
            subprocess.check_call(['mgarepo', 'co', package], cwd=root)
        except subprocess.CalledProcessError:
            raise ValueError("Multiple packages found and cannot checkout %s" % package)

        # Determine version from spec file
        try:
            packages[package] = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
        except subprocess.CalledProcessError:
            raise ValueError("Multiple packages found and cannot determine version of %s" % package)

    # Return all packages reflecting the current version
    matches = [package for package in packages if packages[package] == version]
    if len(matches):
        return matches

    # Return all packages reflecting the version before the current version
    latest_version = get_latest_version(packages.values(), max_version=version)
    matches = [package for package in packages if packages[package] == latest_version]
    if len(matches):
        return matches

    # Give up
    raise ValueError("Multiple packages found and cannot determine package for version %s" % version)

def write_file(path, data):
    with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst:
        fdst.write(data)
        fdst.flush()
        os.rename(fdst.name, path)

def cmd_co(options, parser):
    root = os.path.expanduser(PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        print "%s => %s" % (module, package)
        subprocess.call(['mgarepo', 'co', package], cwd=root)

def join_streams(show_version=False, only_diff_version=False):
    root = os.path.expanduser(PKGROOT)

    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    matches = upstream & set(downstream.keys())
    for module in matches:
        for package in downstream[module].keys():
            package_version = downstream[module][package]
            spec_version = None
            if show_version or only_diff_version:
                cwd = os.path.join(root, package)
                try:
                    spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
                except subprocess.CalledProcessError:
                    spec_version = 'N/A'

            if only_diff_version and package_version == spec_version:
                continue

            yield (package, module, package_version, spec_version, downstream_files[package])

def cmd_ls(options, parser):
    for package, module, package_version, spec_version, downstream_files in sorted(join_streams(show_version=options.show_version, only_diff_version=options.diff)):
        print package,"\t",
        if options.upstream: print module, "\t",
        if options.show_version: print spec_version, "\t", package_version, "\t",
        print

def cmd_patches(options, parser):
    root = os.path.expanduser(PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:

                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
                valid = ""
                forwarded = ""
                if p.dep3['headers']:
                    forwarded = p.dep3['headers'].get('Forwarded', "no")
                    if p.dep3['valid']:
                        valid="VALID"
                print "\t".join((module, package, str(p), forwarded, valid))

def cmd_dep3(options, parser):
    p = Patch(options.patch)
    p.add_dep3()

def cmd_package_new_version(options, parser):
    # Determine the package name
    if options.upstream:
        try:
            package = get_downstream_from_upstream(options.package, options.version)[0]
        except ValueError, e:
            print >>sys.stderr, "ERROR: %s" % e
            sys.exit(1)
    else:
        package = options.package

    # Directories packages are located in
    root = os.path.expanduser(PKGROOT)
    cwd = os.path.join(root, package)

    # Checkout package to ensure the checkout reflects the latest changes
    try:
        subprocess.check_call(['mgarepo', 'co', package], cwd=root)
    except subprocess.CalledProcessError:
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package))
    print "%s => %s" % (s.version, options.version)
    if not s.update(options.version, force=options.force):
        sys.exit(1)

    # Check hash, if given
    if options.hexdigest is not None:
        sources = [name for name, origname in s.sources.iteritems() if '://' in origname]
        if not len(sources):
            print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!"
            sys.exit(1)

        for filename in sources:
            if not is_valid_hash(os.path.join(cwd, "SOURCES", filename), options.algo, options.hexdigest):
                print >>sys.stderr, "ERROR: Hash file failed check for %s!" % filename
                print >>sys.stderr, "ERROR: Reverting changes!"
                subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
                sys.exit(1)

    # We can even checkin and submit :-)
    if options.submit:
        try:
            # checkin changes
            subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd)
            # and submit
            subprocess.check_call(['mgarepo', 'submit'], cwd=cwd)
        except subprocess.CalledProcessError:
            sys.exit(1)

def cmd_parse_ftp_release_list(options, parser):
    def _send_reply_mail(contents, orig_msg, to, error=False):
        """Send a reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')
        if error:
            # XXX - ugly
            contents.seek(0)
            lastline = contents.read().rstrip().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""
        msg['Subject'] = "Re: %s%s" % (orig_msg['Subject'], subjecterror)
        msg['To'] = to
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--']
        cmd.extend([to])
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        p.stdin.write(msg.as_string())
        p.stdin.flush()
        p.stdin.close()
        p.wait()


    msg = email.message_from_file(sys.stdin)

    if options.mail:
        stdout = tempfile.TemporaryFile()
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    try:
        module = msg['X-Module-Name']
        version = msg['X-Module-Version']
        hexdigest = msg['X-Module-SHA256-tar.xz']
    except KeyError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    try:
        packages = get_downstream_from_upstream(module, version)
    except ValueError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    if options.wait:
        # maildrop aborts and will try to deliver after 5min
        # fork to avoid this
        if os.fork() != 0: sys.exit(0)
        # wait SLEEP_INITIAL after the message was sent
        secs = SLEEP_INITIAL
        t = email.utils.parsedate_tz(msg['Date'])
        if t is not None:
            msg_time = email.utils.mktime_tz(t)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0: time.sleep(secs)

    error = False
    for package in packages:
        cmd = ['mga-gnome', 'increase', '--submit', '--hash', hexdigest]
        if options.force:
            cmd.append('--force')
        cmd.extend((package, version))
        if subprocess.call(cmd, stdout=stdout, stderr=stderr):
            error = True

    if options.mail: _send_reply_mail(stdout, msg, options.mail, error=error)

def main():
    description = """Mageia GNOME commands."""
    epilog="""Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description,epilog=epilog)

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')
    # co
    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(
        func=cmd_co
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument("--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument("--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.set_defaults(
        func=cmd_ls, upstream=False, show_version=False, diff=False
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(
        func=cmd_dep3, path=False
    )

    subparser = subparsers.add_parser('increase', help='Increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest",
                           help="Hexdigest of the hash")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=False, upstream=False, hexdigest=None, algo="sha256",
        force=False
    )

    subparser = subparsers.add_parser('gnome-release-email', help='Submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.add_argument("-f", "--force", action="store_true",
                           help="Force submission")
    subparser.set_defaults(
        func=cmd_parse_ftp_release_list, force=False, wait=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)

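# Typical invocations (illustrative; the installed command name and the module
# name below are assumptions, run the script without arguments for full help):
#   mga-gnome packages --version --diff
#   mga-gnome increase --upstream --hash <sha256> gnome-shell 3.4.1
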
if __name__ == "__main__":
    os.environ['PYTHONUNBUFFERED'] = '1'
    main()