8 |
import urllib2 |
import urllib2 |
9 |
import urlparse |
import urlparse |
10 |
import argparse |
import argparse |
11 |
|
import errno |
12 |
|
import tempfile |
13 |
|
import shutil |
14 |
from sgmllib import SGMLParser |
from sgmllib import SGMLParser |
15 |
|
|
16 |
# Upstream release series to look for on the mirror (used by code outside
# this view -- presumably get_upstream_names; confirm)
MEDIA="Core Release Source"
# Base URL of the GNOME sources download mirror
URL="http://download.gnome.org/sources/"
# Local packaging checkout root; expanded via os.path.expanduser before use
PKGROOT='~/pkgs'
19 |
|
|
20 |
|
# Tokenizes a version string into separators ('-', '.'), digit runs and
# anything else; consumed pairwise by version_cmp below.
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')


def _cmp(x, y):
    """Three-way compare: -1, 0 or 1.

    Replacement for the Python-2-only builtin cmp(); works on both
    Python 2 and 3."""
    return (x > y) - (x < y)


def version_cmp(a, b):
    """Compares two versions

    Returns
        -1 if a < b
         0 if a == b
         1 if a > b

    Logic from Bugzilla::Install::Util::vers_cmp"""
    A = re_version.findall(a.lstrip('0'))
    B = re_version.findall(b.lstrip('0'))

    while A and B:
        a = A.pop(0)
        b = B.pop(0)

        if a == b:
            continue
        elif a == '-':
            return -1
        elif b == '-':
            return 1
        elif a == '.':
            return -1
        elif b == '.':
            return 1
        elif a.isdigit() and b.isdigit():
            # Leading zeros force a string comparison (Bugzilla behaviour);
            # otherwise compare numerically.
            if a.startswith('0') or b.startswith('0'):
                c = _cmp(a, b)
            else:
                c = _cmp(int(a, 10), int(b, 10))
            if c:
                return c
        else:
            # Non-numeric components compare case-insensitively
            c = _cmp(a.upper(), b.upper())
            if c:
                return c

    # All shared components equal: the version with leftover components wins
    return _cmp(len(A), len(B))
58 |
|
|
59 |
|
def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    best = None
    for candidate in versions:
        # Skip anything at or above the requested ceiling
        if max_version is not None and version_cmp(candidate, max_version) >= 0:
            continue
        if best is None or version_cmp(candidate, best) > 0:
            best = candidate
    return best
70 |
|
|
71 |
|
def line_input (file):
    """Yield each line of *file* with at most one trailing newline removed.

    Unlike the original slicing on line[-1], this does not raise
    IndexError when the iterable yields an empty string."""
    for line in file:
        if line.endswith('\n'):
            yield line[:-1]
        else:
            yield line
77 |
|
|
78 |
|
def call_editor(filename):
    """Open *filename* in the user's preferred editor.

    Tries $VISUAL, $EDITOR and then a list of common editors until one
    can be executed.  Returns True as soon as an editor ran (exit status
    127 -- shell "command not found" -- is treated as not runnable);
    returns False if no editor could be started."""

    editors = []

    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError as e:
            # Binary does not exist; try the next candidate
            if e.errno == errno.ENOENT:
                continue
            raise

        if ret == 127:
            continue

        return True

    # Exhausted all candidates without running anything
    return False
101 |
|
|
102 |
class urllister(SGMLParser): |
class urllister(SGMLParser): |
103 |
def reset(self): |
def reset(self): |
104 |
SGMLParser.reset(self) |
SGMLParser.reset(self) |
109 |
if href: |
if href: |
110 |
self.urls.extend(href) |
self.urls.extend(href) |
111 |
|
|
112 |
|
class SpecFile(object): |
113 |
|
re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE) |
114 |
|
re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE) |
115 |
|
|
116 |
|
def __init__(self, path): |
117 |
|
self.path = path |
118 |
|
self.cwd = os.path.dirname(path) |
119 |
|
|
120 |
|
@property |
121 |
|
def version(self): |
122 |
|
return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0] |
123 |
|
def update(self, version): |
124 |
|
"""Update specfile (increase version)""" |
125 |
|
cur_version = self.version |
126 |
|
|
127 |
|
if version_cmp(version, cur_version) != 1: |
128 |
|
print >>sys.stderr, "ERROR: Version %s is older than current version %s!" % (version, cur_version) |
129 |
|
return False |
130 |
|
|
131 |
|
with open(self.path, "rw") as f: |
132 |
|
data = f.read() |
133 |
|
|
134 |
|
if data.count("%mkrel") != 1: |
135 |
|
print "WARNING: Multiple %mkrel found; don't know what to do!" |
136 |
|
return False |
137 |
|
|
138 |
|
data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1) |
139 |
|
if nr != 1: |
140 |
|
print "WARNING: Could not increase version!" |
141 |
|
return False |
142 |
|
|
143 |
|
data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1) |
144 |
|
if nr != 1: |
145 |
|
print "WARNING: Could not reset release!" |
146 |
|
return False |
147 |
|
|
148 |
|
# Overwrite file with new version number |
149 |
|
write_file(self.path, data) |
150 |
|
|
151 |
|
|
152 |
|
# Check RPM also agrees that version number has increased |
153 |
|
if self.version != version: |
154 |
|
print "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version |
155 |
|
return False |
156 |
|
|
157 |
|
# Download new tarball |
158 |
|
subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd) |
159 |
|
# Check patches still apply |
160 |
|
subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd) |
161 |
|
|
162 |
|
return True |
163 |
|
|
164 |
|
class Patch(object):
    """Do things with patches"""

    # "Header: data", optionally behind a '#' comment marker
    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    # Continuation line of the previous header
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file, then open it in an editor.

        Returns False if the patch already carries valid DEP-3 headers."""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                for i in range(self.dep3['last_nr']):
                    # Copy exactly one line per iteration; read() would
                    # slurp the whole file and break the copy below
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            # Atomically replace the original patch with the annotated copy
            os.rename(fdst.name, self.path)

        call_editor(self.path)

    # Example of the headers this produces:
    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLS
                        if info['data'].startswith('//') and info['header'].lower () == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    last_header = None
        except IOError:
            # Patch file may not exist yet; treat as "no headers"
            pass

        # Valid DEP-3 requires a description (Description or Subject) and
        # an origin (Origin, Author or From)
        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
                or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '') \
                or ('Author' in headers and headers['Author'].strip() != '') \
                or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        # Parsed DEP-3 metadata; computed lazily and cached on first access
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        # Author (svn username) of this patch according to 'svn log -q'.
        # NOTE(review): no break in the loop, so the author of the LAST
        # listed revision (the oldest, since svn log prints newest first)
        # wins -- presumably the original committer is wanted; confirm.
        if not hasattr(self, '_svn_author'):
            p = subprocess.Popen(['svn', 'log', '-q', "--", self.path], stdout=subprocess.PIPE, close_fds=True)
            contents = p.stdout.read().strip("\n").splitlines()
            ecode = p.wait()
            if ecode == 0:
                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]

            if not hasattr(self, '_svn_author'):
                return None

        return self._svn_author
302 |
|
|
303 |
def get_upstream_names(): |
def get_upstream_names(): |
304 |
urlopen = urllib2.build_opener() |
urlopen = urllib2.build_opener() |
305 |
|
|
352 |
|
|
353 |
return TARBALLS, FILES |
return TARBALLS, FILES |
354 |
|
|
355 |
|
|
356 |
|
def write_file(path, data):
    """Atomically replace the file at *path* with *data*.

    The data goes to a temporary file in the same directory first and is
    then renamed over the destination, so readers never observe a
    partially written file."""
    directory = os.path.dirname(path)
    with tempfile.NamedTemporaryFile(dir=directory, delete=False) as fdst:
        fdst.write(data)
        fdst.flush()
        os.rename(fdst.name, path)
361 |
|
|
362 |
def cmd_co(options, parser): |
def cmd_co(options, parser): |
363 |
upstream = get_upstream_names() |
upstream = get_upstream_names() |
364 |
downstream, downstream_files = get_downstream_names() |
downstream, downstream_files = get_downstream_names() |
385 |
|
|
386 |
path = os.path.expanduser(PKGROOT) |
path = os.path.expanduser(PKGROOT) |
387 |
|
|
388 |
|
import pprint |
389 |
|
|
390 |
matches = upstream & set(downstream.keys()) |
matches = upstream & set(downstream.keys()) |
391 |
for module in sorted(matches): |
for module in sorted(matches): |
392 |
for srpm in downstream[module]: |
for srpm in downstream[module]: |
393 |
for filename in downstream_files[srpm]: |
for filename in downstream_files[srpm]: |
394 |
if '.patch' in filename or '.diff' in filename: |
if '.patch' in filename or '.diff' in filename: |
395 |
print "\t".join((module,srpm, os.path.join(path, srpm, "SOURCES", filename) if options.path else filename)) |
|
396 |
|
p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path) |
397 |
|
valid = "" |
398 |
|
forwarded = "" |
399 |
|
if p.dep3['headers']: |
400 |
|
forwarded = p.dep3['headers'].get('Forwarded', "no") |
401 |
|
if p.dep3['valid']: |
402 |
|
valid="VALID" |
403 |
|
print "\t".join((module, srpm, str(p), forwarded, valid)) |
404 |
|
|
405 |
|
def cmd_dep3(options, parser):
    """Entry point for the 'dep3' subcommand: annotate options.patch."""
    Patch(options.patch).add_dep3()
408 |
|
|
409 |
|
def cmd_package_new_version(options, parser): |
410 |
|
cwd = os.path.expanduser(PKGROOT) |
411 |
|
package = options.package |
412 |
|
|
413 |
|
subprocess.call(['mgarepo', 'co', package], cwd=cwd) |
414 |
|
s = SpecFile(os.path.join(cwd, package, "SPECS", "%s.spec" % package)) |
415 |
|
print s.version |
416 |
|
if not s.update(options.version): |
417 |
|
sys.exit(1) |
418 |
|
|
419 |
|
|
420 |
def main(): |
def main(): |
421 |
description = """Mageia GNOME commands.""" |
description = """Mageia GNOME commands.""" |
437 |
|
|
438 |
subparser = subparsers.add_parser('patches', help='list all GNOME patches') |
subparser = subparsers.add_parser('patches', help='list all GNOME patches') |
439 |
subparser.add_argument("-p", "--path", action="store_true", dest="path", |
subparser.add_argument("-p", "--path", action="store_true", dest="path", |
440 |
help="Full path to patch") |
help="Show full path to patch") |
441 |
subparser.set_defaults( |
subparser.set_defaults( |
442 |
func=cmd_patches, path=False |
func=cmd_patches, path=False |
443 |
) |
) |
444 |
|
|
445 |
|
subparser = subparsers.add_parser('dep3', help='Add dep3 headers') |
446 |
|
subparser.add_argument("patch", help="Patch") |
447 |
|
subparser.set_defaults( |
448 |
|
func=cmd_dep3, path=False |
449 |
|
) |
450 |
|
|
451 |
|
subparser = subparsers.add_parser('increase', help='Increase version number') |
452 |
|
subparser.add_argument("package", help="Package name") |
453 |
|
subparser.add_argument("version", help="Version number") |
454 |
|
subparser.set_defaults( |
455 |
|
func=cmd_package_new_version, path=False |
456 |
|
) |
457 |
|
|
458 |
if len(sys.argv) == 1: |
if len(sys.argv) == 1: |
459 |
parser.print_help() |
parser.print_help() |