#!/usr/bin/python
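"""Mageia GNOME team helper.

Compares the tarballs published on download.gnome.org with the source packages
on the Mageia "Core Release Source" media, checks matching packages out with
mgarepo and helps maintain DEP-3 headers on downstream patches."""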

import os
import os.path
import sys
import re
import subprocess
import urllib2
import urlparse
import argparse
import errno
import tempfile
import shutil
from sgmllib import SGMLParser

MEDIA="Core Release Source"
URL="http://download.gnome.org/sources/"
PKGROOT='~/pkgs'

def line_input (file):
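    """Yield each line of file with any trailing newline stripped."""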
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
    """Open filename in an editor, trying $VISUAL, $EDITOR and a list of
    fallbacks in turn; returns True as soon as one of them could be run."""

    editors = []

    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])

    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            if e.errno == errno.ENOENT:
                continue
            raise

        if ret == 127:
            continue

        return True

class urllister(SGMLParser):
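    """SGMLParser that collects the href target of every <a> tag in self.urls"""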
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        href = [v for k, v in attrs if k=='href']
        if href:
            self.urls.extend(href)

class Patch(object):
    """Represent a downstream patch file and its DEP-3 headers"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file"""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""
                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)
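
    # Example of a filled-in DEP-3 header block: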
    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}

        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()
                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    last_header = None
        except IOError:
            pass

        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
                or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '') \
                or ('Author' in headers and headers['Author'].strip() != '') \
                or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
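        """Parsed DEP-3 information ('headers', 'valid', 'last_nr'), read lazily"""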
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
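        """Author of the patch according to 'svn log', or None if unknown"""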
        if not hasattr(self, '_svn_author'):
            p = subprocess.Popen(['svn', 'log', '-q', "--", self.path], stdout=subprocess.PIPE, close_fds=True)
            contents = p.stdout.read().strip("\n").splitlines()
            ecode = p.wait()
            if ecode == 0:
                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author

def get_upstream_names():
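    """Return the set of module names listed on the GNOME sources server"""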
    urlopen = urllib2.build_opener()

    good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

    # Get the files
    usock = urlopen.open(URL)
    parser = urllister()
    parser.feed(usock.read())
    usock.close()
    parser.close()
    files = parser.urls

    tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])

    return tarballs

def get_downstream_names():
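    """Return (TARBALLS, FILES) mappings built from 'urpmf --files'

    TARBALLS maps a tarball module name to the source packages shipping it;
    FILES maps a source package to the files it ships on MEDIA."""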
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')

    p = subprocess.Popen(['urpmf', '--files', '.', "--media", MEDIA], stdout=subprocess.PIPE, close_fds=True)
    contents = p.stdout.read().strip("\n").splitlines()
    ecode = p.wait()
    if ecode != 0:
        sys.exit(1)

    FILES = {}
    TARBALLS = {}

    for line in contents:
        try:
            srpm, filename = line.split(":")
        except ValueError:
            print >>sys.stderr, line
            continue

        if '.tar' in filename:
            r = re_file.match(filename)
            if r:
                fileinfo = r.groupdict()
                module = fileinfo['module']

                if module not in TARBALLS:
                    TARBALLS[module] = set()
                TARBALLS[module].add(srpm)

        if srpm not in FILES:
            FILES[srpm] = set()
        FILES[srpm].add(filename)

    return TARBALLS, FILES

def cmd_co(options, parser):
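    """Check out every matching GNOME package with 'mgarepo co' under PKGROOT"""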
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    cwd = os.path.expanduser(PKGROOT)

    matches = upstream & set(downstream.keys())
    for module in matches:
        print module, "\t".join(downstream[module])
        for package in downstream[module]:
            subprocess.call(['mgarepo', 'co', package], cwd=cwd)

def cmd_ls(options, parser):
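    """Print the downstream packages that match an upstream GNOME module"""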
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    matches = upstream & set(downstream.keys())
    for module in matches:
        print "\n".join(downstream[module])

def cmd_patches(options, parser):
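    """List the patches of matching packages together with their DEP-3 headers"""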
    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    path = os.path.expanduser(PKGROOT)

    import pprint

    matches = upstream & set(downstream.keys())
    for module in sorted(matches):
        for srpm in downstream[module]:
            for filename in downstream_files[srpm]:
                if '.patch' in filename or '.diff' in filename:
                    p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path)
                    print "\t".join((module, srpm, str(p)))
                    if p.dep3['headers']:
                        pprint.pprint(p.dep3['headers'])
                    if p.dep3['valid']:
                        print "VALID"

def cmd_dep3(options, parser):
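    """Add missing DEP-3 headers to the given patch and open it in an editor"""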
    p = Patch(options.patch)
    p.add_dep3()

def main():
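    """Parse the command line and dispatch to the selected subcommand"""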
    description = """Mageia GNOME commands."""
    epilog="""Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description,epilog=epilog)

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')
    # checkout
    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(
        func=cmd_co
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.set_defaults(
        func=cmd_ls
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(
        func=cmd_dep3, path=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        if e.errno != errno.EPIPE:
            raise
    sys.exit(0)

if __name__ == "__main__":
    main()