8 |
import urllib2 |
import urllib2 |
9 |
import urlparse |
import urlparse |
10 |
import argparse |
import argparse |
11 |
|
import errno |
12 |
from sgmllib import SGMLParser |
from sgmllib import SGMLParser |
13 |
|
|
14 |
# Release-media section of the mirror we are interested in.
MEDIA="Core Release Source"
# Base URL of the GNOME sources download area.
URL="http://download.gnome.org/sources/"
# Local root directory holding the distribution package checkouts.
PKGROOT='~/pkgs'
17 |
|
|
18 |
|
def line_input (file):
    """Yield every line of *file* with at most one trailing newline removed.

    Only a single final '\\n' is stripped (unlike rstrip), so any other
    trailing whitespace — or an extra blank line encoded as '\\n\\n' —
    is preserved in the yielded value.
    """
    for line in file:
        # endswith() is safe on an empty string, where line[-1] would
        # raise IndexError.
        if line.endswith('\n'):
            yield line[:-1]
        else:
            yield line
24 |
|
|
25 |
class urllister(SGMLParser): |
class urllister(SGMLParser): |
26 |
def reset(self): |
def reset(self): |
27 |
SGMLParser.reset(self) |
SGMLParser.reset(self) |
32 |
if href: |
if href: |
33 |
self.urls.extend(href) |
self.urls.extend(href) |
34 |
|
|
35 |
|
class Patch(object):
    """Wrapper around a patch file that can expose its DEP-3 metadata."""

    # A DEP-3 / git-style "Header: value" line, optionally behind a '#'.
    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    # An indented continuation of the previous header's value.
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """path: location of the patch on disk (the file may not exist)."""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        # Full path or just the file name, depending on construction.
        if self.show_path:
            return self.path
        return os.path.basename(self.path)

    def add_dep3(self):
        # Not implemented yet.  A DEP-3 header block would look like:
        #   Author: fwang
        #   Subject: Build fix: Fix glib header inclusion
        #   Applied-Upstream: commit:30602
        #   Forwarded: yes
        #   Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247
        pass

    def _read_dep3(self):
        """Parse DEP-3 (and git) headers from the patch into self._dep3."""
        headers = {}
        current = None
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    # '---' marks the start of the real diff: stop there.
                    if line == '---':
                        break

                    match = self.re_dep3.match(line)
                    if match:
                        fields = match.groupdict()
                        headers[fields['header']] = fields['data']
                        current = fields['header']
                        continue

                    match = self.re_dep3_cont.match(line)
                    if match:
                        if current:
                            headers[current] = " ".join(
                                (headers[current], match.group('data')))
                        continue

                    # Unrecognized line: stop folding continuations into
                    # the previous header.
                    current = None
        except IOError:
            # A missing or unreadable patch simply has no metadata.
            pass
        self._dep3 = headers

    @property
    def dep3(self):
        """Parsed DEP-3 headers as a dict, read lazily on first access."""
        if not hasattr(self, '_dep3'):
            self._read_dep3()
        return self._dep3
94 |
|
|
95 |
|
|
96 |
def get_upstream_names(): |
def get_upstream_names(): |
97 |
urlopen = urllib2.build_opener() |
urlopen = urllib2.build_opener() |
98 |
|
|
171 |
|
|
172 |
path = os.path.expanduser(PKGROOT) |
path = os.path.expanduser(PKGROOT) |
173 |
|
|
174 |
|
import pprint |
175 |
|
|
176 |
matches = upstream & set(downstream.keys()) |
matches = upstream & set(downstream.keys()) |
177 |
for module in sorted(matches): |
for module in sorted(matches): |
178 |
for srpm in downstream[module]: |
for srpm in downstream[module]: |
179 |
for filename in downstream_files[srpm]: |
for filename in downstream_files[srpm]: |
180 |
if '.patch' in filename or '.diff' in filename: |
if '.patch' in filename or '.diff' in filename: |
181 |
print "\t".join((module,srpm, os.path.join(path, srpm, "SOURCES", filename) if options.path else filename)) |
p = Patch(os.path.join(path, srpm, "SOURCES", filename), show_path=options.path) |
182 |
|
print "\t".join((module, srpm, str(p))) |
183 |
|
if p.dep3: |
184 |
|
pprint.pprint(p.dep3) |
185 |
|
|
186 |
def main(): |
def main(): |
187 |
description = """Mageia GNOME commands.""" |
description = """Mageia GNOME commands.""" |