#!/usr/bin/python -u

# A lot of the code comes from ftpadmin, see
#   http://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# version comparison:
import rpm

# opening tarballs:
import tarfile
import gzip
import bz2
import lzma # pyliblzma

# getting links from HTML document:
from sgmllib import SGMLParser
import urllib2
import urlparse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# version freeze
import datetime

MEDIA = "Core Release Source"
URL = "http://download.gnome.org/sources/"
PKGROOT = '~/pkgs'
SLEEP_INITIAL = 180
SLEEP_REPEAT = 30
SLEEP_TIMES = 20

re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """
    return rpm.labelCompare(('1', a, '1'), ('1', b, '1'))

def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    If max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if ( latest is None or version_cmp(version, latest) > 0 ) \
           and ( max_version is None or version_cmp(version, max_version) < 0 ):
            latest = version
    return latest

def judge_version_increase(version_old, version_new):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer, but check whether it follows the GNOME versioning scheme
    majmins = [re_majmin.sub(r'\1', ver) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    #          x = major
    #          y = minor  : even if stable
    #          z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # More detailed analysis needed, so figure out the numbers
    majmin_nrs = [map(long, ver.split('.')) for ver in majmins]

    # Check/ensure major version number is the same
    if majmin_nrs[0][0] != majmin_nrs[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (majmin_nrs[0][1] % 2 == 0, majmin_nrs[1][1] % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (4, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (6, "Stable version increase")

def line_input(file):
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
    """Spawn an editor on filename; return True as soon as one succeeds"""
    editors = []
    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])
    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError, e:
            if e.errno == 2:
                continue
            raise

        if ret == 127:
            continue

        return True

class urllister(SGMLParser):
    def reset(self):
        SGMLParser.reset(self)
        self.urls = []

    def start_a(self, attrs):
        href = [v for k, v in attrs if k == 'href']
        if href:
            self.urls.extend(href)

class XzTarFile(tarfile.TarFile):

    OPEN_METH = tarfile.TarFile.OPEN_METH.copy()
    OPEN_METH["xz"] = "xzopen"

    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open xz compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        if fileobj is not None:
            fileobj = _LZMAProxy(fileobj, mode)
        else:
            fileobj = lzma.LZMAFile(name, mode)

        try:
            # lzma doesn't immediately return an error
            # try and read a bit of data to determine if it is a valid xz file
            fileobj.read(_LZMAProxy.blocksize)
            fileobj.seek(0)

            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            raise tarfile.ReadError("not a xz file")
        except lzma.error:
            raise tarfile.ReadError("not a xz file")
        t._extfileobj = False
        return t

if not hasattr(tarfile.TarFile, 'xzopen'):
    tarfile.open = XzTarFile.open

def is_valid_hash(path, algo, hexdigest):
    if algo not in hashlib.algorithms:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest

class SpecFile(object):
    re_update_version = re.compile(r'^(?P<pre>Version:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release:\s*)(?P<release>%mkrel \d+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
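    # Illustration (hypothetical spec lines): for "Version:  3.6.1",
    # re_update_version captures pre="Version:  " and version="3.6.1"; update()
    # below substitutes \g<pre><newversion>\g<post>. re_update_release matches
    # e.g. "Release:  %mkrel 2", which update() resets to "%mkrel 1".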

    def __init__(self, path):
        self.path = path
        self.cwd = os.path.dirname(path)

    @property
    def version(self):
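        # The rpm query prints one line per (sub)package defined in the spec;
        # only the first line (the main package's version) is used here.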
        return subprocess.check_output(["rpm", "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).splitlines()[0]
    @property
    def sources(self):
        ts = rpm.ts()
        spec = ts.parseSpec(self.path)
        srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
                        else spec.sources()
        return dict((os.path.basename(name), name) for name, no, flags in srclist)
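    # Sketch of the result (spec contents hypothetical): for a spec declaring
    #   Source0: http://download.gnome.org/sources/foo/1.0/foo-1.0.tar.xz
    # this returns {'foo-1.0.tar.xz': 'http://download.gnome.org/sources/foo/1.0/foo-1.0.tar.xz'};
    # patches declared in the spec show up in the mapping as well.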

    def update(self, version, force=False):
        """Update specfile (increase version)"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version)

        if judgement < 0:
            print >>sys.stderr, "ERROR: %s!" % (msg)
            return False

        if judgement < 5:
            print "WARNING: %s!" % (msg)
            if not force: return False

        # XXX - os.path.join is hackish
        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.join(self.path, '..')])
        if svn_diff_output != '':
            print svn_diff_output
            print >>sys.stderr, "ERROR: Package has uncommitted changes!"
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print >>sys.stderr, "WARNING: Force used; reverting svn changes"
                subprocess.check_call(["svn", "revert", "-R", os.path.join(self.path, '..')])
            except subprocess.CalledProcessError:
                return False

        with open(self.path, "r") as f:
            data = f.read()

            if data.count("%mkrel") != 1:
                print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!"
                return False

            data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
            if nr != 1:
                print >>sys.stderr, "ERROR: Could not increase version!"
                return False

            data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
            if nr != 1:
                print >>sys.stderr, "ERROR: Could not reset release!"
                return False

            # Overwrite file with new version number
            write_file(self.path, data)


        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print >>sys.stderr, "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
            return False


        # Try to download the new tarball various times and wait between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1: time.sleep(SLEEP_REPEAT)
            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError, e:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if e.returncode != 1:
                    return False
        else:
            return False


        try:
            # Check patches still apply
            subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
        except subprocess.CalledProcessError:
            # XXX tail -n 15 SPECS/log.$PACKAGE
            return False

        return True
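
    # Usage sketch (mirrors cmd_package_new_version() further below; the path
    # is hypothetical):
    #   s = SpecFile(os.path.expanduser('~/pkgs/foo/SPECS/foo.spec'))
    #   print s.version               # current Version: from the spec
    #   s.update('1.2')               # bump Version:, reset Release: to %mkrel 1,
    #                                 # then 'mgarepo sync -d' and 'bm -p --nodeps'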

class Patch(object):
    """Do things with patches"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P<header>[-A-Za-z0-9]+?):\s*(?P<data>.*)$')
    re_dep3_cont = re.compile(r'^#?\s+(?P<data>.*)$')

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    def add_dep3(self):
        """Add DEP-3 headers to a patch file"""
        if self.dep3['valid']:
            return False

        new_headers = (
            ('Author', self.svn_author),
            ('Subject', ''),
            ('Applied-Upstream', ''),
            ('Forwarded', ''),
            ('Bug', ''),
        )

        with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst:
            with open(self.path, "r") as fsrc:
                # Start with any existing DEP3 headers
                for i in range(self.dep3['last_nr']):
                    fdst.write(fsrc.readline())

                # After that add the DEP3 headers
                add_line = False
                for header, data in new_headers:
                    if header in self.dep3['headers']:
                        continue

                    # XXX - wrap this at 80 chars
                    add_line = True
                    print >>fdst, "%s: %s" % (header, "" if data is None else data)

                if add_line: print >>fdst, ""

                # Now copy any other data and the patch
                shutil.copyfileobj(fsrc, fdst)

            fdst.flush()
            os.rename(fdst.name, self.path)

        call_editor(self.path)

    #Author: fwang
    #Subject: Build fix: Fix glib header inclusion
    #Applied-Upstream: commit:30602
    #Forwarded: yes
    #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247

    def _read_dep3(self):
        """Read DEP-3 headers from an existing patch file

        This will also parse git headers"""
        dep3 = {}
        headers = {}
        last_header = None
        last_nr = 0
        nr = 0
        try:
            with open(self.path, "r") as f:
                for line in line_input(f):
                    nr += 1
                    # stop trying to parse when real patch begins
                    if line == '---':
                        break

                    r = self.re_dep3.match(line)
                    if r:
                        info = r.groupdict()

                        # Avoid matching URLS
                        if info['data'].startswith('//') and info['header'].lower() == info['header']:
                            continue

                        headers[info['header']] = info['data']
                        last_header = info['header']
                        last_nr = nr
                        continue

                    r = self.re_dep3_cont.match(line)
                    if r:
                        info = r.groupdict()
                        if last_header:
                            headers[last_header] = " ".join((headers[last_header], info['data']))
                            last_nr = nr
                        continue

                    last_header = None
        except IOError:
            pass

        dep3['valid'] = \
            (('Description' in headers and headers['Description'].strip() != '')
             or ('Subject' in headers and headers['Subject'].strip() != '')) \
            and (('Origin' in headers and headers['Origin'].strip() != '')
                 or ('Author' in headers and headers['Author'].strip() != '')
                 or ('From' in headers and headers['From'].strip() != ''))
        dep3['last_nr'] = last_nr
        dep3['headers'] = headers

        self._dep3 = dep3

    @property
    def dep3(self):
        if not hasattr(self, '_dep3'):
            self._read_dep3()

        return self._dep3

    @property
    def svn_author(self):
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author

def get_upstream_names():
    urlopen = urllib2.build_opener()

    good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

    # Get the files
    usock = urlopen.open(URL)
    parser = urllister()
    parser.feed(usock.read())
    usock.close()
    parser.close()
    files = parser.urls

    tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)])

    return tarballs

def get_downstream_names():
    re_file = re.compile(r'^(?P<module>.*?)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')
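    # Example (illustrative): a source file named "foo-1.2.3.tar.xz" yields
    # module="foo", version="1.2.3", format="tar.xz"; only 'module' is used below.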

    contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", MEDIA], close_fds=True).strip("\n").splitlines()

    FILES = {}
    TARBALLS = {}

    for line in contents:
        try:
            srpm, version, filename = line.split("|")
        except ValueError:
            print >>sys.stderr, line
            continue

        if '.tar' in filename:
            r = re_file.match(filename)
            if r:
                fileinfo = r.groupdict()
                module = fileinfo['module']

                if module not in TARBALLS:
                    TARBALLS[module] = {}
                TARBALLS[module][srpm] = version

        if srpm not in FILES:
            FILES[srpm] = set()
        FILES[srpm].add(filename)

    return TARBALLS, FILES

def get_downstream_from_upstream(upstream, version):
    # Determine the package name
    downstream, downstream_files = get_downstream_names()

    if upstream not in downstream:
        raise ValueError("No packages for upstream name: %s" % upstream)

    if len(downstream[upstream]) == 1:
        return downstream[upstream].keys()

    # Directories packages are located in
    root = os.path.expanduser(PKGROOT)

    packages = {}
    for package in downstream[upstream].keys():
        cwd = os.path.join(root, package)

        # Checkout package to ensure the checkout reflects the latest changes
        try:
            subprocess.check_call(['mgarepo', 'co', package], cwd=root)
        except subprocess.CalledProcessError:
            raise ValueError("Multiple packages found and cannot checkout %s" % package)

        # Determine version from spec file
        try:
            packages[package] = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
        except subprocess.CalledProcessError:
            raise ValueError("Multiple packages found and cannot determine version of %s" % package)

    # Return all packages reflecting the current version
    matches = [package for package in packages if packages[package] == version]
    if len(matches):
        return matches

    # Return all packages reflecting the version before the current version
    latest_version = get_latest_version(packages.values(), max_version=version)
    matches = [package for package in packages if packages[package] == latest_version]
    if len(matches):
        return matches

    # Give up
    raise ValueError("Multiple packages found and cannot determine package for version %s" % version)

def write_file(path, data):
    with tempfile.NamedTemporaryFile(dir=os.path.dirname(path), delete=False) as fdst:
        fdst.write(data)
        fdst.flush()
        os.rename(fdst.name, path)

def cmd_co(options, parser):
    root = os.path.expanduser(PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        print "%s => %s" % (module, package)
        subprocess.call(['mgarepo', 'co', package], cwd=root)

def join_streams(show_version=False, only_diff_version=False):
    root = os.path.expanduser(PKGROOT)

    upstream = get_upstream_names()
    downstream, downstream_files = get_downstream_names()

    matches = upstream & set(downstream.keys())
    for module in matches:
        for package in downstream[module].keys():
            package_version = downstream[module][package]
            spec_version = None
            if show_version or only_diff_version:
                cwd = os.path.join(root, package)
                try:
                    spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package)).version
                except subprocess.CalledProcessError:
                    spec_version = 'N/A'

                if only_diff_version and package_version == spec_version:
                    continue

            yield (package, module, package_version, spec_version, downstream_files[package])

def cmd_ls(options, parser):
    for package, module, package_version, spec_version, downstream_files in sorted(join_streams(show_version=options.show_version, only_diff_version=options.diff)):
        print package, "\t",
        if options.upstream: print module, "\t",
        if options.show_version: print spec_version, "\t", package_version, "\t",
        print
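
# Note: join_streams() above yields tuples of the form (values illustrative):
#   ('foo', 'foo', '1.2.3', '1.2.3', set(['foo-1.2.3.tar.xz', 'foo.spec']))
# i.e. (package, module, package_version, spec_version, downstream_files).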

def cmd_patches(options, parser):
    root = os.path.expanduser(PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:

                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)

                valid = ""
                forwarded = ""
                if p.dep3['headers']:
                    forwarded = p.dep3['headers'].get('Forwarded', "no")
                    if p.dep3['valid']:
                        valid = "VALID"

                print "\t".join((module, package, str(p), forwarded, valid))

def cmd_dep3(options, parser):
    p = Patch(options.patch)
    p.add_dep3()

def cmd_package_new_version(options, parser):
    # Determine the package name
    if options.upstream:
        try:
            package = get_downstream_from_upstream(options.package, options.version)[0]
        except ValueError, e:
            print >>sys.stderr, "ERROR: %s" % e
            sys.exit(1)
    else:
        package = options.package

    # Directories packages are located in
    root = os.path.expanduser(PKGROOT)
    cwd = os.path.join(root, package)

    # Checkout package to ensure the checkout reflects the latest changes
    try:
        subprocess.check_call(['mgarepo', 'co', package], cwd=root)
    except subprocess.CalledProcessError:
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package))
    print "%s => %s" % (s.version, options.version)
    if not s.update(options.version, force=options.force):
        sys.exit(1)

    # Check hash, if given
    if options.hexdigest is not None:
        sources = [name for name, origname in s.sources.iteritems() if '://' in origname]
        if not len(sources):
            print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!"
            sys.exit(1)

        for filename in sources:
            path = os.path.join(cwd, "SOURCES", filename)
            if not is_valid_hash(path, options.algo, options.hexdigest):
                print >>sys.stderr, "ERROR: Hash file failed check for %s!" % path
                print >>sys.stderr, "ERROR: Reverting changes!"
                subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
                sys.exit(1)

    # We can even checkin and submit :-)
    if options.submit:
        try:
            # checkin changes
            subprocess.check_call(['mgarepo', 'ci', '-m', 'new version %s' % options.version], cwd=cwd)
            # and submit
            subprocess.check_call(['mgarepo', 'submit'], cwd=cwd)
        except subprocess.CalledProcessError:
            sys.exit(1)

def cmd_parse_ftp_release_list(options, parser):
    def _send_reply_mail(contents, orig_msg, to, error=False):
        """Send a reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')

        if error:
            # XXX - ugly
            contents.seek(0)
            lastline = contents.read().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""

        msg['Subject'] = "Re: %s%s" % (orig_msg['Subject'], subjecterror)
        msg['To'] = to
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--']
        cmd.extend([to])
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        p.stdin.write(msg.as_string())
        p.stdin.flush()
        p.stdin.close()
        p.wait()

    msg = email.message_from_file(sys.stdin)

    if options.mail:
        stdout = tempfile.TemporaryFile()
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    try:
        module = msg['X-Module-Name']
        version = msg['X-Module-Version']
        hexdigest = msg['X-Module-SHA256-tar.xz']
    except KeyError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    try:
        packages = get_downstream_from_upstream(module, version)
    except ValueError, e:
        print >>stderr, "ERROR: %s" % e
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    if options.wait:
        # maildrop aborts and will try to deliver after 5min
        # fork to avoid this
        if os.fork() != 0:
            sys.exit(0)

        # wait SLEEP_INITIAL after the message was sent
        secs = SLEEP_INITIAL
        t = email.utils.parsedate_tz(msg['Date'])
        if t is not None:
            msg_time = email.utils.mktime_tz(t)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0:
            time.sleep(secs)

    error = False
    for package in packages:
        if subprocess.call(['mga-gnome', 'increase', '--submit', '--hash', hexdigest, package, version], stdout=stdout, stderr=stderr):
            error = True

    if options.mail:
        _send_reply_mail(stdout, msg, options.mail, error=error)

def main():
    description = """Mageia GNOME commands."""
    epilog = """Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description, epilog=epilog)

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')

    # co
    subparser = subparsers.add_parser('co', help='checkout all GNOME modules')
    subparser.set_defaults(func=cmd_co)

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument("--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument("--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.set_defaults(func=cmd_ls, upstream=False, show_version=False, diff=False)

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(func=cmd_patches, path=False)

    subparser = subparsers.add_parser('dep3', help='Add dep3 headers')
    subparser.add_argument("patch", help="Patch")
    subparser.set_defaults(func=cmd_dep3, path=False)

    subparser = subparsers.add_parser('increase', help='Increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest", help="Hexdigest of the hash")
    subparser.set_defaults(func=cmd_package_new_version, submit=False, upstream=False,
                           hexdigest=None, algo="sha256", force=False)

    subparser = subparsers.add_parser('gnome-release-email',
                                      help='Submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.set_defaults(func=cmd_parse_ftp_release_list)

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError, e:
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    main()
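
# Example invocations (illustrative; package names, versions and hashes are
# made up):
#   mga-gnome packages --version --diff
#   mga-gnome increase --upstream --hash <sha256-hexdigest> foo 1.2.3
#   mga-gnome dep3 ~/pkgs/foo/SOURCES/foo-fix-build.patch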