#!/usr/bin/python3 -u

# A lot of the code comes from ftpadmin, see
# https://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# version comparison:
import rpm

# getting links from HTML document:
from html.parser import HTMLParser
import urllib.request, urllib.error, urllib.parse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# version freeze
import datetime

# packages --sort
import itertools

# automatically dropping merged patches
import shlex

# check-latest
import requests

import concurrent.futures

# for merging comments in order
import collections

SLEEP_INITIAL = 180
SLEEP_REPEAT = 30
SLEEP_TIMES = 30

re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

from functools import wraps

def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    :param ExceptionToCheck: the exception to check. may be a tuple of
        exceptions to check
    :type ExceptionToCheck: Exception or tuple
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    :type backoff: int
    :param logger: logger to use. If None, print
    :type logger: logging.Logger instance
    """
    def deco_retry(f):

        @wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry

def version_cmp(a, b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """
    return rpm.labelCompare(('1', a, '1'), ('1', b, '1'))

def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if (latest is None or version_cmp(version, latest) > 0) \
           and (max_version is None or version_cmp(version, max_version) < 0):
            latest = version
    return latest

MAJOR_VERSIONS = {
    # NAMES MUST BE IN LOWERCASE!
    'networkmanager': set(('0.9',)),
    'networkmanager-applet': set(('0.9',)),
    'networkmanager-openconnect': set(('0.9',)),
    'networkmanager-openvpn': set(('0.9',)),
    'networkmanager-pptp': set(('0.9',)),
    'networkmanager-vpnc': set(('0.9',))
}

def get_majmin(version, module=None):
    nrs = version.split('.')

    if module and module.lower() in MAJOR_VERSIONS:
        module_versions = [version.split(".") for version in MAJOR_VERSIONS[module.lower()]]

        nrstest = nrs[:]
        while len(nrstest) >= 2:
            if nrstest in module_versions:
                return (".".join(nrs[:len(nrstest)]), nrs[len(nrstest)])
            nrstest.pop()

    return (nrs[0], nrs[1])

def get_safe_max_version(version, module=None):
    if version is None:
        return None

    m = re_majmin.match(version)
    if not m:
        return None

    majmin = get_majmin(m.group(1), module)
    min_nr = int(majmin[1])

    if min_nr % 2 == 0:
        return "%s.%d" % (majmin[0], min_nr + 1)

    return "%s.%d" % (majmin[0], min_nr + 2)

def judge_version_increase(version_old, version_new, module=None):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
    Less than 0: Error
    0 to 4: Better not
    5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [get_majmin(ver, module) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    #          x = major
    #          y = minor  : even if stable
    #          z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # Check/ensure major version number is the same
    if majmins[0][0] != majmins[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (int(majmins[0][1]) % 2 == 0, int(majmins[1][1]) % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (3, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (4, "Stable version increase")

def line_input(file):
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def call_editor(filename):
    """Open filename in a text editor, trying the usual candidates in order"""
    editors = []
    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])
    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError as e:
            if e.errno == 2:
                continue
            raise

        if ret == 127:
            continue

        return True

class urllister(HTMLParser):
    def reset(self):
        HTMLParser.reset(self)
        self.urls = []

    def handle_starttag(self, tag, attrs):
        if tag == 'a':
            href = [v for k, v in attrs if k == 'href']
            if href:
                self.urls.extend(href)

def is_valid_hash(path, algo, hexdigest):
    if algo not in hashlib.algorithms_available:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest

re_clean_1 = re.compile(r'\[[^]()]+\]$')

def clean_pkgconfig_prov(prov):
    prov = re_clean_1.sub('', prov)
    return prov

class SpecFile(object):
    re_update_version = re.compile(r'^(?P<pre>Version[ \t]*:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release[ \t]*:\s*)(?P<release>%mkrel [0-9.]+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_patch = re.compile(r'^(?P<pre>Patch0*?)(?P<nr>[0-9]*)(?P<pre2>[ \t]*:\s*)(?P<patch>.+)(?P<post>\s*)\n', re.MULTILINE + re.IGNORECASE)

    re_br_part = re.compile(r'(?P<br>[^\s%{},<>=][^\s%{},<>=]*)\b(?P<post>\s*(?:(?P<operator>=|>=|<=|=<|=>|>|<)\s*(?P<version>[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+)\b)?)')
    #re_update_br = re.compile(r'^(?P<pre>BuildRequires:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+[^\s%{},]+?)?\s*\n)', re.MULTILINE + re.IGNORECASE)
    re_update_br = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+(?:[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+))?\s*\n)', re.MULTILINE + re.IGNORECASE)
    # re_update_br_unsplit = re.compile(r'^(?P<pre>BuildRequires:\s*)(?P<unsplit>[^\n,]+,[^\n]*)(?P<post>\s*\n)', re.MULTILINE + re.IGNORECASE)

    re_update_br_fix_operator = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*[^\n]*)(?P<operator>=<|=>)(?P<post>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
    re_update_br_unsplit = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<unsplit>(?:%s,?(?:[ \t\f\v]+|$)){2,})(?P<unsplitpost>\n)' % (re_br_part.pattern,), re.MULTILINE + re.IGNORECASE)
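    # Note: the named groups above (pre/br/post, and keeppre/keeppost in
    # clean_spec below) are what the \g<...> substitutions throughout this
    # class rely on to preserve the surrounding spec file text.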

    def __init__(self, path, module=None):
        self.path = path
        self.cwd = os.path.dirname(path)
        self.module = module
        self._changes = collections.OrderedDict()

    @property
    def changes(self):
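        # A single pending change is returned as-is; multiple changes become
        # one "- <change>" bullet per line (the changelog text handed to
        # mgarepo ci via Downstream.ci).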
        return ''.join(self._changes.keys()) if len(self._changes) == 1 else "\n".join(("- %s" % change for change in self._changes.keys()))

    @property
    def version(self):
        return subprocess.check_output(["rpm", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).decode("utf-8").splitlines()[0]

    @property
    def release(self):
        return subprocess.check_output(["rpm", "--define", "%dist %nil", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path, "--queryformat", "%{RELEASE}\n"]).decode("utf-8").splitlines()[0]

    def _sources_and_patches(self, flag=None):
        os.chdir(self.cwd)
        rpm.delMacro("_topdir" )
        rpm.addMacro("_topdir", os.path.join(self.cwd, '..'))
        ts = rpm.ts()
        spec = ts.parseSpec(self.path)
        srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
                        else spec.sources()
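        # rpm reports 2147483647 (INT_MAX) when a Source/Patch line carries no
        # number; flags == 2 marks a patch entry, so a bare "Patch:" maps to 0.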
        return dict((os.path.basename(name), [name, 0 if no == 2147483647 and flags == 2 else no]) for name, no, flags in srclist if flag is None or flags == flag)

    @property
    def patches(self):
        return self._sources_and_patches(flag=2)

    @property
    def sources(self):
        return self._sources_and_patches(flag=1)

    def clean_spec(self):
        re_rm_buildroot = r'^(?:\s*\[[^\n\]\[]+\][ \t]+\&\&[ \t]+)?(?:rm|\%__rm|\%\{__rm\}) *(?:-rf|-fr|-r) *"?(?:[%$]buildroot|[%$]\{buildroot\}|\$RPM_BUILDROOT|\$RPM_BUILD_ROOT|\$\{RPM_BUILD_ROOT\}|\$RPM_BUILD_DIR)"?/?[ \t]*\n'
        re_clean_spec = [
            # remove %defattr
            ('remove defattr', None, re.compile(r'(?P<keeppre>^\%files(?:[ \t]+[^\n]*)?\n(?:^\%doc [^\n]+\n)?)^\%defattr\s*\(- *, *root *, *root *(?:, *-)?\)\s*\n', re.MULTILINE + re.IGNORECASE)),
            ('remove cleaning buildroot in install', None, re.compile(r'(?P<keeppre>^\%install(?:[ \t]+[^\n]*)?\n)' + re_rm_buildroot + r'\n?', re.MULTILINE + re.IGNORECASE)),
            ('remove clean section', None, re.compile(r'^\%clean[ \t]*\n(?:' + re_rm_buildroot + r')?\s*(?P<keeppost>(?:^#[^%\n]+\n)*^(?:\%files|\%post|\%pre|\%trigger|\%install|\%package|\%check|\%_font_pkg|$(?!.|\n)))', re.MULTILINE + re.IGNORECASE)),
            ('remove buildroot definition', None, re.compile(r'^BuildRoot[ \t]*:[^\n]+\n', re.MULTILINE + re.IGNORECASE)),
            ('remove unneeded setup option', None, re.compile(r'^(?P<keeppre>\%setup -q)(?: -n|n) (?:\%name|\%\{name\})-(?:\%version|\%\{version\})(?P<keeppost>\n)', re.MULTILINE + re.IGNORECASE)),
            ('https for download.gnome.org', r'\g<keeppre>https://\g<keeppost>', re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)http://(?P<keeppost>download.gnome.org/[^\n]+\n)', re.MULTILINE + re.IGNORECASE)),
            ('download.gnome.org instead of ftp.gnome.org', r'\g<keeppre>https://download.gnome.org\g<keeppost>', re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)(?:ftp|http|https)://ftp.gnome.org/pub/GNOME(?P<keeppost>/[^\n]+\n)', re.MULTILINE + re.IGNORECASE)),
            ('restrict what libraries are matched with major numbers', r'\g<keeppre>{,.*}', re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})\*$', re.MULTILINE)),
            ('keep library matching using two lines', r'\g<keeppre>\n\g<keeppre>.*', re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})$\n(?P=keeppre)\{,\.\*\}$', re.MULTILINE)),
            ('make use of autopatch', r'%autopatch -p1', re.compile(r'^%apply_patches$', re.MULTILINE)),
            ('change find_lang --with-help into --with-gnome', r'\g<keeppre> --with-gnome\g<keeppost>', re.compile(r'^(?P<keeppre>\s*\%find_lang[^\\\n]+) --with-help(?P<keeppost>[^\\\n]*\n)', re.MULTILINE + re.IGNORECASE)),
            ('change find_lang remove duplicate with_gnome', None, re.compile(r'^(?P<keeppre>\%find_lang[^\\\n]+ --with-gnome) --with-gnome(?P<keeppost>[^\\\n]*\n)', re.MULTILINE + re.IGNORECASE)),
            # Use new Python macros
            ('use new Python macros', r'%py2_build', re.compile(r'^%{__python} setup.py build$', re.MULTILINE)),
            ('use new Python macros', r'%py3_build', re.compile(r'^%{__python3} setup.py build$', re.MULTILINE)),

            ('use new Python macros', r'%py2_install', re.compile(r'^%{__python} setup.py install$', re.MULTILINE)),
            ('use new Python macros', r'%py3_install', re.compile(r'^%{__python3} setup.py install$', re.MULTILINE)),

            ('use new Python macros', r'%{python2_sitelib}', re.compile(r'^%{python_sitelib}', re.MULTILINE)),
            ('use new Python macros', r'%{python2_sitearch}', re.compile(r'^%{python_sitearch}', re.MULTILINE)),

            ('use new Python macros', r'%{python2_version}', re.compile(r'%{pyver}', re.MULTILINE)),
            ('use new Python macros', r'%{python3_version}', re.compile(r'%{py3ver}', re.MULTILINE)),
        ]
        re_convert_br = [
            ('remove py_requires', ('python',), re.compile(r'^\%(?:py_requires|\{py_requires\})[ \t]*\n', re.MULTILINE)),
            ('remove py_requires -d', ('python', 'python-devel'), re.compile(r'^\%(?:py_requires[ \t]+-d|\{py_requires[ \t]+-d\})[ \t]*\n', re.MULTILINE)),
        ]

        made_changes = False
        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()
            for reason, change_to, regexp in re_clean_spec:
                if change_to is None:
                    change_to = ""
                    if 'keeppre' in regexp.groupindex: change_to += r'\g<keeppre>'
                    if 'keeppost' in regexp.groupindex: change_to += r'\g<keeppost>'
                data, nr = regexp.subn(change_to, data)
                if nr:
                    made_changes = True
                    self._changes['SILENT %s' % reason] = True

            # Convert %py_requires and %py_requires -d
            #  - first figure out how a buildrequire is usually defined
            mo = self.re_update_br.search(data)
            br_pre = mo.group('pre') if mo and mo.group('pre') else "BuildRequires:\t"

            for reason, new_brs, regexp in re_convert_br:
                mo = regexp.search(data)
                if mo:
                    # Don't add multiple buildrequires
                    change_to = ""
                    brs_in_file = set()
                    for mo2 in self.re_update_br.finditer(data):
                        if mo2.group('br') in new_brs:
                            brs_in_file.add(mo2.group('br'))

                    for br in set(new_brs) - brs_in_file:
                        change_to += ''.join((br_pre, br, "\n"))
                    data, nr = regexp.subn(change_to, data)
                    if nr:
                        made_changes = True
                        self._changes['SILENT %s' % reason] = True

            # Convert:
            #   %define name SOMETHING
            #   name: %{name}
            # Into
            #   name: SOMETHING
            converted_defines = []
            for search_for in ('name', 'version', 'release', 'summary', 'Summary', 'group'):
                search_for_ignore_case = ''.join(("[%s%s]" % (letter, letter.swapcase()) for letter in search_for))
                re_spec = re.compile(r'^(?P<keeppre>' + search_for_ignore_case + r'[ \t]*:[ \t]*)(?:\%' + re.escape(search_for) + r'|\%\{' + re.escape(search_for) + r'\})(?P<keeppost>[ \t]*\n)', re.MULTILINE)
                re_variable = re.compile(r'^(?P<keeppre>\%define[ \t]+' + re.escape(search_for) + r'[ \t]+(?P<definition>[^\n]+?))(?P<keeppost>[ \t]*\n)', re.MULTILINE)

                mo = re_variable.search(data)
                if mo and mo.group('definition') and len(re_variable.findall(data)) == 1:
                    mo2 = re_spec.search(data)
                    if mo2:
                        data, nr = re_spec.subn(r'\g<keeppre>' + mo.group('definition').replace('\\', '\\\\') + r'\g<keeppost>', data)
                        if nr:
                            made_changes=True
                            data, nr = re_variable.subn('', data)
                            converted_defines.append(search_for)

            if made_changes and converted_defines:
                data = data.lstrip()
                self._changes['SILENT remove variable definition(s) %s' % ", ".join(converted_defines)] = True

            made_changes, data = self._clean_spec_patches(f, made_changes, data)

            # Overwrite file with new version number
            if made_changes:
                write_file(self.path, data)

        return made_changes

    def _clean_spec_patches(self, f, made_changes, data):
        re_autopatch = re.compile(r'^[ \t]*\%autopatch(?:[ \t]+-p(?P<strip>[0-9]+))?$', re.MULTILINE)

        re_patch_header = re.compile(r'^Patch(?P<nr>[0-9]*)[ \t]*:[ \t]*(?P<patch>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
        re_patch_any = re.compile(r'^[ \t]*\%patch(?P<nr>[0-9]*)', re.MULTILINE)
        re_patch_valid = re.compile(r'^[ \t+]*\%patch(?P<nr>[0-9]*)(?:[ \t]+-p(?P<strip>[0-9]+))?(?:[ \t]+-b[ \t]+\S+)?$\n?', re.MULTILINE)
        re_prep_patches = re.compile(r'^\%setup[^\n]+$(?:' + re_patch_valid.pattern + r'|^#[^%\n]+\n|^[ \t]*(?:%{_bindir}/|%_bindir)?autoreconf[ \t][^\n]+$|\s)+\n\%build', re.MULTILINE)

        give_patchnr = lambda mo: (mo.group('nr') if len(mo.group('nr')) == 1 else mo.group('nr').lstrip('0')) if mo.group('nr') else "0"

        # Make use of %apply_patches
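        # Conversion only happens when three views of the spec agree (checked
        # below): the numbers in the Patch: headers, in every %patch usage, and
        # in the cleanly parseable %patch lines inside %prep; additionally all
        # patches must use a single strip (-p) level.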

        if re_autopatch.search(data):
            # Ignore if using %autopatch
            return made_changes, data

        patches = self.patches
        if not patches:
            return made_changes, data

        if len(patches) > 5:
            print("NOTICE: More than 5 patches, skipping package", file=sys.stderr)
            return made_changes, data

        if self.uses_apply_patches:
            return made_changes, data

#        print "WARNING: Patches, no %%apply_patches for %s" % self.module
#        print self.module, patches
#        print re_patch_header.findall(data)
#        print re_patch_valid.findall(data)

        # XXX -- apparently this is really inefficient with e.g. apache
        mo2 = re_prep_patches.search(data)
        patch_nrs_header = set([give_patchnr(mo) for mo in re_patch_header.finditer(data)])
        patch_nrs_any = set([give_patchnr(mo) for mo in re_patch_any.finditer(data)])
        patch_nrs_valid = set([give_patchnr(mo) for mo in re_patch_valid.finditer(mo2.group(0))]) if mo2 else set()

        if not patch_nrs_header:
            # XXX -- weird, self.patches should've returned 0 already
            return made_changes, data

        if not (patch_nrs_header == patch_nrs_any == patch_nrs_valid):
            print("NOTICE: Unable to automatically convert %s patches into %%autopatch (header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any), len(patch_nrs_valid)), file=sys.stderr)
            return made_changes, data

        patch_flags = set([0 if mo.group('strip') is None else mo.group('strip') for mo in re_patch_valid.finditer(mo2.group(0))])

        if len(patch_flags) != 1:
            print("NOTICE: Unable to automatically convert patches into as different -p / strip levels used", file=sys.stderr)
            return made_changes, data

        # Whoot, we can convert!!
        change_to = "%%autopatch -p%s\n" % list(patch_flags)[0]
        prep, n1 = re_patch_valid.subn(change_to.replace('\\', '\\\\'), mo2.group(0), count=1)
        prep, n2 = re_patch_valid.subn('', prep)
        if len(patch_nrs_valid) !=  n1 + n2:
            print("WARNING: Couldn't replace patches?!? Likely error in program logic", file=sys.stderr)
            return made_changes, data

        # First check if patches currently apply
        if not self.check_and_update_patches(check_only=True):
            return made_changes, data

        try:
            change_to = data.replace(mo2.group(0), prep, 1)
            write_file(self.path, change_to)

            # Validate patches still apply
            if self.check_and_update_patches(check_only=True):
                data = change_to
                self._changes['SILENT use autopatch'] = True
                made_changes = True
        finally:
            if not made_changes:
                write_file(self.path, data)

        return made_changes, data

    @property
    def buildrequires(self):
        rpm.delMacro("_topdir" )
        rpm.addMacro("_topdir", os.path.join(self.cwd, '..'))
        ts = rpm.ts()
        spec = ts.parseSpec(self.path)

        requires = spec.sourceHeader[rpm.RPMTAG_REQUIRES]
        require_flags = spec.sourceHeader[rpm.RPMTAG_REQUIREFLAGS]
        require_ver = spec.sourceHeader[rpm.RPMTAG_REQUIREVERSION]

        br = {}
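        # Maps requirement name to (operator, version) as parsed by rpm, e.g.
        # {'glib2-devel': ('>=', '2.40.0')} (illustrative entry)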
        for req, flag, ver in itertools.zip_longest(requires, require_flags, require_ver):
            # bitmask other than 15 means the require is (probably?) a require for a trigger or script
            if flag & 15 != flag: continue

            ver_cmp = ""
            if (flag & rpm.RPMSENSE_LESS): ver_cmp += '<'
            if (flag & rpm.RPMSENSE_GREATER): ver_cmp += '>'
            if (flag & rpm.RPMSENSE_EQUAL): ver_cmp += '='

            br[req.decode('utf-8')] = (ver_cmp, ver)
        return br

    @property
    def uses_apply_patches(self):
        return subprocess.call(['grep', '-Eq', '^%(apply_patches|autopatch|autosetup)', '--', self.path]) == 0

    def _revert_changes(self):
        """Revert uncommited changes made to spec file"""
        self._changes.clear()
        subprocess.check_call(["svn", "revert", "-R", os.path.join(self.cwd, '..')])

    def remove_patch(self, patchnr, info_patchname=None):
        """Remove a patch from the spec file"""
        # Uses string as well as numeric comparisons to determine which patch to remove
        nrs = [str(patchnr), patchnr]
        if patchnr == 0: nrs.append('')
        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()

            len_before=len(data)

            data, nr = self.re_update_patch.subn(lambda mo: '' if mo.group('nr') in nrs or (mo.group('nr').isdigit() and int(mo.group('nr')) in nrs) else mo.group(0), data)

            # XXX - pretty hacky
            if len(data) == len_before:
                print("ERROR: Could not remove patch nr %s!" % patchnr, file=sys.stderr)
                return False

            # Overwrite file with new version number
            write_file(self.path, data)

            if info_patchname:
                self._changes['dropped merged patch %s' % info_patchname] = True
            else:
                self._changes['dropped merged patch %s' % patchnr] = True

        subprocess.check_call(['mgarepo', 'sync'], cwd=self.cwd)
        return True

    def check_and_update_patches(self, check_only=False):
        """Check if patches still apply

        Remove any merged patches"""

        LOGLINES = 15

        initial_patches = self.patches
        patches = initial_patches
        uses_apply_patches = self.uses_apply_patches if patches else False

        while True:
            try:
                # Check patches still apply
                subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
            except subprocess.CalledProcessError:
                logfile = os.path.join(self.cwd, 'log.%s' % os.path.splitext(os.path.basename(self.path))[0])

                failed_patch = None
                cmd = None
                cmd_output = []
                cmd_before = (None, None)
                # Determine the last command that failed
                if os.path.exists(logfile):
                    print(logfile)
                    with open(logfile, "r", encoding="utf-8") as f:
                        for line in line_input(f):
                            if line.startswith('+ '):
                                cmd_before = (cmd, cmd_before)
                                cmd = line[2:]
                                cmd_output = []
                            else:
                                cmd_output.append(line)

                cmd_parsed = shlex.split(cmd) if cmd else []
                cmd_before_parsed = shlex.split(cmd_before[0]) if cmd_before[0] else []

                if not check_only and uses_apply_patches and patches and cmd_parsed:
                    if os.path.basename(cmd_parsed[0]) in ('patch', 'cat'):
                        if os.path.exists(cmd_parsed[-1]):
                            failed_patch = os.path.basename(cmd_parsed[-1])
                        elif cmd_parsed[-1].startswith('-') and os.path.exists(cmd_before_parsed[-1]):
                            # for %autopatch as well as %patch
                            #+ /usr/bin/cat /home/src/pkgs/gnome-getting-started-docs/SOURCES/gs-browse-web-firefox.page.patch
                            #+ /usr/bin/patch -p1 -s
                            failed_patch = os.path.basename(cmd_before_parsed[-1])

                    # Patch is merged if patch(1) reported ignored hunks and a reversed-patch hint, with no 'FAILED' line anywhere
                    has_ignored = False
                    has_reversed = False
                    has_failed = False
                    for line in cmd_output:
                        if 'FAILED' in line or 'File to patch:' in line:
                            has_failed = True
                            break
                        elif 'ignored' in line:
                            has_ignored = True
                        elif 'saving rejects to file' in line:
                            has_failed = True
                            break
                        elif 'Reversed (or previously applied) patch detected' in line:
                            has_reversed = True

                    if has_ignored and has_reversed and not has_failed:
                        # If patch was merged, drop it from spec file and retry
                        print("INFO: Patch has been merged: %s" % failed_patch, file=sys.stdout)
                        if failed_patch in patches:
                            if self.remove_patch(patches[failed_patch][1], failed_patch):
                                # try again
                                patches = self.patches
                                continue

                        print("ERROR: Problem removing merged patch: %s" % failed_patch, file=sys.stderr)
                        return False

                if cmd and len(cmd_output) > LOGLINES:
                    print('+ %s' % cmd, file=sys.stdout)
                    print("\n".join(cmd_output), file=sys.stdout)
                elif os.path.exists(logfile):
                    subprocess.call(['tail', '-n', str(LOGLINES), logfile])

                if failed_patch:
                    print("ERROR: Problem applying patch: %s" % failed_patch, file=sys.stderr)
                elif cmd:
                    print("ERROR: Problem in %%prep phase command: %s" % cmd, file=sys.stderr)
                elif patches:
                    print("ERROR: Problem applying patches and/or %prep phase", file=sys.stderr)
                else:
                    print("ERROR: Problem in %prep phase", file=sys.stderr)
                return False
            finally:
                # bm command doesn't cleanup after itself and the option it has to do that removes spec file + sources!
                buildpath = os.path.join(self.cwd, '..', 'BUILD', '%s-%s' % (self.module, self.version))
                if os.path.exists(buildpath):
                    shutil.rmtree(buildpath, ignore_errors=True)

            return True

    def update_br(self, changes, force=False, change_description='update buildrequirements'):
        """Update buildrequirement"""

        # XXX - doesn't handle buildrequires with version numbers :-(
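        # 'changes' maps an existing BuildRequires name to its replacement,
        # e.g. {'gtk+2-devel': 'pkgconfig(gtk+-2.0)'} (illustrative mapping)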
        made_changes = False

        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()
            data_before=data

            # Change any "," in buildrequires into multiple lines
            data, nr = self.re_update_br_unsplit.subn(lambda mo: ''.join((''.join((mo.group('pre'), mo2.group(0), mo.group('unsplitpost'))) for mo2 in self.re_br_part.finditer(mo.group('unsplit')) if mo.group(0).strip() != '')), data)
            if data_before != data:
                made_changes = True
                self._changes['SILENT one line per buildrequirement'] = True
                data_before = data

            # Change =< and => operators into <= and >=
            # XXX - pretty ugly
            data, nr = self.re_update_br_fix_operator.subn(lambda mo: mo.group(0).replace('=>', '>=').replace('=<', '<=') if self.re_update_br.match(mo.group(0).replace('=>', '>=').replace('=<', '<=')) else mo.group(0), data)
            if data_before != data:
                made_changes = True
                self._changes['SILENT fix operator in buildrequires'] = True
                data_before = data

            # Now update buildrequires if any
            data, nr = self.re_update_br.subn(lambda mo: ''.join((mo.group('pre'), changes[mo.group('br')], mo.group('post'))) if mo.group('br') in changes else mo.group(0), data)

            # XXX - very hacky because of multiple changes, could miss out on a change
            if data_before != data:
                made_changes = True
                self._changes['SILENT %s' % change_description] = True
            elif len(changes) != 0:
                print("ERROR: Could not update buildrequires for %s" % self.module, file=sys.stderr)
                print(changes)
                return False

            # Overwrite file with new version number
            write_file(self.path, data)

        return made_changes


    MAX_JUDGEMENT=5
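    # Version increases judged below MAX_JUDGEMENT only warn and need force=True
    # to proceed; negative judgements from judge_version_increase() always fail.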

    def ensure_no_local_changes(self, force=False):
        # XXX - os.path.join is hackish
        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.normpath(os.path.join(self.cwd, '..'))]).decode('utf-8')
        if svn_diff_output != '':
            print(svn_diff_output)
            print("ERROR: Package has uncommitted changes!", file=sys.stderr)
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print("WARNING: Force used; reverting svn changes", file=sys.stderr)
                self._revert_changes()
            except subprocess.CalledProcessError:
                return False

        return True

    def _check_can_update(self, force):
        data = None
        if not self.ensure_no_local_changes(force):
            return None

        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()

            if data.count("%subrel") != 0:
                print("ERROR: %subrel found; don't know what to do!", file=sys.stderr)
                return None

            if data.count("%mkrel") != 1:
                print("ERROR: Multiple %mkrel found; don't know what to do!", file=sys.stderr)
                return None

        return data

    def update_release(self, release, reason, force=False):
        """Update release (usually for rebuilds)"""
        cur_release = self.release

        data = self._check_can_update(force)
        if data is None:
            return False

        # XXX - validate current release
        data, nr = self.re_update_release.subn(r'\g<pre>%%mkrel %s\g<post>' % release, data, 1)
        if nr != 1:
            print(data, file=sys.stdout)
            print("ERROR: Could not increase release!", file=sys.stderr)
            return False

        # Overwrite file with new release
        write_file(self.path, data)

        self._changes['rebuild for %s' % reason] = True

        return True

    def update_version(self, version, force=False, max_judgement=MAX_JUDGEMENT):
        """Update version and reset release"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version, self.module)

        if judgement < 0:
            print("ERROR: %s!" % (msg), file=sys.stderr)
            return False

        if judgement < max_judgement:
            print("WARNING: %s!" % (msg))
            if not force: return False

        data = self._check_can_update(force)
        if data is None:
            return False

        data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if nr != 1:
            print("ERROR: Could not increase version!", file=sys.stderr)
            return False

        data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if nr != 1:
            print(data, file=sys.stdout)
            print("ERROR: Could not reset release!", file=sys.stderr)
            return False

        # Overwrite file with new version number
        write_file(self.path, data)

        self._changes['new version %s' % version] = True

        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print("ERROR: Increased version to %s, but RPM doesn't agree!?!" % version, file=sys.stderr)
            return False

        # Try to download the new tarball various times and wait between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1: time.sleep(SLEEP_REPEAT * 2 ** (tries // 5))
            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError as e:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if e.returncode != 1:
                    self._revert_changes()
                    print("ERROR: Could not download tarball", file=sys.stderr)
                    return False
        else:
            # failed to download tarball
            self._revert_changes()
            print("ERROR: Could not download tarball", file=sys.stderr)
            return False

        return self.check_and_update_patches()
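
# Typical SpecFile usage (illustrative package name and version; assumes the
# standard ~/pkgs/<package>/SPECS checkout layout used throughout this script):
#   s = SpecFile(os.path.expanduser('~/pkgs/gedit/SPECS/gedit.spec'), module='gedit')
#   if s.update_version('3.8.2'):   # bumps Version:, resets Release: to %mkrel 1
#       Downstream.ci('gedit', s.changes, cwd=os.path.expanduser('~/pkgs/gedit'))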

class Patch(object):
    """Do things with patches"""

    re_dep3 = re.compile(r'^(?:#\s*)?(?P
[-A-Za-z0-9]+?):\s*(?P.*)$') re_dep3_cont = re.compile(r'^#?\s+(?P.*)$') def __init__(self, path, show_path=False): """Path: path to patch (might not exist)""" self.path = path self.show_path = show_path def __str__(self): return self.path if self.show_path else os.path.basename(self.path) def add_dep3(self): """Add DEP-3 headers to a patch file""" if self.dep3['valid']: return False new_headers = ( ('Author', self.svn_author), ('Subject', ''), ('Applied-Upstream', ''), ('Forwarded', ''), ('Bug', ''), ) with tempfile.NamedTemporaryFile(dir=os.path.dirname(self.path), delete=False) as fdst: with open(self.path, "r", encoding="utf-8") as fsrc: # Start with any existing DEP3 headers for i in range(self.dep3['last_nr']): fdst.write(fsrc.read()) # After that add the DEP3 headers add_line = False for header, data in new_headers: if header in self.dep3['headers']: continue # XXX - wrap this at 80 chars add_line = True print("%s: %s" % (header, "" if data is None else data), file=fdst) if add_line: print("", file=fdst) # Now copy any other data and the patch shutil.copyfileobj(fsrc, fdst) fdst.flush() os.rename(fdst.name, self.path) call_editor(self.path) #Author: fwang #Subject: Build fix: Fix glib header inclusion #Applied-Upstream: commit:30602 #Forwarded: yes #Bug: http://bugzilla.abisource.com/show_bug.cgi?id=13247 def _read_dep3(self): """Read DEP-3 headers from an existing patch file This will also parse git headers""" dep3 = {} headers = {} last_header = None last_nr = 0 nr = 0 try: with open(self.path, "r", encoding="utf-8") as f: for line in line_input(f): nr += 1 # stop trying to parse when real patch begins if line == '---': break r = self.re_dep3.match(line) if r: info = r.groupdict() # Avoid matching URLS if info['data'].startswith('//') and info['header'].lower () == info['header']: continue headers[info['header']] = info['data'] last_header = info['header'] last_nr = nr continue r = self.re_dep3_cont.match(line) if r: info = r.groupdict() if last_header: headers[last_header] = " ".join((headers[last_header], info['data'])) last_nr = nr continue last_header = None except IOError: pass dep3['valid'] = \ (('Description' in headers and headers['Description'].strip() != '') or ('Subject' in headers and headers['Subject'].strip() != '')) \ and (('Origin' in headers and headers['Origin'].strip() != '') \ or ('Author' in headers and headers['Author'].strip() != '') \ or ('From' in headers and headers['From'].strip() != '')) dep3['last_nr'] = last_nr dep3['headers'] = headers self._dep3 = dep3 @property def dep3(self): if not hasattr(self, '_dep3'): self._read_dep3() return self._dep3 @property def svn_author(self): if not hasattr(self, '_svn_author'): try: contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").decode('utf-8').splitlines() for line in contents: if ' | ' not in line: continue fields = line.split(' | ') if len(fields) >= 3: self._svn_author = fields[1] except subprocess.CalledProcessError: pass if not hasattr(self, '_svn_author'): return None return self._svn_author class Upstream(object): URL="https://download.gnome.org/sources/" limit = None _cache_versions = {} def __init__(self): urlopen = urllib.request.build_opener() good_dir = re.compile('^[-A-Za-z0-9_+.]+/$') # Get the files usock = urlopen.open(self.URL) parser = urllister() parser.feed(usock.read().decode('utf-8')) usock.close() parser.close() files = parser.urls tarballs = set([filename.replace('/', '') for filename in files if good_dir.search(filename)]) if self.limit 
is not None: tarballs.intersection_update(self.limit) self.names = tarballs @classmethod def versions(cls, module): # XXX - ugly if module not in cls._cache_versions: versions = None url = '%s%s/cache.json' % (cls.URL, module) r = requests.get(url) j = r.json() if j is not None and len(j) > 2 and module in j[2]: versions = j[2][module] cls._cache_versions[module] = versions return cls._cache_versions[module] class Downstream(object): re_file = re.compile(r'^(?P.*?)[_-](?:(?P([0-9]+[\.])*[0-9]+)-)?(?P([0-9]+[\.\-])*[0-9]+)\.(?P(?:tar\.|diff\.)?[a-z][a-z0-9]*)$') MEDIA="Core Release Source" PKGROOT='~/pkgs' DISTRO=None SECTION=None def __init__(self): contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA], close_fds=True).decode("utf-8").strip("\n").splitlines() FILES = {} TARBALLS = {} PACKAGES = set() for line in contents: try: srpm, version, filename = line.split("|") except ValueError: print(line, file=sys.stderr) continue PACKAGES.add(srpm) if '.tar' in filename: r = self.re_file.match(filename) if r: fileinfo = r.groupdict() module = fileinfo['module'] if module not in TARBALLS: TARBALLS[module] = {} if srpm in TARBALLS[module]: # srpm seen before, check if version is newer if version_cmp(TARBALLS[module][srpm], version) == 1: TARBALLS[module][srpm] = version else: TARBALLS[module][srpm] = version if srpm not in FILES: FILES[srpm] = set() FILES[srpm].add(filename) self._packages = PACKAGES self.tarballs = TARBALLS self.files = FILES @property def packages(self): return sorted(self._packages) _provides_cache = {} @classmethod def alternative_provides(cls, search_for): """Give alternative provides for a certain provide Relies on urpmq. Results are cached. Inner working: $ urpmq --whatprovides $search_for --provides""" if search_for not in cls._provides_cache: cls._provides_cache[search_for] = subprocess.check_output(["urpmq", "--whatprovides", search_for, "--provides"]).decode("utf-8").splitlines() return cls._provides_cache[search_for] @classmethod @retry(subprocess.CalledProcessError) def co(cls, package, cwd=None, spec_only=False): if cwd is None: cwd = os.path.expanduser(cls.PKGROOT) cmd = ['mgarepo', 'co'] if cls.DISTRO: cmd.extend(('-d', cls.DISTRO)) if spec_only: cmd.append('-s') cmd.append(package) return subprocess.check_call(cmd, stdin=subprocess.DEVNULL, cwd=cwd) @classmethod @retry(subprocess.CalledProcessError) def ci(cls, package, changes, cwd=None): if cwd is None: cwd = os.path.expanduser(cls.PKGROOT) cmd = ['mgarepo', 'ci', '-m', changes] return subprocess.check_call(cmd, cwd=cwd) def get_downstream_from_upstream(self, upstream, version): if upstream not in self.tarballs: raise ValueError("No packages for upstream name: %s" % upstream) if len(self.tarballs[upstream]) == 1: return list(self.tarballs[upstream].keys()) # Directories packages are located in root = os.path.expanduser(self.PKGROOT) packages = {} for package in list(self.tarballs[upstream].keys()): cwd = os.path.join(root, package) # Checkout package to ensure the checkout reflects the latest changes try: self.co(package, cwd=root) except subprocess.CalledProcessError: raise ValueError("Multiple packages found and cannot checkout %s" % package) # Determine version from spec file try: packages[package] = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=upstream).version except subprocess.CalledProcessError: raise ValueError("Multiple packages found and cannot determine version of %s" % package) # Return all packages reflecting the current version 
matches = [package for package in packages if packages[package] == version] if len(matches): return matches # Return all packages reflecting the version before the current version # - determine the newest version in any spec file latest_possible_version = get_latest_version(list(packages.values())) # - now get the latest version before the current version latest_version = get_latest_version(list(packages.values()), max_version=version) if latest_version is None: raise ValueError("Multiple packages found and all versions are newer than %s" % version) # - if the latest_possible_spec version is not the latest version, then # ensure it follows get_safe_max_version if latest_version != latest_possible_version and version_cmp(get_safe_max_version(latest_version, upstream), \ version) != 1: raise ValueError("Multiple packages found and unsafe version increase: %s (%s => %s)" % (upstream, latest_version, version)) # - now really get the right packages matches = [package for package in packages if packages[package] == latest_version] if len(matches): return matches # Give up raise ValueError("Multiple packages found and cannot determine package for version %s" % version) def write_file(path, data): with tempfile.NamedTemporaryFile(mode='w+t',dir=os.path.dirname(path), delete=False, encoding="utf-8") as fdst: fdst.write(data) fdst.flush() os.rename(fdst.name, path) def cmd_co_multi(args): package, what_to_print, options = args print(what_to_print) try: Downstream.co(package, spec_only=options.spec_only) except subprocess.CalledProcessError: pass def cmd_co(options, parser): with concurrent.futures.ProcessPoolExecutor(max_workers=8) as executor: if options.all: packages = ((package, package, options) for package in Downstream().packages) elif len(options.package): packages = ((package, package, options) for package in options.package) else: packages = ((l[0], "%s => %s" % (l[0], l[1]), options) for l in sorted(join_streams(auto_update=False))) executor.map(cmd_co_multi, packages) def join_streams(show_version=False, only_diff_version=False, auto_update=True): root = os.path.expanduser(Downstream.PKGROOT) upstream = Upstream().names downstream = Downstream() matches = upstream & set(downstream.tarballs.keys()) for module in matches: for package in list(downstream.tarballs[module].keys()): package_version = downstream.tarballs[module][package] spec_version = None cwd = os.path.join(root, package) if show_version or only_diff_version: # ensure package is checked out if not os.path.exists(cwd): try: downstream.co(package) except subprocess.CalledProcessError: # XXX - ignoring packages which cannot be checked out continue try: spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=module).version except subprocess.CalledProcessError: spec_version = 'N/A' # in case upstream version is newer, update checkout if auto_update and package_version != spec_version and version_cmp(package_version, spec_version) == 1: try: downstream.co(package) spec_version = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=module).version except subprocess.CalledProcessError: pass if only_diff_version and package_version == spec_version: continue yield (package, module, package_version, spec_version, downstream.files[package]) def cmd_group_owner(options, parser): groups = set(options.group) output = [pkg.split("\t") for pkg in subprocess.check_output(["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]).decode("utf-8").splitlines()] if not output: return # Filter 
by groups output = [pkg for pkg in output if pkg[0] in groups] if not output: return packages = {} for group, name, sourcerpm, version, release in output: if group not in packages: packages[group] = {} source = sourcerpm if sourcerpm else name end = ".src.rpm" if source.endswith(end): source = source[:len(source) - len(end)] end = "-%s-%s" %(version, release) if source.endswith(end): source = source[:len(source) - len(end)] if source not in packages[group]: packages[group][source] = set() packages[group][source].add(name) maints = dict([line.rpartition(" ")[::2] for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).decode("utf-8").splitlines()]) def get_output(source, maints, packages): for source in list(packages.keys()): maint = maints.get(source, "?") yield "\t".join((maint, source, ",".join(sorted(packages[source])))) first = True for group in list(packages.keys()): if first: first = False else: print("") print("") print(group) print("") for line in sorted(get_output(source, maints, packages[group])): print(line) def cmd_cleanup(options, parser): root = os.path.expanduser(Downstream.PKGROOT) packages = set(Downstream().packages) dirs = set((o for o in os.listdir(root) if os.path.isdir(os.path.join(root, o)))) dirs = dirs - packages dirs = (o for o in dirs if os.path.exists(os.path.join(root, o, "SPECS"))) print("\n".join(sorted(dirs))) def cmd_ls(options, parser): streams = join_streams(show_version=options.show_version, only_diff_version=options.diff) if options.sort: SORT=dict(list(zip(options.sort.read().splitlines(), itertools.count()))) streams = sorted(streams, key=lambda a: (SORT.get(a[1], 9999), a[0])) else: streams = sorted(streams) for package, module, package_version, spec_version, downstream_files in streams: sys.stdout.write(package) if options.spec: sys.stdout.write('/SPECS/%s.spec' % package) if options.upstream: sys.stdout.write("\t%s" % module) if options.show_version: sys.stdout.write("\t%s\t%s" % (spec_version, package_version)) print() def cmd_check_version(options, parser): streams = join_streams(show_version=True) for package, module, package_version, spec_version, downstream_files in streams: if package_version == spec_version: continue sys.stdout.write(package) sys.stdout.write("\t%s\t%s" % (spec_version, package_version)) sys.stdout.write("\n") def cmd_check_latest(options, parser): streams = join_streams(show_version=True) for package, module, package_version, spec_version, downstream_files in streams: upgrade=set() sys.stdout.write(package) sys.stdout.write("\t%s\t%s" % (spec_version, package_version)) safe_max_version = get_safe_max_version(spec_version, module=module) versions = Upstream.versions(module) if package_version != spec_version and spec_version != 'N/A' and package_version != 'N/A': upgrade.add('~') if versions: latest_version = get_latest_version(versions) safe_version = get_latest_version(versions, safe_max_version) cmp_latest = version_cmp(latest_version, spec_version) if cmp_latest < 0: latest_version = 'N/A' upgrade.add('l') elif cmp_latest > 0: upgrade.add('L') cmp_safe = version_cmp(safe_version, spec_version) if cmp_safe < 0: safe_version = 'N/A' upgrade.add('s') elif cmp_safe > 0: upgrade.add('S') sys.stdout.write("\t%s" % latest_version) sys.stdout.write("\t%s" % safe_version) sys.stdout.write("\t%s" % "".join(sorted(upgrade))) print() if 'S' in upgrade and options.submit and not Downstream.DISTRO: cmd = ['mgagnome', 'increase', package, safe_version] subprocess.call(cmd, 
cwd=os.path.expanduser(os.path.join(Downstream.PKGROOT, package))) def cmd_patches(options, parser): root = os.path.expanduser(Downstream.PKGROOT) for package, module, package_version, spec_version, downstream_files in sorted(join_streams()): for filename in downstream_files: if '.patch' in filename or '.diff' in filename: p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path) valid = "" forwarded = "" if p.dep3['headers']: forwarded = p.dep3['headers'].get('Forwarded', "no") if p.dep3['valid']: valid="VALID" print("\t".join((module, package, str(p), forwarded, valid))) def cmd_dep3(options, parser): p = Patch(options.patch) p.add_dep3() def cmd_check_prep(options, parser): # Directories packages are located in root = os.path.expanduser(Downstream.PKGROOT) cwd = os.path.join(root, options.package) s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % options.package), module=options.package) s.check_and_update_patches() def cmd_clean_spec_multi(args): options, package = args # Directories packages are located in root = os.path.expanduser(Downstream.PKGROOT) print(package) cwd = os.path.join(root, package) path = os.path.join(cwd, "SPECS", "%s.spec" % package) if not os.path.exists(path): print('ERROR: Cannot find spec file for package %s' % package, file=sys.stderr) return False s = SpecFile(path, module=package) try: if not s.ensure_no_local_changes(options.force): return False except subprocess.CalledProcessError: # Package was probably not checked out or something print("ERROR: cannot clean spec file for %s" % package, file=sys.stderr) return False made_changes=False changes = {} # Convert perl- and -devel buildrequires into perl() and pkgconfig() requires if options.convert_br: br = s.buildrequires no_alt = set() no_change = {} convert_brs = { 'pkgconfig': { 'desc': 'convert -devel buildrequires into pkgconfig', 'check_br': lambda req: req.endswith('-devel'), 'check_provide': lambda prov: prov.startswith('pkgconfig('), 'basereq': lambda req: req[:-len('-devel')], 'extra': lambda basereq: [ 'pkgconfig(%s)' % basereq, 'pkgconfig(%s)' % basereq[len('lib'):] if basereq.startswith('lib') else 'pkgconfig(lib%s)' % basereq ], }, 'perl': { 'desc': 'convert perl- buildrequires into perl()', 'check_br': lambda req: req.startswith('perl-'), 'check_provide': lambda prov: prov.startswith('perl('), 'basereq': lambda req: req[len('perl-'):], 'extra': lambda basereq: ['perl(%s)' % basereq.replace('-', '::')], } } for convert_br, keys in convert_brs.items(): keys['changes'] = {} br_old = [r for r in list(br.keys()) if keys['check_br'](r)] for req in br_old: provides = Downstream.alternative_provides(req) provides_alt = [clean_pkgconfig_prov(prov) for prov in provides if keys['check_provide'](prov)] change_to = None if len(provides_alt) == 1: change_to = provides_alt[0] elif len(provides_alt) and 'extra' in keys: basereq = keys['basereq'](req) check_for = keys['extra'](basereq) for check in check_for: if check in provides_alt: change_to = check break if len(provides_alt): if change_to is None: no_change[req] = provides_alt else: no_alt.add(req) if change_to is not None: keys['changes'][req] = change_to if not options.doit: import pprint for keys in list(convert_brs.items()): if 'changes' in keys and keys['changes']: pprint.pprint(keys['changes']) if no_alt: print("WARNING: no alternatives found for: %s" % ", ".join(sorted(no_alt))) if no_change: pprint.pprint(no_change) else: convert_brs = {} keys_with_changes = [keys for keys in convert_brs.values() if 'changes' in keys and 
keys['changes']] if not keys_with_changes: keys_with_changes.append( {'changes': [], 'desc': 'unsplit BRs'}) for keys in keys_with_changes: if s.update_br(keys['changes'], change_description=keys['desc']): made_changes=True if s.clean_spec(): made_changes=True # If we made it this far, checkin the changes if made_changes: if options.doit: Downstream.ci(package, s.changes, cwd=cwd) else: # show the diff and undo all changes print(s.changes) s.ensure_no_local_changes(force=True) def cmd_clean_spec(options, parser): if options.all: packages = Downstream().packages else: packages = options.package if len(options.package) else (l[0] for l in join_streams()) with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor: executor.map(cmd_clean_spec_multi, ((options, package) for package in packages)) # DEBUG: # for package in packages: # cmd_clean_spec_multi((options, package)) def cmd_new_release(options, parser): success = True for pkg in options.package: # Determine the package name if options.upstream: try: package = Downstream().get_downstream_from_upstream(pkgs, options.version)[0] except ValueError as e: print("ERROR: %s" % e, file=sys.stderr) success = False continue else: package = pkg # Directories packages are located in root = os.path.expanduser(Downstream.PKGROOT) cwd = os.path.join(root, package) # Checkout package to ensure the checkout reflects the latest changes try: Downstream.co(package, cwd=root) except subprocess.CalledProcessError: subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd) success = False continue # SpecFile class handles the actual version+release change # XXX - module should reflect upstream name, this gives it the package name s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=package) cur_release = s.release new_release = int(cur_release)+1 cur_version = s.version print("%s-%s => %s-%s" % (cur_version, cur_release, cur_version, new_release)) # XXX - Duplicate check as should not revert changes if specfile has already been changed if not s.ensure_no_local_changes(options.force): success = False continue if not s.update_release(new_release, options.reason, force=options.force): subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd, stdout=subprocess.DEVNULL) success = False continue try: # If we made it this far, checkin the changes Downstream.ci(package, s.changes, cwd=cwd) # Submit is optional if options.submit: cmd = ['mgarepo', 'submit'] if Downstream.DISTRO: cmd.extend(('--define', 'section=core/updates_testing', '-t', Downstream.DISTRO)) elif Downstream.SECTION: cmd.extend(('--define', 'section={0}'.format(Downstream.SECTION))) # retry submission various times, could be that some dependencies are being built at the same time @retry(subprocess.CalledProcessError, tries=10, delay=300, backoff=1.5) def _submit(): subprocess.check_call(cmd, cwd=cwd) _submit() except subprocess.CalledProcessError: success = False continue if not success: sys.exit(1) def cmd_package_new_version(options, parser): # Determine the package name if options.upstream: try: package = Downstream().get_downstream_from_upstream(options.package, options.version)[0] except ValueError as e: print("ERROR: %s" % e, file=sys.stderr) sys.exit(1) else: package = options.package # Directories packages are located in root = os.path.expanduser(Downstream.PKGROOT) cwd = os.path.join(root, package) # Checkout package to ensure the checkout reflects the latest changes try: Downstream.co(package, cwd=root) except subprocess.CalledProcessError: subprocess.call(['svn', 'revert', 
'-R', cwd], cwd=cwd) sys.exit(1) # SpecFile class handles the actual version+release change # XXX - module should reflect upstream name, this gives it the package name s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=package) print("%s => %s" % (s.version, options.version)) # XXX - Duplicate check as should not revert changes if specfile has already been changed if not s.ensure_no_local_changes(options.force): sys.exit(1) if not s.update_version(options.version, force=options.force): # XXX - hack to automatically revert changes when auto upgrading from ftp release list if options.hexdigest is not None: subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd, stdout=subprocess.DEVNULL) sys.exit(1) # Check hash, if given if options.hexdigest is not None: sources = [name for name, value in s.sources.items() if '://' in value[0]] if not len(sources): print("ERROR: Cannot determine source file (for hash check)!", file=sys.stderr) sys.stderr(1) # If there are multiple sources, try to see if there is a preferred name # --> needed for metacity hash check (multiple tarball sources) if len(sources) > 1: preferred_name = '%s-%s.tar.xz' % (package, options.version) if preferred_name in sources: sources = [preferred_name] for filename in sources: path = os.path.join(cwd, "SOURCES", filename) if not is_valid_hash(path, options.algo, options.hexdigest): print("ERROR: Hash file failed check for %s!" % path, file=sys.stderr) print("ERROR: Reverting changes!", file=sys.stderr) subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd) sys.exit(1) try: # If we made it this far, checkin the changes Downstream.ci(package, s.changes, cwd=cwd) # Submit is optional if options.submit: cmd = ['mgarepo', 'submit'] if Downstream.DISTRO: cmd.extend(('--define', 'section=core/updates_testing', '-t', Downstream.DISTRO)) elif Downstream.SECTION: cmd.extend(('--define', 'section={0}'.format(Downstream.SECTION))) # retry submission various times, could be that some dependencies are being built at the same time @retry(subprocess.CalledProcessError, tries=10, delay=300, backoff=1.5) def _submit(): subprocess.check_call(cmd, cwd=cwd) _submit() except subprocess.CalledProcessError: sys.exit(1) def cmd_parse_ftp_release_list(options, parser): def _send_reply_mail(contents, orig_msg, to, packages=[], error=False): """Send an reply email""" contents.seek(0) msg = MIMEText(contents.read(), _charset='utf-8') if error: # XXX - ugly contents.seek(0) lastline = contents.read().decode('utf-8').rstrip().splitlines()[-1] # Remove things like "ERROR: " and so on from the last line lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline) # Remove things like " - " (youri output from mgarepo submit) lastline = re.sub(r'^\s+-\s+', '', lastline) subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)" else: subjecterror = "" if packages: subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror) else: subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror) msg['Subject'] = subject msg['To'] = to msg["In-Reply-To"] = orig_msg["Message-ID"] msg["References"] = orig_msg["Message-ID"] # Call sendmail program directly so it doesn't matter if the service is running cmd = ['/usr/sbin/sendmail', '-oi', '--'] cmd.extend([to]) p = subprocess.Popen(cmd, stdin=subprocess.PIPE) p.stdin.write(msg.as_bytes()) p.stdin.flush() p.stdin.close() p.wait() msg = email.message_from_file(sys.stdin) if options.mail: stdout = tempfile.TemporaryFile() stderr = stdout else: stdout = sys.stdout stderr = sys.stderr 
def cmd_parse_ftp_release_list(options, parser):
    def _send_reply_mail(contents, orig_msg, to, packages=[], error=False):
        """Send a reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')

        if error:
            # XXX - ugly
            contents.seek(0)
            lines = contents.read().rstrip().splitlines()
            lastline = lines[-1] if lines else ''
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            # Remove things like " - " (youri output from mgarepo submit)
            lastline = re.sub(r'^\s+-\s+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""

        if packages:
            subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror)
        else:
            subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror)

        msg['Subject'] = subject
        msg['To'] = to
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call the sendmail program directly, so it doesn't matter whether the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--']
        cmd.append(to)
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        p.stdin.write(msg.as_bytes())
        p.stdin.flush()
        p.stdin.close()
        p.wait()

    msg = email.message_from_file(sys.stdin)

    if options.mail:
        # text mode, so the reply mail and error output can be read back as str
        stdout = tempfile.TemporaryFile(mode='w+', encoding='utf-8')
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    # Message objects return None for missing headers instead of raising KeyError
    module = msg['X-Module-Name']
    version = msg['X-Module-Version']
    hexdigest = msg['X-Module-SHA256-tar.xz']
    if module is None or version is None or hexdigest is None:
        print("ERROR: Missing X-Module-* headers!", file=stderr)
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    try:
        packages = Downstream().get_downstream_from_upstream(module, version)
    except ValueError as e:
        print("ERROR: %s" % e, file=stderr)
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)
        sys.exit(1)

    if options.wait or options.fork:
        # maildrop aborts and will try to deliver again after 5 minutes; fork to avoid this
        if os.fork() != 0:
            sys.exit(0)

    if options.wait:
        # wait until SLEEP_INITIAL seconds have passed since the message was sent
        secs = SLEEP_INITIAL
        t = email.utils.parsedate_tz(msg['Date'])
        if t is not None:
            msg_time = email.utils.mktime_tz(t)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0:
            time.sleep(secs)

    error = False
    for package in packages:
        cmd = ['mgagnome', 'increase', '--hash', hexdigest]
        if options.submit:
            cmd.append('--submit')
        if options.force:
            cmd.append('--force')
        cmd.extend((package, version))
        stdout.flush()  # keep fd-level child output ordered after our buffered writes
        if subprocess.call(cmd, stdout=stdout, stderr=stderr):
            error = True

    if options.mail:
        _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error)
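
# Example invocation (address is hypothetical): feed a ftp-release-list
# message on stdin, e.g. from a maildrop rule, and mail the progress back:
#   mgagnome gnome-release-email --mail packager@example.com --wait < message.eml
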
subparser.add_argument("patch", help="Patch") subparser.set_defaults( func=cmd_dep3, path=False ) subparser = subparsers.add_parser('gnome-release-email', help='submit packages based on GNOME ftp-release-list email') subparser.add_argument("-m", "--mail", help="Email address to send the progress to") subparser.add_argument( "--fork", action="store_true", help="Fork as quickly as possible") subparser.add_argument("-w", "--wait", action="store_true", help="Wait before trying to retrieve the new version") subparser.add_argument("-s", "--submit", action="store_true", dest="submit", help="Commit changes and submit") subparser.add_argument("-f", "--force", action="store_true", help="Force submission") subparser.set_defaults( func=cmd_parse_ftp_release_list, force=False, wait=False, fork=False ) subparser = subparsers.add_parser('group-owner', help='list packages by group') subparser.add_argument('group', metavar="GROUP", nargs='+') subparser.set_defaults( func=cmd_group_owner ) subparser = subparsers.add_parser('rebuild', help='increase release') subparser.add_argument("package", help="Package name", nargs="*") subparser.add_argument("-m", "--reason", dest="reason", required=True, help="Reason for the rebuild") subparser.add_argument("-f", "--force", action="store_true", dest="force", help="Override warnings, just do it") subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream", help="Package name reflects the upstream name") subparser.add_argument("-s", "--submit", action="store_true", dest="submit", help="Commit changes and submit") subparser.add_argument( "--no-submit", action="store_false", dest="submit", help="Do not commit changes and submit") subparser.set_defaults( func=cmd_new_release, submit=argparse.SUPPRESS, upstream=False, force=False ) subparser = subparsers.add_parser('increase', help='increase version number') subparser.add_argument("package", help="Package name") subparser.add_argument("version", help="Version number") subparser.add_argument("-f", "--force", action="store_true", dest="force", help="Override warnings, just do it") subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream", help="Package name reflects the upstream name") subparser.add_argument("-s", "--submit", action="store_true", dest="submit", help="Commit changes and submit") subparser.add_argument( "--no-submit", action="store_false", dest="submit", help="Do not commit changes and submit") subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms_available, dest="algo", help="Hash algorithm") subparser.add_argument("--hash", dest="hexdigest", help="Hexdigest of the hash") subparser.set_defaults( func=cmd_package_new_version, submit=argparse.SUPPRESS, upstream=False, hexdigest=None, algo="sha256", force=False ) subparser = subparsers.add_parser('packages', help='list all GNOME packages') subparser.add_argument("-m", "--m", action="store_true", dest="upstream", help="Show upstream module") subparser.add_argument( "--version", action="store_true", dest="show_version", help="Show version numbers") subparser.add_argument( "--diff", action="store_true", dest="diff", help="Only show packages with different version") subparser.add_argument( "--sort", type=argparse.FileType('r', 0), dest="sort", metavar="FILE", help="Sort packages according to order in given FILE") subparser.add_argument( "--spec", action="store_true", dest="spec", help="Give spec file location") subparser.set_defaults( func=cmd_ls, upstream=False, show_version=False, diff=False ) subparser = 
    subparser = subparsers.add_parser('cleanup', help='cleanup pkg directory')
    subparser.set_defaults(func=cmd_cleanup)

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(func=cmd_patches, path=False)

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()
    if options.limit_upstream:
        Upstream.limit = set(options.limit_upstream.read().strip("\n").splitlines())

    if not hasattr(options, 'submit'):
        options.submit = not options.distro

    if options.distro:
        Downstream.PKGROOT = os.path.join('~/pkgs', options.distro)
        Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source,Core {0} Updates Testing Source".format(options.distro)
        Downstream.DISTRO = options.distro

    try:
        options.func(options, parser)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError as e:
        if e.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    os.environ['PYTHONUNBUFFERED'] = '1'
    main()
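
# Example (distro value is hypothetical): with -d/--distro the 'submit'
# default in main() flips to False, so submission must be requested explicitly:
#   mgagnome -d 9 rebuild -m "mass rebuild" -s somepackage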