#!/usr/bin/python3 -u
"""Mageia GNOME commands."""
# A lot of the code comes from ftpadmin, see
# https://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

from functools import wraps, lru_cache, cached_property

# basic modules:
import os
from pathlib import Path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# getting links from HTML document:
from html.parser import HTMLParser
import urllib.request
import urllib.error
import urllib.parse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# packages --sort
import itertools

# automatically dropping merged patches
import shlex

import concurrent.futures

# for debugging output
import pprint
import logging

# check-latest
import requests

# version comparison:
import rpm

SLEEP_INITIAL = 180
SLEEP_REPEAT = 30
SLEEP_TIMES = 30

_re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
_re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')


def retry(exceptions, tries=4, delay=3, backoff=2, logger=None):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    :param exceptions: the exception to check. may be a tuple of
        exceptions to check
    :type exceptions: Exception or tuple
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the
        delay each retry
    :type backoff: int
    :param logger: logger to use. If None, print
    :type logger: logging.Logger instance
    """
    def deco_retry(func):

        @wraps(func)
        def func_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return func(*args, **kwargs)
                except exceptions as exc:
                    msg = "%s, Retrying in %d seconds..." % (str(exc), mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return func(*args, **kwargs)

        return func_retry  # true decorator

    return deco_retry


def version_cmp(version_a, version_b):
    """Compares two versions

    Returns
      -1 if a < b
       0 if a == b
       1 if a > b
    """
    return rpm.labelCompare(('1', version_a, '1'), ('1', version_b, '1'))


def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    if max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if (latest is None or version_cmp(version, latest) > 0) \
                and (max_version is None or version_cmp(version, max_version) < 0):
            latest = version
    return latest


MAJOR_VERSIONS = {
    # NAMES MUST BE IN LOWERCASE!
    'networkmanager': set(('0.9',)),
    'networkmanager-applet': set(('0.9',)),
    'networkmanager-openconnect': set(('0.9',)),
    'networkmanager-openvpn': set(('0.9',)),
    'networkmanager-pptp': set(('0.9',)),
    'networkmanager-vpnc': set(('0.9',))
}


def get_majmin(version, module=None):
    """Return a tuple with major and minor versions given a version"""
    nrs = version.split('.')

    if module and module.lower() in MAJOR_VERSIONS:
        module_versions = [version.split(".") for version in MAJOR_VERSIONS[module.lower()]]

        nrstest = nrs[:]
        while len(nrstest) >= 2:
            if nrstest in module_versions:
                return (".".join(nrs[:len(nrstest)]), nrs[len(nrstest)])
            nrstest.pop()

    return (nrs[0], nrs[1])


def get_safe_max_version(version, module=None):
    """Provide the maximum version a module could safely be increased to

    This assumes the module uses semantic versioning"""
    if version is None:
        return None

    match = _re_majmin.match(version)
    if not match:
        return None

    majmin = get_majmin(match.group(1), module)
    min_nr = int(majmin[1])
    # Add 1 for stable (even) releases, 2 for unstable
    min_nr += 1 if min_nr % 2 == 0 else 2

    return "%s.%d" % (majmin[0], min_nr)


def judge_version_increase(version_old, version_new, module=None):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
        Less than 0: Error
        0 to 4: Better not
        5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [get_majmin(ver, module) for ver in versions if _re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    #          x = major
    #          y = minor  : even if stable
    #          z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # Check/ensure major version number is the same
    if majmins[0][0] != majmins[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (int(majmins[0][1]) % 2 == 0, int(majmins[1][1]) % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (3, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (4, "Stable version increase")


def line_input(file):
    """Iterate over a file and ignore any newline"""
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line


def distinct(iterable, keyfunc=None):
    """Iterate over an iterable and only yield unique/distinct items"""
    seen = set()
    for item in iterable:
        key = item if keyfunc is None else keyfunc(item)
        if key not in seen:
            seen.add(key)
            yield item


class URLLister(HTMLParser):
    """Parse links from HTML"""
    # pylint: disable=abstract-method

    def reset(self):
        HTMLParser.reset(self)
        self.urls = []

    def handle_starttag(self, tag, attrs):
        if tag == 'a':
            href = [v for k, v in attrs if k == 'href']
            if href:
                self.urls.extend(href)


def _is_valid_hash(path, algo, hexdigest):
    if algo not in hashlib.algorithms_available:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with path.open('rb') as fp_file:
        data = fp_file.read(32768)
        while data:
            local_hash.update(data)
            data = fp_file.read(32768)

    return local_hash.hexdigest() == hexdigest


re_clean_1 = re.compile(r'\[[^]()]+\]$')
def clean_pkgconfig_prov(prov):
    prov = re_clean_1.sub('', prov)
    return prov


class SpecFileError(Exception):
    """Used for problems in the spec file"""


class SpecFile():
    """Functions related to a spec file"""
    re_update_version = re.compile(
        r'^(?P<pre>Version[ \t]*:\s*)(?P<version>.+)(?P<post>\s*)$',
        re.MULTILINE + re.IGNORECASE
    )
    re_update_release = re.compile(
        r'^(?P<pre>Release[ \t]*:\s*)(?P<release>%mkrel [0-9.]+)(?P<post>\s*)$',
        re.MULTILINE + re.IGNORECASE
    )
    re_update_patch = re.compile(
        r'^(?P<pre>Patch0*?)(?P<nr>[0-9]*)(?P<pre2>[ \t]*:\s*)(?P<patch>.+)(?P<post>\s*)\n',
        re.MULTILINE + re.IGNORECASE
    )
    re_br_part = re.compile(
        r'(?P<br>[^\s%{},<>=][^\s%{},<>=]*)\b(?P<post>\s*(?:(?P<operator>=|>=|<=|=<|=>|>|<)\s*' \
        r'(?P<version>[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+)\b)?)'
    )
    #re_update_br = re.compile(
    #    r'^(?P<pre>BuildRequires:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+[^\s%{},]+?)?\s*\n)',
    #    re.MULTILINE + re.IGNORECASE
    #)
    re_update_br = re.compile(
        r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<br>[^\s%{},]+?)' \
        r'(?P<post>\s*(?:(?:[<>]=?|=)\s+(?:[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+))?\s*\n)',
        re.MULTILINE + re.IGNORECASE
    )
    #re_update_br_unsplit = re.compile(
    #    r'^(?P<pre>BuildRequires:\s*)(?P<unsplit>[^\n,]+,[^\n]*)(?P<unsplitpost>\s*\n)',
    #    re.MULTILINE + re.IGNORECASE
    #)

    re_update_br_fix_operator = re.compile(
        r'^(?P<pre>BuildRequires[ \t]*:\s*[^\n]*)(?P<operator>=<|=>)(?P<post>[^\n]+)\n',
        re.MULTILINE + re.IGNORECASE
    )
    re_update_br_unsplit = re.compile(
        r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<unsplit>(?:%s,?(?:[ \t\f\v]+|$)){2,})' \
        r'(?P<unsplitpost>\n)' % (re_br_part.pattern,),
        re.MULTILINE + re.IGNORECASE
    )

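    # Illustrative match for re_update_br (the spec line is hypothetical):
    #   "BuildRequires:  gtk+3.0-devel >= 3.24\n"
    # gives pre='BuildRequires:  ', br='gtk+3.0-devel', post=' >= 3.24\n'.
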
    def __init__(self, path, package=None, module=None):
        self.path = path
        self._package = package
        self.module = package.name if package else module

        if package:
            self.cwd = package.path
        else:
            self.cwd = path.parent
            # If spec file is located in SPECS directory, cwd should be 1
            # directory higher
            if self.cwd.name == 'SPECS':
                self.cwd = self.cwd.parent

        # WARNING: Requires Python 3.7+ as that version guarantees dict ordering
        self._changes = dict()
        self._should_rebuild = False
        self._changed_spec_file = False

    @property
    def changes(self):
        """Description of the changes made to the spec file"""
        return ''.join(self._changes.keys()) \
            if len(self._changes) == 1 \
            else "\n".join(("- %s" % change for change in self._changes))

    @property
    def made_changes(self):
        """Determine if the spec file was changed"""
        return self._changed_spec_file

    @property
    def version(self):
        """Provide the version as parsed by rpm"""
        cmd = ["rpm", "--define", "_topdir %s" % self.cwd, "--specfile", self.path,
               "--queryformat", "%{VERSION}\n"]
        return subprocess.check_output(cmd, encoding='utf-8').splitlines()[0]

    @property
    def should_rebuild(self):
        """Determine if the package should be rebuilt"""
        return self._should_rebuild

    @property
    def release(self):
        """Provide the release as parsed by rpm"""
        cmd = ["rpm", "--define", "%dist %nil", "--define", "_topdir %s" % self.cwd,
               "--specfile", self.path, "--queryformat", "%{RELEASE}\n"]
        return subprocess.check_output(cmd, encoding='utf-8').splitlines()[0]

    def _sources_and_patches(self, flag=None):
        os.chdir(self.cwd)
        rpm.delMacro("_topdir")
        rpm.addMacro("_topdir", str(self.cwd))
        trans_set = rpm.ts()
        spec = trans_set.parseSpec(str(self.path))
        try:
            srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
                            else spec.sources()
        except ValueError as exc:
            # Reraise this into a more specific exception
            raise SpecFileError from exc
        finally:
            # trans_set.parseSpec can change internal macros (e.g. redefine things like mkrel);
            # reload the config to undo this
            rpm.reloadConfig()
        return dict(
            (Path(name).name, [name, 0 if no == 2147483647 and flags == 2 else no])
            for name, no, flags in srclist if flag is None or flags == flag
        )
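
    # Illustrative shape of the mapping returned above (file names are
    # hypothetical): a spec with "Patch1: foo-fix-build.patch" gives
    #   _sources_and_patches(flag=2) ->
    #       {'foo-fix-build.patch': ['foo-fix-build.patch', 1]}
    # i.e. basename -> [entry as written in the spec, source/patch number].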

    @property
    def patches(self):
        """Return the patches"""
        return self._sources_and_patches(flag=2)

    @property
    def sources(self):
        """Return the sources"""
        return self._sources_and_patches(flag=1)

    def convert_buildrequires(self, explain_changes=False):
        """Converts BuildRequires into their preferred form

        Requires SpecFile to be initialized with its package argument"""

        made_changes = False
        re_prov_get_version = re.compile(r'^[^(]+\([^)]+-(?P<version>[0-9]+\.[0-9][0-9.]*)\)$')
        no_alt = set()
        no_change = {}

        buildreqs = self.buildrequires

        log = logging.getLogger(None)
        debug_enabled = log.isEnabledFor(logging.DEBUG)

        convert_brs = {
            'pkgconfig': {
                'desc': 'convert -devel buildrequires into pkgconfig',
                'check_br': lambda req: req.endswith('-devel'),
                'check_provide': lambda prov: prov.startswith('pkgconfig('),
                'basereqs': lambda req: [req[:-len('-devel')]],
                'basereq_no_version': lambda basereqs: [basereq.rstrip('1234567890.') for basereq in basereqs
                                                        if basereq[-1] in '1234567890'],
                'versions_from_basereq': lambda basereqs: set((basereq[len(basereq.rstrip('01234567890.')):]
                                                               for basereq in basereqs if basereq[-1] in '1234567890')),
                'versions_basereq_extra': lambda versions: set(("%s.0" % version for version in versions
                                                                if '.' not in version)),
                'extra': lambda basereqs, versions: \
                    ['pkgconfig(%s)' % basereq for basereq in basereqs] +
                         ['pkgconfig(%s)' % basereq[len('lib'):]
                          if basereq.startswith('lib') else 'pkgconfig(lib%s)' % basereq
                          for basereq in basereqs] +
                         ['pkgconfig(%s-%s)' % (basereq, version) for basereq in basereqs for version in versions],
            },
            'perl': {
                'desc': 'convert perl- buildrequires into perl()',
                'check_br': lambda req: req.startswith('perl-'),
                'check_provide': lambda prov: prov.startswith('perl('),
                'basereqs': lambda req: [req[len('perl-'):]],
                'extra': lambda basereqs, versions: ['perl(%s)' % basereq.replace('-', '::') for basereq in basereqs],
            },
            # PySolFC.spec:BuildRequires:       python3-setuptools
            # $ rpm -q python3-setuptools --provides | grep python3dist
            # python3dist(setuptools)
            # python3dist(setuptools) = 46.1.3
            #
            # There's also provides such as:
            # python3.8dist(setuptools)
            # pythonegg(3)(setuptools)
            'python-pkg': {
                'disabled': True,
                'desc': 'convert python buildrequires into python3dist()',
                'check_br': lambda req: req.startswith('python3-'),
                'check_provide': lambda prov: prov.startswith('python3dist('),
                'basereqs': lambda req: [req[len('python3-'):]],
                'extra': lambda basereqs, versions: ['python3dist(%s)' % basereq for basereq in basereqs],
            },
            'python-egg': {
                'desc': 'convert pythonegg(3) into python3dist()',
                'check_br': lambda req: req.startswith('pythonegg(3)(') and req.endswith(')'),
                'check_provide': lambda prov: prov.startswith('python3dist('),
                'basereqs': lambda req: [req[len('pythonegg(3)('):-1]],
                'extra': lambda basereqs, versions: ['python3dist(%s)' % basereq for basereq in basereqs],
            },
        }

        for keys in convert_brs.values():
            if 'disabled' in keys and keys['disabled']:
                continue

            keys['changes'] = {}
            br_old = [r for r in list(buildreqs.keys()) if keys['check_br'](r)]
            if debug_enabled and br_old:
                pprint.pprint(br_old)
            for req in br_old:
                every_provides, every_ignored_provide = Downstream.alternative_provides(req)
                # XXX - document what clean_pkgconfig_prov is for
                #       maybe integrate clean_pkgconfig_prov in alternative_provides function?
                provides = [clean_pkgconfig_prov(prov)
                            for prov in every_provides
                            if keys['check_provide'](prov)]
                provides_ignored = [clean_pkgconfig_prov(prov)
                                    for prov in every_ignored_provide
                                    if keys['check_provide'](prov)]
                change_to = None
                # Reset per requirement; only set by the 'extra' candidate branch below
                check_for = None
                if len(provides) == 1 and not provides_ignored:
                    if debug_enabled:
                        print("NOTICE: Only one available option, using %s" % provides[0])

                    change_to = provides[0]
                elif provides and 'extra' in keys:
                    # Determine base require (e.g. gtk+3.0-devel --> gtk+3.0)
                    basereqs = keys['basereqs'](req)

                    # Determine version matches
                    versions = set()
                    if 'versions_from_basereq' in keys:
                        # Determine if the basereq has a version at the end (e.g. gtk+3.0 --> 3.0)
                        versions.update(keys['versions_from_basereq'](basereqs))
                        if versions and 'basereq_no_version' in keys:
                            basereqs.extend(keys['basereq_no_version'](basereqs))
                            # Make it unique again, but keep the order
                            #
                            # This is done so that e.g. python3-devel changes to pkgconfig(python3),
                            # even if pkgconfig(python) might be available
                            basereqs = list(distinct(basereqs))
                        if 'versions_basereq_extra' in keys:
                            versions.update(keys['versions_basereq_extra'](versions))

                    if not versions:
                        # In case no versions were retrieved from the basereq,
                        # match with any version found from the alternative
                        # provides (heuristic matching)
                        #
                        # This is only done as a last resort to avoid matching
                        # e.g. gtk+3.0-devel --> pkgconfig(gtk+2.0)
                        for prov in provides:
                            for match in re_prov_get_version.finditer(prov):
                                if debug_enabled:
                                    print("NOTICE: Heuristically adding version %s from provide %s" \
                                            % (match.group('version'), prov))
                                versions.add(match.group('version'))

                    check_for = keys['extra'](basereqs, versions)

                    if debug_enabled and versions:
                        pprint.pprint(versions)

                    for check in check_for:
                        if check in provides:
                            if debug_enabled:
                                print("NOTICE: Matched: %s => %s" % (check, provides))
                            change_to = check
                            break

                if change_to is None and provides:
                    provides_no_versions = []
                    for prov in provides:
                        if re_prov_get_version.fullmatch(prov) is None:
                            provides_no_versions.append(prov)

                    if len(provides_no_versions) == 1 and not provides_ignored:
                        change_to = provides_no_versions[0]
                        if debug_enabled:
                            print("NOTICE: Only one available versionless option, using %s" % change_to)


                if provides:
                    if change_to is None:
                        no_change[req] = (provides, check_for)
                else:
                    no_alt.add(req)

                if change_to is not None:
                    keys['changes'][req] = change_to

        # Optionally explain the intended and failed buildrequire changes
        if explain_changes:
            if debug_enabled:
                for keys in convert_brs.values():
                    if 'changes' in keys and keys['changes']:
                        pprint.pprint(keys['changes'])

            if no_alt:
                print("WARNING: no alternatives found for: %s" % ", ".join(sorted(no_alt)))

            if no_change and debug_enabled:
                pprint.pprint(no_change)


        keys_with_changes = [keys for keys in convert_brs.values() if 'changes' in keys and keys['changes']]
# XXX - seems to cause false messages
#    if not keys_with_changes:
#        keys_with_changes.append({'changes': [], 'desc': 'unsplit BRs'})

        for keys in keys_with_changes:
            if self.update_br(keys['changes'], change_description=keys['desc']):
                made_changes = True

        return made_changes
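
    # Worked example of the pkgconfig conversion above (package and provide
    # names are hypothetical; real results depend on what urpmf reports):
    # for BuildRequires "gtk+3.0-devel", basereqs gives ['gtk+3.0'], the
    # trailing '3.0' is split off to extend that to ['gtk+3.0', 'gtk+'], and
    # the candidate list then includes pkgconfig(gtk+3.0), pkgconfig(gtk+),
    # pkgconfig(libgtk+), and pkgconfig(gtk+-3.0); the first candidate the
    # -devel package actually provides wins.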


    def clean_spec(self):
        """Clean the spec file of deprecated statements"""

        made_changes = False
        with self.path.open('r', encoding='utf-8') as fp_spec:
            data = fp_spec.read()

            made_changes, data = self._clean_spec_regexp(made_changes, data)
            made_changes, data = self._clean_spec_py_requires(made_changes, data)
            made_changes, data = self._clean_spec_defines(made_changes, data)
            made_changes, data = self._clean_spec_patches(made_changes, data)

            # Overwrite file with new version number
            if made_changes:
                self._changed_spec_file = True
                write_file(self.path, data)

        return made_changes


    def _clean_spec_regexp(self, made_changes, data):
        """Clean spec using regular expressions"""

        re_rm_buildroot = r'^(?:\s*\[[^\n\]\[]+\][ \t]+\&\&[ \t]+)?(?:rm|\%__rm|\%\{__rm\}) *' \
            r'(?:-rf|-fr|-r) *"?(?:[%$]buildroot|[%$]\{buildroot\}|[%$]\{buildroot\}|' \
            r'\$RPM_BUILDROOT|\$RPM_BUILD_ROOT|\$\{RPM_BUILD_ROOT\}|\$RPM_BUILD_DIR)"?/?[ \t]*\n'

        re_clean_spec = [
            # remove %defattr
            (
                'remove defattr',
                None,
                re.compile(r'(?P<keeppre>^\%files(?:[ \t]+[^\n]*)?\n(?:^\%doc [^\n]+\n)?)'
                           r'^\%defattr\s*\(- *, *root *, *root *(?:, *-)?\)\s*\n',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'remove cleaning buildroot in install',
                None,
                re.compile(r'(?P<keeppre>^\%install(?:[ \t]+[^\n]*)?\n)' + re_rm_buildroot + r'\n?',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'remove clean section',
                None,
                re.compile(r'^\%clean[ \t]*\n(?:' + re_rm_buildroot +
                           r')?\s*(?P<keeppost>(?:^#[^%\n]+\n)*^(?:\%files|\%post|\%pre|\%trigger|'
                           r'\%install|\%package|\%check|\%_font_pkg|$(?!.|\n)))',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'remove buildroot definition',
                None,
                re.compile(r'^BuildRoot[ \t]*:[^\n]+\n', re.MULTILINE + re.IGNORECASE)
            ),
            (
                'remove unneeded setup option',
                None,
                re.compile(r'^(?P<keeppre>\%setup -q)(?: -n|n) (?:\%name|\%\{name\})-'
                           r'(?:\%version|\%\{version\})(?P<keeppost>\n)',
                           re.MULTILINE + re.IGNORECASE)),
            (
                'https for download.gnome.org',
                r'\g<keeppre>https://\g<keeppost>',
                re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)http://(?P<keeppost>download.gnome.org/[^\n]+\n)',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'download.gnome.org instead of ftp.gnome.org',
                r'\g<keeppre>https://download.gnome.org\g<keeppost>',
                re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)'
                           r'(?:ftp|http|https)://ftp.gnome.org/pub/GNOME'
                           r'(?P<keeppost>/[^\n]+\n)',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'metacpan.org instead of www.cpan.org and search.cpan.org',
                r'\g<keeppre>https://metacpan.org/release/\g<keepmid>\g<keeppost>',
                re.compile(r'^(?P<keeppre>URL[ \t]*:[^\n]+)'
                           r'http://(?:search\.cpan\.org)/(?:dist|~[^/]+)/(?P<keepmid>[^/\n]+)/?[ \t]*'
                           r'(?P<keeppost>[^\n]*\n)',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'metacpan.org instead of www.cpan.org and search.cpan.org',
                r'\g<keeppre>https://cpan.metacpan.org/\g<keepmid>\g<keeppost>',
                re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)'
                           r'https?://(?:(?:www\.cpan\.org)(?:/CPAN)?|(?:search\.cpan\.org)/CPAN)/+'
                           r'(?P<keepmid>authors/id/|modules/by-module/|modules/by-authors/id/)'
                           r'(?P<keeppost>[^\n]+\n)',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'restrict what libraries are matched with major numbers',
                r'\g<keeppre>{,.*}',
                re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})\*$', re.MULTILINE)
            ),
            (
                'keep library matching using two lines',
                r'\g<keeppre>\n\g<keeppre>.*',
                re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})$\n(?P=keeppre)\{,\.\*\}$', re.MULTILINE)
            ),
            (
                'make use of autopatch',
                r'%autopatch -p1',
                re.compile(r'^%apply_patches$', re.MULTILINE)
            ),
            (
                'change configure2_5x macro to configure',
                r'%configure',
                re.compile(r'^%configure2_5x\b', re.MULTILINE)
            ),
            (
                'change make macro to make_build',
                r'\g<keeppre>%make_build',
                re.compile(r'^(?P<keeppre>[ \t]*)%make\b', re.MULTILINE)
            ),
            (
                'make_build already implies V=1 and VERBOSE=1, remove these',
                r'\g<keeppre>%make_build',
                re.compile(r'^(?P<keeppre>[ \t]*)%make_build(?:[ \t]+(?:V=1|VERBOSE=1))+\b', re.MULTILINE)
            ),
            # Can have side-effects, disable for now
            #(
            #    'change makeinstall_std macro to make_install',
            #    r'%make_install',
            #    re.compile(r'^%makeinstall_std\b', re.MULTILINE)
            #),
            (
                'change find_lang --with-help into --with-gnome',
                r'\g<keeppre> --with-gnome\g<keeppost>',
                re.compile(r'^(?P<keeppre>\s*\%find_lang[^\\\n]+) --with-help(?P<keeppost>[^\\\n]*\n)',
                           re.MULTILINE + re.IGNORECASE)
            ),
            (
                'change find_lang remove duplicate --with-gnome',
                None,
                re.compile(r'^(?P<keeppre>\%find_lang[^\\\n]+ --with-gnome) --with-gnome(?P<keeppost>[^\\\n]*\n)',
                           re.MULTILINE + re.IGNORECASE)
            ),
            # Use new Python macros
            ('use new Python macros', r'%py3_build', re.compile(r'^%{__python3} setup.py build$', re.MULTILINE)),

            ('use new Python macros', r'%py3_install', re.compile(r'^%{__python3} setup.py install$', re.MULTILINE)),

            ('use new Python macros', r'%{python3_version}', re.compile(r'%{py3ver}', re.MULTILINE)),
            (
                'make use of _mandir macro',
                r'%{_mandir}\g<keeppost>',
                # /usr/share/man/man1/test_whitespaces.1.xz
                re.compile(r'^(?:%{_datadir}/man|%_datadir/man|/usr/share/man)(?P<keeppost>/man[0-9]+/'
                           r'[a-zA-Z0-9]+[-a-zA-Z0-9_./]+\.[0-9]+[.a-z]*(?:\n|$))',
                           re.MULTILINE)
            ),
            (
                'make use of _infodir macro',
                r'%{_infodir}\g<keeppost>',
                # /usr/share/man/man1/test_whitespaces.1.xz
                re.compile(r'^(?:%{_datadir}/info|%_datadir/info|/usr/share/info)(?P<keeppost>/'
                           r'[a-zA-Z0-9]+[-a-zA-Z0-9_./]+\.info[.a-z]*(?:\n|$))',
                           re.MULTILINE)
            ),
            (
                # %{_mandir}/man1/*.xz
                # avoid adding a *
                'avoid assuming xz compression scheme',
                r'\g<keeppre>\g<keeppost>',
                re.compile(r'^(?P<keeppre>(?:%{_mandir}|%_mandir)/'
                           r'[a-zA-Z0-9]+[-@a-zA-Z0-9_./]+/\*)'
                           r'\.xz(?P<keeppost>\n|$)',
                           re.MULTILINE)
            ),
            (
                # %{_mandir}/man8/fxotune.8.xz
                # need to add a *
                'avoid assuming xz compression scheme',
                r'\g<keeppre>*\g<keeppost>',
                re.compile(r'^(?P<keeppre>(?:%doc )?(?:%{_mandir}|%_mandir)/'
                           r'[a-zA-Z0-9]+[-*@+:a-zA-Z0-9_./]+[-a-zA-Z0-9_./]'
                           r'(?:/(?:%{name}|%name|%{shortname})\.[0-9a-z]+)?'
                           r')'
                           r'\.xz(?P<keeppost>\n|$)',
                           re.MULTILINE)
            ),
        ]

        for reason, change_to, regexp in re_clean_spec:
            if change_to is None:
                change_to = ""
                if 'keeppre' in regexp.groupindex:
                    change_to += r'\g<keeppre>'
                if 'keeppost' in regexp.groupindex:
                    change_to += r'\g<keeppost>'
            data, subs = regexp.subn(change_to, data)
            if subs:
                made_changes = True
                self._changes['SILENT %s' % reason] = True

        return made_changes, data
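
    # Example of one rule above (hypothetical spec snippet): the
    # 'remove defattr' regexp turns
    #   %files
    #   %defattr(-,root,root)
    # into just "%files"; when change_to is None the replacement is rebuilt
    # from the keeppre/keeppost groups, so only the %defattr line is dropped.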


    def _clean_spec_py_requires(self, made_changes, data):
        """Convert %py_requires and %py_requires -d"""

        re_convert_py_requires = [
            (
                'remove py_requires',
                ('python',),
                re.compile(r'^\%(?:py_requires|\{py_requires\})[ \t]*\n', re.MULTILINE)
            ),
            (
                'remove py_requires -d',
                ('python', 'python-devel'),
                re.compile(r'^\%(?:py_requires[ \t]+-d|\{py_requires[ \t]+-d\})[ \t]*\n', re.MULTILINE)
            ),
        ]

        # First figure out how a buildrequire is usually defined
        match = self.re_update_br.search(data)
        br_pre = match.group('pre') if match and match.group('pre') else "BuildRequires:\t"

        for reason, new_brs, regexp in re_convert_py_requires:
            match = regexp.search(data)
            if match:
                # Don't add multiple buildrequires
                change_to = ""
                brs_in_file = set()
                for match2 in self.re_update_br.finditer(data):
                    if match2.group('br') in new_brs:
                        brs_in_file.add(match2.group('br'))

                for buildreq in set(new_brs) - brs_in_file:
                    change_to += ''.join((br_pre, buildreq, "\n"))
                data, subs = regexp.subn(change_to, data)
                if subs:
                    made_changes = True
                    self._changes['SILENT %s' % reason] = True

        return made_changes, data


    def _clean_spec_defines(self, made_changes, data):
        # Convert:
        #   %define name SOMETHING
        #   name: %{name}
        # Into
        #   name: SOMETHING
        converted_defines = []
        for search_for in ('name', 'version', 'release', 'summary', 'Summary', 'group'):
            search_for_ignore_case = ''.join(("[%s%s]" % (letter, letter.swapcase()) for letter in search_for))
            re_spec = re.compile(
                r'^(?P<keeppre>' + search_for_ignore_case + r'[ \t]*:[ \t]*)(?:\%' +
                re.escape(search_for) + r'|\%\{' + re.escape(search_for) +
                r'\})(?P<keeppost>[ \t]*\n)', re.MULTILINE
            )
            re_variable = re.compile(
                r'^(?P<keeppre>\%define[ \t]+' + re.escape(search_for) +
                r'[ \t]+(?P<definition>[^\n]+?))(?P<keeppost>[ \t]*\n)',
                re.MULTILINE
            )

            match = re_variable.search(data)
            if match and match.group('definition') and len(re_variable.findall(data)) == 1:
                match2 = re_spec.search(data)
                if match2:
                    data, subs = re_spec.subn(
                        r'\g<keeppre>' + match.group('definition').replace('\\', '\\\\') + r'\g<keeppost>',
                        data
                    )
                    if subs:
                        made_changes = True
                        data, subs = re_variable.subn('', data)
                        converted_defines.append(search_for)

        if made_changes and converted_defines:
            data = data.lstrip()
            self._changes['SILENT remove variable definition(s) %s' % ", ".join(converted_defines)] = True

        return made_changes, data

    def _clean_spec_patches(self, made_changes, data):
        re_autopatch = re.compile(r'^[ \t]*\%autopatch(?:[ \t]+-p(?P<strip>[0-9]+))?$', re.MULTILINE)

        re_patch_header = re.compile(
            r'^Patch(?P<nr>[0-9]*)[ \t]*:[ \t]*(?P<patch>[^\n]+)\n',
            re.MULTILINE + re.IGNORECASE
        )
        re_patch_any = re.compile(r'^[ \t]*\%patch(?P<nr>[0-9]*)', re.MULTILINE)
        re_patch_valid = re.compile(
            r'^[ \t+]*\%patch(?P<nr>[0-9]*)(?:[ \t]+-p(?P<strip>[0-9]+))?(?:[ \t]+-b[ \t]+\S+)?$\n?',
            re.MULTILINE
        )
        re_prep_patches = re.compile(
            r'^\%setup[^\n]+$(?:' + re_patch_valid.pattern +
            r'|^#[^%\n]+\n|^[ \t]*(?:%{_bindir}/|%_bindir)?autoreconf[ \t][^\n]+$|\s)+\n\%build',
            re.MULTILINE
        )

        give_patchnr = lambda match: (
            match.group('nr') if len(match.group('nr')) == 1 else match.group('nr').lstrip('0')
            ) if match.group('nr') else "0"

        # Make use of %apply_patches

        if re_autopatch.search(data):
            # Ignore if using %autopatch
            return made_changes, data

        patches = self.patches
        if not patches:
            return made_changes, data

        if len(patches) > 5:
            print("NOTICE: More than 5 patches, skipping package", file=sys.stderr)
            return made_changes, data

        if self.uses_autopatch:
            return made_changes, data

        # XXX -- apparently this is really inefficient with e.g. apache
        match2 = re_prep_patches.search(data)
        patch_nrs_header = set((give_patchnr(match) for match in re_patch_header.finditer(data)))
        patch_nrs_any = set((give_patchnr(match) for match in re_patch_any.finditer(data)))
        patch_nrs_valid = set(
            (give_patchnr(match) for match in re_patch_valid.finditer(match2.group(0)))
            ) if match2 else set()

        if not patch_nrs_header:
            # XXX -- weird, self.patches should've returned 0 already
            return made_changes, data

        if not patch_nrs_header == patch_nrs_any == patch_nrs_valid:
            print("NOTICE: Unable to automatically convert %s patches into %%autopatch "
                  "(header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any),
                                                        len(patch_nrs_valid)),
                  file=sys.stderr)
            return made_changes, data

        patch_flags = set((
            0 if match.group('strip') is None else match.group('strip')
            for match in re_patch_valid.finditer(match2.group(0))
        ))

        if len(patch_flags) != 1:
            print("NOTICE: Unable to automatically convert spec file to use autopatch as "
                  "different -p / strip levels are used", file=sys.stderr)
            return made_changes, data

        # Whoot, we can convert!!
        change_to = "%%autopatch -p%s\n" % list(patch_flags)[0]
        prep, subs1 = re_patch_valid.subn(change_to.replace('\\', '\\\\'), match2.group(0), count=1)
        prep, subs2 = re_patch_valid.subn('', prep)
        if len(patch_nrs_valid) != subs1 + subs2:
            print("WARNING: Couldn't replace patches?!? Likely error in program logic", file=sys.stderr)
            return made_changes, data

        # First check if patches currently apply
        if not self.check_and_update_patches(check_only=True):
            return made_changes, data

        try:
            change_to = data.replace(match2.group(0), prep, 1)
            self._changed_spec_file = True
            write_file(self.path, change_to)

            # Validate patches still apply
            if self.check_and_update_patches(check_only=True):
                data = change_to
                self._changes['SILENT use autopatch'] = True
                made_changes = True
        finally:
            if not made_changes:
                write_file(self.path, data)

        return made_changes, data

    @property
    def buildrequires(self):
        """Get the BuildRequires of this spec file

        Parses the spec file to do this, so any macros are expanded"""

        rpm.delMacro("_topdir")
        rpm.addMacro("_topdir", str(self.cwd))
        trans_set = rpm.ts()
        try:
            spec = trans_set.parseSpec(str(self.path))
        except ValueError as exc:
            # Reraise this into a more specific exception
            raise SpecFileError from exc
        finally:
            # ts.parseSpec can change internal macros (e.g. redefine things like mkrel);
            # reload the config to undo this
            rpm.reloadConfig()

        requires = spec.sourceHeader[rpm.RPMTAG_REQUIRES]
        require_flags = spec.sourceHeader[rpm.RPMTAG_REQUIREFLAGS]
        require_ver = spec.sourceHeader[rpm.RPMTAG_REQUIREVERSION]

        buildreqs = {}
        for req, flag, ver in itertools.zip_longest(requires, require_flags, require_ver):
            # bitmask other than 15 means the require is (probably?) a require for a trigger or script
            if flag & 15 != flag:
                continue

            ver_cmp = ""
            if flag & rpm.RPMSENSE_LESS:
                ver_cmp += '<'
            if flag & rpm.RPMSENSE_GREATER:
                ver_cmp += '>'
            if flag & rpm.RPMSENSE_EQUAL:
                ver_cmp += '='

            buildreqs[req] = (ver_cmp, ver)
        return buildreqs
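
    # Illustrative result (actual contents depend on the spec file):
    #   {'pkgconfig(glib-2.0)': ('>=', '2.66.0'), 'meson': ('', '')}
    # The "flag & 15" test keeps plain version comparisons
    # (RPMSENSE_LESS/GREATER/EQUAL) and skips requires that carry extra
    # sense bits, such as script or trigger dependencies.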

    @property
    def uses_autopatch(self):
        """Check if autopatch (or similar) is used"""
        return subprocess.call(['grep', '-Eq', '^%(apply_patches|autopatch|autosetup)', '--', self.path]) == 0

    def _revert_changes(self):
        """Revert uncommited changes made to spec file"""
        self._changes.clear()
        subprocess.check_call(["svn", "revert", "-R", self.cwd])

    def remove_patch(self, patchnr, info_patchname=None):
        """Remove a patch from the spec file"""
        # Uses string as well as numeric comparisons to determine which patch to remove
        nrs = [str(patchnr), patchnr]
        if patchnr == 0:
            nrs.append('')

        with self.path.open('r', encoding='utf-8') as fp_spec:
            data = fp_spec.read()

            data, subs = self.re_update_patch.subn(
                lambda match: '' if match.group('nr') in nrs or (match.group('nr').isdigit()
                                                                 and int(match.group('nr')) in nrs)
                else match.group(0), data
            )
            if not subs:
                print("ERROR: Could not remove patch nr %s!" % patchnr, file=sys.stderr)
                return False

            # Overwrite file with new version number
            self._changed_spec_file = True
            write_file(self.path, data)

            if info_patchname:
                self._changes['dropped merged patch %s' % info_patchname] = True
            else:
                self._changes['dropped merged patch %s' % patchnr] = True

        subprocess.check_call(['mgarepo', 'sync'], cwd=self.cwd)
        return True

    def check_and_update_patches(self, check_only=False, lines_in_log=15):
        """Check if patches still apply

        Remove any merged patches"""

        initial_patches = self.patches
        patches = initial_patches
        uses_autopatch = self.uses_autopatch if patches else False

        while True:
            try:
                # Check patches still apply
                subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
            except subprocess.CalledProcessError:
                logfile = self.path.parent.joinpath(
                    'log.%s' % self.path.stem
                )

                failed_patch = None
                cmd = None
                cmd_output = []
                cmd_before = (None, None)
                # Determine the last command that failed
                if logfile.exists():
                    print(logfile)
                    with logfile.open('r', encoding='utf-8') as fp_logfile:
                        for line in line_input(fp_logfile):
                            if line.startswith('+ '):
                                cmd_before = (cmd, cmd_before)
                                cmd = line[2:]
                                cmd_output = []
                            else:
                                cmd_output.append(line)

                cmd_parsed = shlex.split(cmd) if cmd else []
                cmd_before_parsed = shlex.split(cmd_before[0]) if cmd_before[0] else []

                if not check_only and uses_autopatch and patches and cmd_parsed:
                    if Path(cmd_parsed[0]).name in ('patch', 'cat'):
                        if Path(cmd_parsed[-1]).exists():
                            failed_patch = Path(cmd_parsed[-1]).name
                        elif cmd_parsed[-1].startswith('-') and Path(cmd_before_parsed[-1]).exists():
# for %autopatch as well as %patch
#+ /usr/bin/cat /home/src/pkgs/gnome-getting-started-docs/SOURCES/gs-browse-web-firefox.page.patch
#+ /usr/bin/patch -p1 -s
                            failed_patch = Path(cmd_before_parsed[-1]).name

                    # Patch is merged if there is at least one 'ignored' line and no 'FAILED' line anywhere
                    has_ignored = False
                    has_reversed = False
                    has_failed = False
                    for line in cmd_output:
                        if 'FAILED' in line or 'File to patch:' in line:
                            has_failed = True
                            break

                        if 'ignored' in line:
                            has_ignored = True
                        elif 'saving rejects to file' in line:
                            has_failed = True
                            break

                        if 'Reversed (or previously applied) patch detected' in line:
                            has_reversed = True

                    if has_ignored and has_reversed and not has_failed:
                        # If patch was merged, drop it from the spec file and retry
                        print("INFO: Patch has been merged: %s" % failed_patch, file=sys.stdout)
                        if failed_patch in patches:
                            if self.remove_patch(patches[failed_patch][1], failed_patch):
                                # try again
                                patches = self.patches
                                continue

                        print("ERROR: Problem removing merged patch: %s" % failed_patch, file=sys.stderr)
                        return False

                if cmd and len(cmd_output) > lines_in_log:
                    print('+ %s' % cmd, file=sys.stdout)
                    print("\n".join(cmd_output), file=sys.stdout)
                elif logfile.exists():
                    subprocess.call(['tail', '-n', str(lines_in_log), logfile])

                if failed_patch:
                    print("ERROR: Problem applying patch: %s" % failed_patch, file=sys.stderr)
                elif cmd:
                    print("ERROR: Problem in %%prep phase command: %s" % cmd, file=sys.stderr)
                elif patches:
                    print("ERROR: Problem applying patches and/or %prep phase", file=sys.stderr)
                else:
                    print("ERROR: Problem in %prep phase", file=sys.stderr)
                return False
            finally:
                # bm doesn't clean up after itself, and its cleanup option removes the spec file + sources!
                buildpath = self.cwd.joinpath('BUILD', '%s-%s' % (self.module, self.version))
                if buildpath.exists():
                    shutil.rmtree(buildpath, ignore_errors=True)

            return True

    def update_br(self, changes, change_description='update buildrequirements'):
        """Update buildrequirement"""

        # XXX - doesn't handle buildrequires with version numbers :-(
        made_changes = False

        with self.path.open('r', encoding='utf-8') as fp_spec:
            data = fp_spec.read()

            # Change any "," in buildrequires into multiple lines
            data, subs = self.re_update_br_unsplit.subn(
                lambda match: ''.join((''.join((match.group('pre'), match2.group(0), match.group('unsplitpost')))
                                       for match2 in self.re_br_part.finditer(match.group('unsplit'))
                                       if match2.group(0).strip() != '')),
                data
            )
            if subs:
                made_changes = True
                self._changes['SILENT one line per buildrequirement'] = True

            # Change =< and => operators into <= and >=
            data, subs = self.re_update_br_fix_operator.subn(
                lambda match: match.group(0).replace('=>', '>=').replace('=<', '<=')
                if self.re_update_br.match(match.group(0).replace('=>', '>=').replace('=<', '<='))
                else match.group(0), data
            )
            if subs:
                made_changes = True
                self._changes['SILENT fix operator in buildrequires'] = True

            # Now update buildrequires if any
            data, subs = self.re_update_br.subn(
                lambda match: ''.join((match.group('pre'), changes[match.group('br')], match.group('post')))
                if match.group('br') in changes else match.group(0), data
            )

            if subs:
                made_changes = True
                self._changes['SILENT %s' % change_description] = True
            elif len(changes) != 0:
                print("ERROR: Could not update buildrequires for %s" % self.module, file=sys.stderr)
                print(changes)
                return False

            # Overwrite file with updated buildrequire
            self._changed_spec_file = True
            write_file(self.path, data)

        return made_changes
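
    # Hypothetical update_br call (keys are existing BuildRequires, values
    # their replacements):
    #   spec.update_br({'gtk+3.0-devel': 'pkgconfig(gtk+-3.0)'},
    #                  change_description='convert -devel buildrequires into pkgconfig')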


    MAX_JUDGEMENT = 5

    def ensure_no_local_changes(self, force=False):
        """Check if the repository has any local changes

        Can optionally force a clean checkout (by reverting any local
        changes). This should possibly be moved to the Downstream class, or
        into a new Package class"""
        cmd = ["svn", "diff", self.cwd]
        svn_diff_output = subprocess.check_output(cmd, encoding='utf-8')
        if svn_diff_output != '':
            print(svn_diff_output)
            print("ERROR: Package has uncommitted changes!", file=sys.stderr)
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print("WARNING: Force used; reverting svn changes", file=sys.stderr)
                self._revert_changes()
            except subprocess.CalledProcessError:
                return False

        return True

    def _check_can_update(self, force):
        data = None
        if not self.ensure_no_local_changes(force):
            return None

        with self.path.open('r', encoding='utf-8') as fp_spec:
            data = fp_spec.read()

            if data.count("%subrel") != 0:
                print("ERROR: %subrel found; don't know what to do!", file=sys.stderr)
                return None

            if data.count("%mkrel") != 1:
                print("ERROR: Multiple %mkrel found; don't know what to do!", file=sys.stderr)
                return None

        return data

    def update_release(self, release, reason, force=False):
        """Update release (usually for rebuilds)"""
        data = self._check_can_update(force)
        if data is None:
            return False

        # XXX - validate current release
        data, subs = self.re_update_release.subn(r'\g<pre>%%mkrel %s\g<post>' % release, data, 1)
        if subs != 1:
            print(data, file=sys.stdout)
            print("ERROR: Could not increase release!", file=sys.stderr)
            return False

        # Overwrite file with new release
        self._changed_spec_file = True
        write_file(self.path, data)

        self._changes['rebuild for %s' % reason] = True

        return True

    def update_version(self, version, force=False, max_judgement=MAX_JUDGEMENT):
        """Update version and reset release"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version, self.module)

        if judgement < 0:
            print("ERROR: %s!" % (msg), file=sys.stderr)
            return False

        if judgement < max_judgement:
            print("WARNING: %s!" % (msg))
            if not force:
                return False

        data = self._check_can_update(force)
        if data is None:
            return False

        data, subs = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if subs != 1:
            print("ERROR: Could not increase version!", file=sys.stderr)
            return False

        data, subs = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if subs != 1:
            print(data, file=sys.stdout)
            print("ERROR: Could not reset release!", file=sys.stderr)
            return False

        # Overwrite file with new version number
        self._changed_spec_file = True
        write_file(self.path, data)

        self._changes['new version %s' % version] = True

        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print("ERROR: Increased version to %s, but RPM doesn't agree!?!" % version, file=sys.stderr)
            return False

        # Try to download the new tarball several times, waiting between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1:
                time.sleep(SLEEP_REPEAT * 2 ** (tries // 5))

            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError as exc:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if exc.returncode != 1:
                    self._revert_changes()
                    print("ERROR: Could not download tarball", file=sys.stderr)
                    return False
        else:
            # failed to download tarball
            self._revert_changes()
            print("ERROR: Could not download tarball", file=sys.stderr)
            return False

        return self.check_and_update_patches()
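
# Minimal usage sketch for the SpecFile class above (the path is illustrative):
#   spec = SpecFile(Path('~/pkgs/gedit/SPECS/gedit.spec').expanduser())
#   print(spec.version, spec.release)
#   if spec.update_version('3.36.1'):  # judges increase, syncs tarball, checks patches
#       print(spec.changes)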

class Patch():
    """Do things with patches"""

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return str(self.path) if self.show_path else self.path.name

    @cached_property
    def svn_author(self):
        """Return the svn author of this patch

        Makes use of svn log."""
        try:
            cmd = ['svn', 'log', '-q', "--", self.path]
            contents = subprocess.check_output(cmd, close_fds=True, encoding='utf-8').strip("\n").splitlines()

            for line in contents:
                if ' | ' not in line:
                    continue

                fields = line.split(' | ')
                if len(fields) >= 3:
                    return fields[1]
        except subprocess.CalledProcessError:
            pass

        return None
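
    # The parsed "svn log -q" lines look roughly like (illustrative):
    #   r123456 | someauthor | 2020-04-01 12:00:00 +0200 (Wed, 01 Apr 2020)
    # so after splitting on ' | ', fields[1] is the patch's committer.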


class Upstream():
    """Class handling anything related to upstream (meaning: GNOME)

    For now this is limited to:
      - finding the module names
      - finding latest version of a module"""


    URL = "https://download.gnome.org/sources/"
    limit = None

    def __init__(self):
        urlopen = urllib.request.build_opener()

        good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

        # Get the files
        usock = urlopen.open(self.URL)
        parser = URLLister()
        parser.feed(usock.read().decode('utf-8'))
        usock.close()
        parser.close()
        files = parser.urls

        tarballs = set((filename.replace('/', '') for filename in files if good_dir.search(filename)))
        if self.limit is not None:
            tarballs.intersection_update(self.limit)

        self._names = tarballs

    @property
    def names(self):
        """Return the upstream names"""
        return self._names

    @classmethod
    @lru_cache(maxsize=4096)
    def versions(cls, module):
        """Return the possible versions for a given module"""
        versions = None

        url = '%s%s/cache.json' % (cls.URL, module)
        request = requests.get(url)
        json = request.json()
        if json is not None and len(json) > 2 and module in json[2]:
            versions = json[2][module]

        return versions
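
    # cache.json is the index that download.gnome.org keeps per module; its
    # rough shape (illustrative) is:
    #   [4, {"gedit": {"3.36.1": {"tar.xz": "3.36/gedit-3.36.1.tar.xz"}}},
    #       {"gedit": ["3.36.0", "3.36.1"]}, []]
    # so json[2][module] is the list of released versions.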

class Downstream():
    """Class handling anything related to downstream (meaning: Mageia)"""


    re_file = re.compile(
        r'^(?P<module>.*)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)' \
        r'\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$'
    )

    MEDIA = "Core Release Source"
    # PKGROOT will be overwritten (command line option)
    PKGROOT = Path('~/pkgs')
    DISTRO = None
    SECTION = None

    def __init__(self):
        cmd = ['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA]
        contents = subprocess.check_output(cmd, close_fds=True, encoding='utf-8').strip("\n").splitlines()

        srpm_files = {}
        module_srpm_tarballs = {}
        packages = set()

        for line in contents:
            try:
                srpm, version, filename = line.split("|")
            except ValueError:
                print(line, file=sys.stderr)
                continue

            packages.add(srpm)

            if '.tar' in filename:
                match = self.re_file.match(filename)
                if match:
                    fileinfo = match.groupdict()
                    module = fileinfo['module']

                    if module not in module_srpm_tarballs:
                        module_srpm_tarballs[module] = {}

                    if srpm in module_srpm_tarballs[module]:
                        # srpm seen before, check if version is newer
                        if version_cmp(module_srpm_tarballs[module][srpm], version) == -1:
                            module_srpm_tarballs[module][srpm] = version
                    else:
                        module_srpm_tarballs[module][srpm] = version

            if srpm not in srpm_files:
                srpm_files[srpm] = set()
            srpm_files[srpm].add(filename)

        self._packages = packages
        self.tarballs = module_srpm_tarballs
        self.files = srpm_files

    @property
    def packages(self):
        """Return all downstream package names"""
        return sorted(self._packages)

    @classmethod
    def package(cls, package):
        """Return an initialized package"""
        return Package(package, cls)

    _provide_to_alternate = {}
    @classmethod
    def alternative_provides(cls, search_for):
        """Give alternative provides for a certain provide

        Relies on urpmf. Results are cached. It will only provide alternatives if
        the alternative is only provided by one package. Meaning, if a pkgconfig(foo)
        is provided by 2 packages, the pkgconfig(foo) will NOT be given as an
        alternative.

        Sort of works like:
        $ urpmq --whatprovides $search_for --provides"""

        if not cls._provide_to_alternate:
            _provide_to_pkg = {}
            _pkg_to_provide = {}
            cmd = ['urpmf', "--qf", "%name\t%provides\t%arch", '.']
            for myline in subprocess.check_output(cmd, encoding='utf-8').splitlines():
                pkgname, pkgprovide, pkgarch = myline.split("\t")
                if pkgarch in ('src', 'i586'):
                    continue
                if '-debug' in pkgprovide:
                    continue

                if "[" in pkgprovide and pkgprovide.endswith("]"):
                    pkgprovidepart = pkgprovide.rstrip("]").partition("[")
                else:
                    pkgprovidepart = pkgprovide.partition("[")

                if pkgprovidepart[0] in _provide_to_pkg:
                    _provide_to_pkg[pkgprovidepart[0]].add(pkgname)
                else:
                    _provide_to_pkg[pkgprovidepart[0]] = set((pkgname,))

                if pkgname in _pkg_to_provide:
                    _pkg_to_provide[pkgname].add(pkgprovidepart[0])
                else:
                    _pkg_to_provide[pkgname] = set((pkgprovidepart[0],))

            provide_has_single = {key for key, stash in _provide_to_pkg.items()
                                  if len(stash) == 1}

            for key in provide_has_single:
                # Ignore some of the provides to optimize memory usage somewhat
                #
                # WARNING: This might need to be changed depending on how this
                # function is used
                if '(' in key and ')(' not in key:
                    continue

                for pkgname in _provide_to_pkg[key]:
                    for pkgprovide in _pkg_to_provide[pkgname]:
                        if '(' not in pkgprovide:
                            continue

                        if key not in cls._provide_to_alternate:
                            cls._provide_to_alternate[key] = (set(), set())

                        if pkgprovide in provide_has_single:
                            cls._provide_to_alternate[key][0].add(pkgprovide)
                        else:
                            cls._provide_to_alternate[key][1].add(pkgprovide)

        if search_for in cls._provide_to_alternate:
            return cls._provide_to_alternate[search_for]

        return (set(), set())

    def get_downstream_from_upstream(self, upstream, version):
        """Provide the downstream package(s) for a given upstream module name
        and version

        This will raise a ValueError exception in case a good match cannot be
        found"""


        if upstream not in self.tarballs:
            raise ValueError("No packages for upstream name: %s" % upstream)

        if len(self.tarballs[upstream]) == 1:
            return list(self.tarballs[upstream].keys())

        packages = {}
        for package_name in self.tarballs[upstream]:
            package = self.package(package_name)
            # Checkout package to ensure the checkout reflects the latest changes
            try:
                package.checkout()
            except subprocess.CalledProcessError as exc:
                raise ValueError("Multiple packages found and cannot checkout %s" % package_name) from exc

            # Determine version from spec file
            try:
                packages[package_name] = package.spec.version
            except subprocess.CalledProcessError as exc:
                raise ValueError("Multiple packages found and cannot determine version of %s" % package_name) from exc

        # Return all packages reflecting the current version
        matches = [package_name for package_name in packages if packages[package_name] == version]
        if matches:
            return matches

        # Return all packages reflecting the version before the current version
        # - determine the newest version in any spec file
        latest_possible_version = get_latest_version(list(packages.values()))
        # - now get the latest version before the current version
        latest_version = get_latest_version(list(packages.values()), max_version=version)
        if latest_version is None:
            raise ValueError("Multiple packages found and all versions are newer than %s" % version)

        # - if the latest_possible_spec version is not the latest version, then
        #   ensure it follows get_safe_max_version
        if latest_version != latest_possible_version and version_cmp(get_safe_max_version(latest_version, upstream), \
                version) != 1:
            raise ValueError("Multiple packages found and unsafe version increase: %s (%s => %s)" % (
                upstream, latest_version, version) \
            )

        # - now really get the right packages
        matches = [package_name for package_name in packages if packages[package_name] == latest_version]
        if matches:
            return matches

        # Give up
        raise ValueError("Multiple packages found and cannot determine package for version %s" % version)

class Package:
    """Represents a downstream package"""

    def __init__(self, packagename, downstreamclass):
        self.name = packagename
        self._downstream = downstreamclass

    @property
    def path(self):
        """Provide the local checkout path for a given package

        Package might not be checked out yet!"""
        return self._downstream.PKGROOT.expanduser().joinpath(self.name)

    @cached_property
    def spec(self):
        """Return the SpecFile for a given package"""

        return SpecFile(self.spec_path, package=self, module=self.name)

    @cached_property
    def spec_path(self):
        """Return the expected location of the SpecFile"""

        return self.path.joinpath("SPECS", "%s.spec" % self.name)

    def ensure_checkout(self):
        """Ensure that the package is checked out"""
        if not self.spec_path.exists():
            try:
                self.checkout()
            except subprocess.CalledProcessError:
                print('WARNING: Cannot checkout package %s. Skipping.' % self.name, file=sys.stderr)
                return False

        return True

    @retry(subprocess.CalledProcessError)
    def checkout(self, cwd=None, spec_only=False):
        """Check out a package from the repository"""
        downstream = self._downstream
        if cwd is None:
            cwd = downstream.PKGROOT.expanduser()

        cmd = ['mgarepo', 'co']
        if downstream.DISTRO:
            cmd.extend(('-d', downstream.DISTRO))
        if spec_only:
            cmd.append('-s')
        cmd.append(self.name)
        return subprocess.check_call(cmd, stdin=subprocess.DEVNULL, cwd=cwd)

    @retry(subprocess.CalledProcessError)
    def checkin(self, changes, cwd=None):
        """Check in changes to the repository"""

        if cwd is None:
            cwd = self.path

        cmd = ['mgarepo', 'ci', '-m', changes]
        return subprocess.check_call(cmd, cwd=cwd)

    def submit(self, cwd=None):
        """Submit a package to the buildsystem

        If a specific distro release is chosen, the section will be set to
        core/updates_testing."""
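        # e.g. with DISTRO set to '9' the command built below becomes (sketch):
        #   mgarepo submit --define section=core/updates_testing -t 9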
        downstream = self._downstream
        if cwd is None:
            cwd = self.path

        cmd = ['mgarepo', 'submit']
        if downstream.DISTRO:
            cmd.extend(('--define', 'section=core/updates_testing', '-t', downstream.DISTRO))
        elif downstream.SECTION:
            cmd.extend(('--define', 'section={0}'.format(downstream.SECTION)))

        # Retry the submission several times; some dependencies might still be building at the same time
        @retry(subprocess.CalledProcessError, tries=10, delay=300, backoff=1.5)
        def _submit():
            subprocess.check_call(cmd, cwd=cwd)
        _submit()


def write_file(path, data):
    """Write to a file by creating a temporary file and renaming that to the
    new file"""

    with tempfile.NamedTemporaryFile(mode='w+t', dir=path.parent, delete=False, encoding='utf-8') as fdst:
        fdst.write(data)
        fdst.flush()
        os.replace(fdst.name, path)
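
# Minimal usage sketch (path and contents are illustrative):
#   write_file(Path('/tmp/example.spec'), 'contents\n')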

def _cmd_checkout_multi(args):
    package_name, what_to_print, options = args

    print(what_to_print)

    try:
        Downstream.package(package_name).checkout(spec_only=options.spec_only)
    except subprocess.CalledProcessError:
        pass

def cmd_checkout(options):
    """Check out various packages in parallel"""
    if options.all:
        packages = ((package_name, package_name, options) for package_name in Downstream().packages)
    elif options.package:
        packages = ((package_name, package_name, options) for package_name in options.package)
    else:
        packages = ((l[0], "%s => %s" % (l[0], l[1]), options) for l in sorted(join_streams(auto_update=False)))

    if options.debug:
        for args in packages:
            _cmd_checkout_multi(args)
    else:
        with concurrent.futures.ProcessPoolExecutor(max_workers=8) as executor:
            executor.map(_cmd_checkout_multi, packages)

def join_streams(show_version=False, only_diff_version=False, auto_update=True):
    """Links upstream modules with downstream packages

    To figure out the downstream name of an upstream package, the Downstream
    class has logic to extract the name from a tarball file.

    If information is requested whereby the spec file is needed this function
    will automatically checkout any package"""
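    # Yields tuples of:
    #   (package_name, upstream_module, package_version, spec_version, downstream_files)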

    upstream = Upstream().names
    downstream = Downstream()

    matches = upstream & set(downstream.tarballs.keys())
    for module in matches:
        for package_name in downstream.tarballs[module]:
            package_version = downstream.tarballs[module][package_name]
            spec_version = None
            package = Downstream.package(package_name)
            cwd = package.path

            if show_version or only_diff_version:
                # ensure package is checked out
                if not cwd.exists():
                    try:
                        package.checkout()
                    except subprocess.CalledProcessError:
                        # XXX - ignoring packages which cannot be checked out
                        continue

                try:
                    spec_version = package.spec.version
                except subprocess.CalledProcessError:
                    spec_version = 'N/A'

                # in case upstream version is newer, update checkout
                if auto_update and package_version != spec_version and version_cmp(package_version, spec_version) == 1:
                    try:
                        package.checkout()
                        spec_version = package.spec.version
                    except subprocess.CalledProcessError:
                        pass

            if only_diff_version and package_version == spec_version:
                continue

            yield (package_name, module, package_version, spec_version, downstream.files[package_name])

def cmd_group_owner(options):
    """Show the packages of a rpm group"""


    groups = set(options.group)

    cmd = ["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]
    output = [pkg.split("\t") for pkg in subprocess.check_output(cmd, encoding='utf-8').splitlines()]
    if not output:
        return

    # Filter by groups
    output = [pkg for pkg in output if pkg[0] in groups]
    if not output:
        return

    packages = {}
    for group, name, sourcerpm, version, release in output:
        if group not in packages:
            packages[group] = {}

        source = sourcerpm if sourcerpm else name
        for end in (".src.rpm", "-%s-%s" % (version, release)):
            if source.endswith(end):
                source = source[:len(source) - len(end)]

        if source not in packages[group]:
            packages[group][source] = set()

        packages[group][source].add(name)


    # 'mgarepo maintdb get' prints lines like '<package> <maintainer>';
    # rpartition(" ")[::2] turns each line into a (package, maintainer) pair.
    maints = dict(
        line.rpartition(" ")[::2]
        for line in subprocess.check_output(["mgarepo", "maintdb", "get"], encoding='utf-8').splitlines()
    )

    def get_output(maints, packages):
        for source in packages:
            maint = maints.get(source, "?")

            yield "\t".join((maint, source, ",".join(sorted(packages[source]))))

    first = True
    for group in packages:
        if first:
            first = False
        else:
            print("")
            print("")
        print(group)
        print("")

        for line in sorted(get_output(maints, packages[group])):
            print(line)

def cmd_cleanup(_):
    """Clean up the package root

    Removes things such as:
      - unneeded files in SOURCES (e.g. old tarballs)

    In future will also:
      - clean old directories in BUILDDIR
      - clean old files in RPMS and SRPMS"""


    root = Downstream.PKGROOT.expanduser()

#    packages = set(Downstream().packages)

    dirs = {path for path in root.glob('*') if path.is_dir()}

#    dirs = dirs - packages

    import pysvn # pylint: disable=import-outside-toplevel
    dirs = [path for path in dirs if path.joinpath('SOURCES', 'sha1.lst').exists()
            and path.joinpath('SPECS', '%s.spec' % path.name).exists()]

    curtime = time.time()
    curtime_recent = curtime - (36 * 60 * 60)           # 36 hours ago
    curtime_ancient = curtime - (180 * 24 * 60 * 60)    # 180 days ago

    for path in dirs:
        # Only select the packages which have not been modified for 36 hours
        mtime = path.joinpath('SPECS', '%s.spec' % path.name).stat().st_mtime
        if mtime > curtime_recent:
            print('INFO: Package was too recently modified: %s' % path.name)
            continue

        # Remove any packages not touched for 180 days
        if mtime < curtime_ancient:
            print('INFO: Package is ancient, removing: %s' % path.name)
            shutil.rmtree(path)
            continue


        # Cleanup BUILD and BUILDROOT directories
        for cleanup_dir in ('BUILD', 'BUILDROOT'):
            for cleanup_path in (cleanup_path for cleanup_path in path.joinpath(cleanup_dir).glob('*')
                                 if cleanup_path.is_dir()
                                 and cleanup_path.stat().st_mtime < curtime_recent):
                shutil.rmtree(cleanup_path)

        try:
            binaries = {line.split('  ', 1)[1]
                        for line in path.joinpath('SOURCES', 'sha1.lst').open().read().splitlines()}
        except IndexError:
            print('ERROR: Problem parsing the sha1.lst of package %s' % path.name, file=sys.stderr)
#            shutil.rmtree(path)
#            Downstream.package(path).checkout()
            continue

        vcs = pysvn.Client()
        stats = [stat for stat in vcs.status(str(path.joinpath('SOURCES')), depth=pysvn.depth.immediates) # pylint: disable=no-member
                 if stat.text_status == pysvn.wc_status_kind.unversioned # pylint: disable=no-member
                 and Path(stat.path).name not in binaries]  # pylint: disable=no-member

        if stats:
            print(path)
            print(", ".join(Path(stat.path).name for stat in stats))
            for stat in stats:
                stat_path = Path(stat.path)
                if stat_path.is_file():
                    stat_path.unlink()
                elif stat_path.is_dir():
                    shutil.rmtree(stat_path)

def cmd_ls(options):
    """Show upstream module names, downstream package names

    Optionally can even show version numbers"""

    streams = join_streams(show_version=options.show_version, only_diff_version=options.diff)
    if options.sort:
        # Sort packages on the line number in the file
        sort_helper = dict(zip(options.sort.read().splitlines(), itertools.count()))

        streams = sorted(streams, key=lambda a: (sort_helper.get(a[1], 9999), a[0]))
    else:
        streams = sorted(streams)

    for package_name, module, package_version, spec_version, _ in streams:
        sys.stdout.write(package_name)
        if options.spec:
            sys.stdout.write('/SPECS/%s.spec' % package_name)
        if options.upstream:
            sys.stdout.write("\t%s" % module)
        if options.show_version:
            sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        print()

def cmd_check_version(_):
    """Check for version mismatches between what's available in the spec
    version and as a source rpm.

    Example usage: locally check for failed builds"""


    streams = join_streams(show_version=True)

    for package_name, _, package_version, spec_version, _ in streams:
        if package_version == spec_version:
            continue

        sys.stdout.write(package_name)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        sys.stdout.write("\n")

def cmd_check_latest(options):
    """Check if a package has a newer version upstream"""

    streams = join_streams(show_version=True)

    for package_name, module, package_version, spec_version, _ in streams:
        upgrade = set()
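        # Upgrade flags: '~' spec and srpm versions differ; 'L'/'l' the latest
        # upstream version is newer/older than the spec version; 'S'/'s' the
        # same for the highest 'safe' (same major.minor) upstream version.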
        sys.stdout.write(package_name)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))

        safe_max_version = get_safe_max_version(spec_version, module=module)

        versions = Upstream.versions(module)
        if package_version != spec_version and spec_version != 'N/A' and package_version != 'N/A':
            upgrade.add('~')
        if versions:
            latest_version = get_latest_version(versions)
            safe_version = get_latest_version(versions, safe_max_version)

            cmp_latest = version_cmp(latest_version, spec_version)
            if cmp_latest < 0:
                latest_version = 'N/A'
                upgrade.add('l')
            elif cmp_latest > 0:
                upgrade.add('L')

            cmp_safe = version_cmp(safe_version, spec_version)
            if cmp_safe < 0:
                safe_version = 'N/A'
                upgrade.add('s')
            elif cmp_safe > 0:
                upgrade.add('S')

            sys.stdout.write("\t%s" % latest_version)
            sys.stdout.write("\t%s" % safe_version)
            sys.stdout.write("\t%s" % "".join(sorted(upgrade)))

        print()

        if 'S' in upgrade and options.submit and not Downstream.DISTRO:
            cmd = ['mgagnome', 'increase', package_name, safe_version]
            subprocess.call(cmd, cwd=Downstream.package(package_name).path)

def cmd_patches(options):
    """List files with extension .patch or .diff as found in the source rpms"""
    root = Downstream.PKGROOT.expanduser()

    for package_name, module, _, _, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:

                this_patch = Patch(root.joinpath(package_name, 'SOURCES', filename), show_path=options.path)
                print("\t".join((module, package_name, str(this_patch))))

def cmd_check_prep(options):
    """Check if the %prep stage can be ran sucessfully

    This runs the %prep stage without dependencies"""
    package = Downstream.package(options.package)
    package.spec.check_and_update_patches()

def _cmd_clean_spec_multi(args):
    options, package_name = args

    print(package_name)
    package = Downstream.package(package_name)
    cwd = package.path

    if not package.ensure_checkout():
        return False

    spec = package.spec

    try:
        if not spec.ensure_no_local_changes(options.force):
            return False
    except subprocess.CalledProcessError:
        # Package was probably not checked out properly
        print("ERROR: cannot clean spec file for %s" % package_name, file=sys.stderr)
        return False

    made_changes = False

    # Convert perl- and -devel buildrequires into perl() and pkgconfig() requires
    if options.convert_br:
        if spec.convert_buildrequires(explain_changes=not options.doit):
            made_changes = True

    # Clean the spec file
    if spec.clean_spec():
        made_changes = True

    # If we made it this far, checkin the changes
    if made_changes:
        if options.doit:
            package.checkin(spec.changes, cwd=cwd)
        else:
            # show the diff and undo all changes
            print(spec.changes)
            spec.ensure_no_local_changes(force=True)

    return made_changes

def _cmd_check_spec_multi(args):
    _, package_name = args
    package = Downstream.package(package_name)

    if not package.spec_path.exists():
        return False

    spec = package.spec
    try:
        # Accessing the patches property forces the spec file to be parsed
        spec.patches
    except SpecFileError:
        print('ERROR: Broken spec file for package %s' % package_name, file=sys.stderr)
        return False

    return True


def cmd_check_spec(options):
    """Check if the spec files can be parsed by rpm"""
    if options.all:
        packages = Downstream().packages
    else:
        packages = options.package if options.package else (l[0] for l in join_streams())


    if options.debug:
        for package_name in packages:
            _cmd_check_spec_multi((options, package_name))
    else:
        workers = os.cpu_count() or 4
        with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
            executor.map(_cmd_check_spec_multi, ((options, package_name) for package_name in packages))



def cmd_clean_spec(options):
    """Clean spec files

    Updates spec files to remove known deprecated statements from spec files

    Can optionally also change the BuildRequires"""
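    # Typical invocation (sketch): 'mgagnome clean-spec -a --convert' cleans
    # every downstream package and converts BuildRequires where possible.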

    if options.all:
        packages = Downstream().packages
    else:
        packages = options.package if options.package else (l[0] for l in join_streams())


    if options.debug:
        for package_name in packages:
            _cmd_clean_spec_multi((options, package_name))
    else:
        workers = os.cpu_count() or 4
        # Hack: warm the alternative provides cache in the parent process so
        # the forked workers inherit it instead of each rebuilding it
        if options.convert_br:
            Downstream.alternative_provides('XXXX')
        with concurrent.futures.ProcessPoolExecutor(max_workers=workers) as executor:
            executor.map(_cmd_clean_spec_multi, ((options, package_name) for package_name in packages))

def cmd_new_release(options):
    """Increase the release of a package (source rpm) and submit that to the buildsystem

    Also called a rebuild"""
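    # Typical invocation (sketch): 'mgagnome rebuild -m "<reason>" <package>'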

    success = True
    for pkg in options.package:
        # Determine the package name
        if options.upstream:
            try:
                package_name = Downstream().get_downstream_from_upstream(pkg, options.version)[0]
            except ValueError as exc:
                print("ERROR: %s" % exc, file=sys.stderr)
                success = False
                continue
        else:
            package_name = pkg

        package = Downstream.package(package_name)
        cwd = package.path

        # Checkout package to ensure the checkout reflects the latest changes
        try:
            package.checkout()
        except subprocess.CalledProcessError:
            subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
            success = False
            continue

        # SpecFile class handles the actual version+release change
        # XXX - module should reflect upstream name, this gives it the package name
        spec = package.spec
        cur_release = spec.release
        try:
            new_release = int(cur_release)+1
        except ValueError:
            print("ERROR: Cannot increase the release for package %s" % package_name, file=sys.stderr)
            success = False
            continue
        cur_version = spec.version
        print("%s-%s => %s-%s" % (cur_version, cur_release, cur_version, new_release))

        # XXX - Duplicate check as should not revert changes if specfile has already been changed
        if not spec.ensure_no_local_changes(options.force):
            success = False
            continue

        if not spec.update_release(new_release, options.reason, force=options.force):
            subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd, stdout=subprocess.DEVNULL)
            success = False
            continue

        try:
            # If we made it this far, checkin the changes
            package.checkin(spec.changes, cwd=cwd)

            # Submit is optional
            if options.submit:
                package.submit()
        except subprocess.CalledProcessError:
            success = False
            continue

    if not success:
        sys.exit(1)

def cmd_package_new_version(options):
    """Increase the version number of a package (source rpm)

    Can optionally submit the package to the buildsystem."""
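    # Typical invocation (sketch):
    #   mgagnome increase [--hash <sha256-hexdigest>] <package> <version>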

    # Determine the package name
    if options.upstream:
        try:
            package_name = Downstream().get_downstream_from_upstream(options.package, options.version)[0]
        except ValueError as exc:
            print("ERROR: %s" % exc, file=sys.stderr)
            sys.exit(1)
    else:
        package_name = options.package

    package = Downstream.package(package_name)

    # Directory the package is located in
    cwd = package.path

    # Checkout package to ensure the checkout reflects the latest changes
    try:
        package.checkout()
    except subprocess.CalledProcessError:
        subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    # XXX - module should reflect upstream name, this gives it the package name
    spec = package.spec
    print("%s => %s" % (spec.version, options.version))

    # XXX - Duplicate check as should not revert changes if specfile has already been changed
    if not spec.ensure_no_local_changes(options.force):
        sys.exit(1)

    if not spec.update_version(options.version, force=options.force):
        # XXX - hack to automatically revert changes when auto upgrading from ftp release list
        if options.hexdigest is not None:
            subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd, stdout=subprocess.DEVNULL)
        sys.exit(1)

    # Check hash, if given
    if options.hexdigest is not None:
        sources = [name for name, value in spec.sources.items() if '://' in value[0]]
        if not sources:
            print("ERROR: Cannot determine source file (for hash check)!", file=sys.stderr)
            sys.exit(1)

        # If there are multiple sources, try to see if there is a preferred name
        # --> needed for metacity hash check (multiple tarball sources)
        if len(sources) > 1:
            preferred_name = '%s-%s.tar.xz' % (package_name, options.version)
            if preferred_name in sources:
                sources = [preferred_name]

        for filename in sources:
            path = cwd.joinpath("SOURCES", filename)
            if not _is_valid_hash(path, options.algo, options.hexdigest):
                print("ERROR: Hash file failed check for %s!" % path, file=sys.stderr)
                print("ERROR: Reverting changes!", file=sys.stderr)
                subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
                sys.exit(1)

    try:
        # If we made it this far, checkin the changes
        package.checkin(spec.changes, cwd=cwd)

        # Submit is optional
        if options.submit:
            package.submit()

    except subprocess.CalledProcessError:
        sys.exit(1)

def cmd_parse_ftp_release_list(options):
    """Submit packages by parsing the GNOME ftp-release-list email

    """

    def _send_reply_mail(contents, orig_msg, to_addr, packages=None, error=False):
        """Send an reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')

        if error:
            # XXX - ugly
            contents.seek(0)
            lastline = contents.read().rstrip().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            # Remove things like "   - " (youri output from mgarepo submit)
            lastline = re.sub(r'^\s+-\s+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""

        if packages:
            subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror)
        else:
            subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror)

        msg['Subject'] = subject
        msg['To'] = to_addr
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--', to_addr]
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        proc.stdin.write(msg.as_bytes())
        proc.stdin.flush()
        proc.stdin.close()
        proc.wait()


    msg = email.message_from_file(sys.stdin)

    if options.mail:
        stdout = tempfile.TemporaryFile(mode='w+t', encoding='utf-8')
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    try:
        module = msg['X-Module-Name']
        version = msg['X-Module-Version']
        hexdigest = msg['X-Module-SHA256-tar.xz']
    except KeyError as exc:
        print("ERROR: %s" % exc, file=stderr)
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)

        sys.exit(1)

    try:
        packages = Downstream().get_downstream_from_upstream(module, version)
    except (ValueError, SpecFileError) as exc:
        stderr.write("ERROR: %s\n" % exc)
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)

        sys.exit(1)

    if options.wait or options.fork:
        # maildrop aborts and will try to deliver after 5min
        # fork to avoid this
        if os.fork() != 0:
            sys.exit(0)

    if options.wait:
        # wait SLEEP_INITIAL after the message was sent
        secs = SLEEP_INITIAL
        parsed_date = email.utils.parsedate_tz(msg['Date'])
        if parsed_date is not None:
            msg_time = email.utils.mktime_tz(parsed_date)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0:
            time.sleep(secs)

    error = False
    for package_name in packages:
        cmd = ['mgagnome', 'increase', '--hash', hexdigest]
        if options.submit:
            cmd.append('--submit')
        if options.force:
            cmd.append('--force')
        cmd.extend((package_name, version))
        if subprocess.call(cmd, stdout=stdout, stderr=stderr):
            error = True

    if options.mail:
        _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error)

def main():
    """Parse arguments and call main subfunction

    The Subfunctions are the functions whose name starts with cmd_"""


    description = """Mageia GNOME commands."""
    epilog = """Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description, epilog=epilog)
    parser.add_argument("-l", "--limit", type=argparse.FileType('r', 0),
                        dest="limit_upstream", metavar="FILE",
                        help="File containing upstream names")
    parser.add_argument("-p", "--root", type=Path, action="store", dest="PKGROOT",
                        help="Package root directory")
    parser.add_argument("-d", "--distro", action="store", dest="distro",
                        help="Distribution release")
    parser.add_argument("--debug", action="store_true", dest="debug",
                        help="Use for debugging")
    parser.set_defaults(
        debug=False, PKGROOT=Downstream.PKGROOT
    )

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')
    subparser = subparsers.add_parser('check-latest', help='check for latest version of packages')
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Increase version for stable upgrades and submit")
    subparser.set_defaults(
        func=cmd_check_latest, submit=False
    )

    subparser = subparsers.add_parser('check-prep', help='check prep phase')
    subparser.add_argument("package", help="Package name")
    subparser.set_defaults(
        func=cmd_check_prep
    )

    subparser = subparsers.add_parser('check-spec', help='check if spec file is ok')
    subparser.add_argument("package", help="Package name", nargs='*')
    subparser.add_argument("-a", "--all", action="store_true", dest="all",
                           help="checkout all Downstream packages")
    subparser.set_defaults(
        func=cmd_check_spec
    )

    subparser = subparsers.add_parser('check-version', help='check if spec version and downstream version match')
    subparser.set_defaults(
        func=cmd_check_version
    )

    subparser = subparsers.add_parser('clean-spec', help='clean specfile')
    subparser.add_argument("package", help="Package name", nargs='*')
    subparser.add_argument("-d", "-s", action="store_true", dest="doit", help="submit the changes")
    subparser.add_argument("-f", "--force", action="store_true")
    subparser.add_argument("-a", "--all", action="store_true", dest="all",
                           help="checkout all Downstream packages")
    subparser.add_argument("--convert", action="store_true", dest="convert_br",
                           help="convert -buildrequirements to perl/pkgconfig if possible")
    subparser.set_defaults(
        func=cmd_clean_spec, doit=False, convert_br=False, all=False, force=False
    )

    subparser = subparsers.add_parser('cleanup', help='cleanup pkg directory')
    subparser.set_defaults(
        func=cmd_cleanup
    )

    subparser = subparsers.add_parser('co', help='checkout all GNOME packages')
    subparser.add_argument("-a", "--all", action="store_true", dest="all",
                           help="checkout all Downstream packages")
    subparser.add_argument("-s", action="store_true", dest="spec_only",
                           help="only checkout SPECS/ directory")
    subparser.add_argument("package", help="Package name", nargs='*')
    subparser.set_defaults(
        func=cmd_checkout, all=False
    )

    subparser = subparsers.add_parser('gnome-release-email',
                                      help='submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("--fork", action="store_true",
                           help="Fork as quickly as possible")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-f", "--force", action="store_true",
                           help="Force submission")
    subparser.set_defaults(
        func=cmd_parse_ftp_release_list, force=False, wait=False, fork=False
    )

    subparser = subparsers.add_parser('group-owner', help='list packages by group')
    subparser.add_argument('group', metavar="GROUP", nargs='+')
    subparser.set_defaults(
        func=cmd_group_owner
    )

    subparser = subparsers.add_parser('increase', help='increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("--no-submit", action="store_false", dest="submit",
                           help="Do not commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms_available, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest",
                           help="Hexdigest of the hash")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=argparse.SUPPRESS, upstream=False,
        hexdigest=None, algo="sha256", force=False
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument("--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument("--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.add_argument("--sort", type=argparse.FileType('r', 0),
                           dest="sort", metavar="FILE",
                           help="Sort packages according to order in given FILE")
    subparser.add_argument("--spec", action="store_true", dest="spec",
                           help="Give spec file location")
    subparser.set_defaults(
        func=cmd_ls, upstream=False, show_version=False, diff=False
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('rebuild', help='increase release')
    subparser.add_argument("package", help="Package name", nargs="*")
    subparser.add_argument("-m", "--reason", dest="reason", required=True, help="Reason for the rebuild")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("--no-submit", action="store_false", dest="submit",
                           help="Do not commit changes and submit")
    subparser.set_defaults(
        func=cmd_new_release, submit=argparse.SUPPRESS, upstream=False,
        force=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()
    if options.limit_upstream:
        Upstream.limit = set(options.limit_upstream.read().strip("\n").splitlines())

    if not hasattr(options, 'submit'):
        options.submit = not options.distro

    Downstream.PKGROOT = options.PKGROOT
    if options.distro:
        Downstream.PKGROOT = options.PKGROOT.joinpath(options.distro)
        Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source," \
                "Core {0} Updates Testing Source".format(options.distro)
        Downstream.DISTRO = options.distro

    log_format = '%(levelname)s: %(message)s'
    logging.basicConfig(format=log_format)
    log = logging.getLogger(None)
    if options.debug:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)


    try:
        options.func(options)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError as exc:
        if exc.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    # The shebang's -u keeps this process unbuffered; the environment
    # variable propagates unbuffered output to child processes as well.
    os.environ['PYTHONUNBUFFERED'] = '1'
    main()