#!/usr/bin/python3 -u
"""Mageia GNOME commands."""
# A lot of the code comes from ftpadmin, see
# https://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
# Written by Olav Vitters

from functools import wraps, lru_cache

# basic modules:
import os
import os.path
import sys
import re
import subprocess

# command line parsing, error handling:
import argparse
import errno

# overwriting files by moving them (safer):
import tempfile
import shutil

# getting links from HTML document:
from html.parser import HTMLParser
import urllib.request
import urllib.error
import urllib.parse

# for checking hashes
import hashlib

# for parsing ftp-release-list emails
import email
from email.mime.text import MIMEText

# to be able to sleep for a while
import time

# packages --sort
import itertools

# automatically dropping merged patches
import shlex

import concurrent.futures

# for merging comments in order
import collections

# for debugging output
import pprint

# check-latest
import requests

# version comparison:
import rpm

SLEEP_INITIAL = 180
SLEEP_REPEAT = 30
SLEEP_TIMES = 30

re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*')
re_version = re.compile(r'([-.]|\d+|[^-.\d]+)')

def retry(exceptions, tries=4, delay=3, backoff=2, logger=None):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    :param exceptions: the exception to check. may be a tuple of
        exceptions to check
    :type exceptions: Exception or tuple
    :param tries: number of times to try (not retry) before giving up
    :type tries: int
    :param delay: initial delay between retries in seconds
    :type delay: int
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    :type backoff: int
    :param logger: logger to use. If None, print
    :type logger: logging.Logger instance
    """
    def deco_retry(func):

        @wraps(func)
        def func_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return func(*args, **kwargs)
                except exceptions as exc:
                    msg = "%s, Retrying in %d seconds..." % (str(exc), mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return func(*args, **kwargs)

        return func_retry  # true decorator

    return deco_retry

def version_cmp(version_a, version_b):
    """Compares two versions

    Returns:
        -1 if a < b
         0 if a == b
         1 if a > b
    """
    return rpm.labelCompare(('1', version_a, '1'), ('1', version_b, '1'))

def get_latest_version(versions, max_version=None):
    """Gets the latest version number

    If max_version is specified, gets the latest version number before
    max_version"""
    latest = None
    for version in versions:
        if (latest is None or version_cmp(version, latest) > 0) \
                and (max_version is None or version_cmp(version, max_version) < 0):
            latest = version
    return latest
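
# Illustrative examples (not executed; results follow rpm's labelCompare
# semantics used by version_cmp above):
#   version_cmp('3.24.1', '3.24.0')                    -> 1
#   get_latest_version(['3.22.0', '3.24.1', '3.25.1']) -> '3.25.1'
#   get_latest_version(['3.22.0', '3.24.1', '3.25.1'], max_version='3.25')
#                                                      -> '3.24.1'
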
MAJOR_VERSIONS = {
    # NAMES MUST BE IN LOWERCASE!
    'networkmanager': set(('0.9',)),
    'networkmanager-applet': set(('0.9',)),
    'networkmanager-openconnect': set(('0.9',)),
    'networkmanager-openvpn': set(('0.9',)),
    'networkmanager-pptp': set(('0.9',)),
    'networkmanager-vpnc': set(('0.9',))
}

def get_majmin(version, module=None):
    """Split a version into (major, minor), honouring any per-module
    overrides from MAJOR_VERSIONS."""
    nrs = version.split('.')

    if module and module.lower() in MAJOR_VERSIONS:
        module_versions = [version.split(".") for version in MAJOR_VERSIONS[module.lower()]]

        nrstest = nrs[:]
        while len(nrstest) >= 2:
            if nrstest in module_versions:
                return (".".join(nrs[:len(nrstest)]), nrs[len(nrstest)])

            nrstest.pop()

    return (nrs[0], nrs[1])

def get_safe_max_version(version, module=None):
    """Return the first version a safe upgrade must stay below: the next
    unstable major.minor after the given version."""
    if version is None:
        return None

    match = re_majmin.match(version)
    if not match:
        return None

    majmin = get_majmin(match.group(1), module)
    min_nr = int(majmin[1])

    # Stable (even) minor: add 1; unstable (odd) minor: add 2.
    # Either way the result is the next unstable minor.
    min_nr += 1 if min_nr % 2 == 0 else 2

    return "%s.%d" % (majmin[0], min_nr)
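
# Illustrative examples (assuming the GNOME even=stable / odd=unstable
# minor-version convention):
#   get_majmin('3.24.1')                   -> ('3', '24')
#   get_majmin('0.9.8', 'NetworkManager')  -> ('0.9', '8')   (MAJOR_VERSIONS override)
#   get_safe_max_version('3.24.1')         -> '3.25'
#   get_safe_max_version('3.23.2')         -> '3.25'
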
def judge_version_increase(version_old, version_new, module=None):
    """Judge quality of version increase:

    Returns a tuple containing judgement and message

    Judgement:
      Less than 0: Error
      0 to 4: Better not
      5+: Ok"""
    versions = (version_old, version_new)

    # First do a basic version comparison to ensure version_new is actually newer
    compare = version_cmp(version_new, version_old)

    if compare == 0:
        # 1.0.0 -> 1.0.0
        return (-2, "Already at version %s!" % (version_old))

    if compare != 1:
        # 1.0.1 -> 1.0.0
        return (-3, "Version %s is older than current version %s!" % (version_new, version_old))

    # Version is newer; now check whether it follows the GNOME versioning scheme
    majmins = [get_majmin(ver, module) for ver in versions if re_majmin.match(ver) is not None]

    if len(majmins) == 1:
        return (-1, "Version number scheme changes: %s" % (", ".join(versions)))

    if len(majmins) == 0:
        return (0, "Unsupported version numbers: %s" % (", ".join(versions)))

    # Follows GNOME versioning scheme
    # Meaning: x.y.z
    #          x = major
    #          y = minor : even if stable
    #          z = micro

    # Major+minor the same? Then go ahead and upgrade!
    if majmins[0] == majmins[1]:
        # Majmin of both versions are the same, looks good!
        # 1.1.x -> 1.1.x or 1.0.x -> 1.0.x
        return (10, None)

    # Check/ensure major version number is the same
    if majmins[0][0] != majmins[1][0]:
        # 1.0.x -> 2.0.x
        return (1, "Major version number increase")

    # Minor indicates stable/unstable
    devstate = (int(majmins[0][1]) % 2 == 0, int(majmins[1][1]) % 2 == 0)

    # Upgrading to unstable is weird
    if not devstate[1]:
        if devstate[0]:
            # 1.2.x -> 1.3.x
            return (1, "Stable to unstable increase")

        # 1.3.x -> 1.5.x
        return (3, "Unstable to unstable version increase")

    # Unstable => stable is always ok
    if not devstate[0]:
        # 1.1.x -> 1.2.x
        return (5, "Unstable to stable")

    # Can only be an increase of minors from one stable to the next
    # 1.0.x -> 1.2.x
    return (4, "Stable version increase")

def line_input(file):
    """Yield lines from a file object, stripping any trailing newline"""
    for line in file:
        if line[-1] == '\n':
            yield line[:-1]
        else:
            yield line

def distinct(iterable, keyfunc=None):
    """Yield items from iterable, skipping duplicates (as decided by keyfunc)"""
    seen = set()
    for item in iterable:
        key = item if keyfunc is None else keyfunc(item)
        if key not in seen:
            seen.add(key)
            yield item

def call_editor(filename):
    """Open filename in an editor ($VISUAL, $EDITOR, then common fallbacks)"""
    editors = []
    for varname in 'VISUAL', 'EDITOR':
        if varname in os.environ:
            editors.append(os.environ[varname])
    editors.extend(('/usr/bin/editor', 'vi', 'pico', 'nano', 'joe'))

    for editor in editors:
        try:
            ret = subprocess.call([editor, filename])
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                continue
            raise

        if ret == 127:
            # shell-style "command not found"; try the next candidate
            continue

        return True

class URLLister(HTMLParser):
    """Collect the href targets of all anchor tags in an HTML document"""
    def reset(self):
        HTMLParser.reset(self)
        self.urls = []

    def handle_starttag(self, tag, attrs):
        if tag == 'a':
            href = [v for k, v in attrs if k == 'href']
            if href:
                self.urls.extend(href)

def is_valid_hash(path, algo, hexdigest):
    """Check a file against a known hash digest"""
    if algo not in hashlib.algorithms_available:
        raise ValueError("Unknown hash algorithm: %s" % algo)

    local_hash = getattr(hashlib, algo)()

    with open(path, 'rb') as fp:
        data = fp.read(32768)
        while data:
            local_hash.update(data)
            data = fp.read(32768)

    return local_hash.hexdigest() == hexdigest

re_clean_1 = re.compile(r'\[[^]()]+\]$')
def clean_pkgconfig_prov(prov):
    prov = re_clean_1.sub('', prov)
    return prov

class SpecFileError(Exception):
    """Used for problems in the spec file"""
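
# Illustrative usage (hypothetical file name and digest):
#   is_valid_hash('glib-2.64.2.tar.xz', 'sha256', '4c9e...')  -> bool
#   clean_pkgconfig_prov('pkgconfig(glib-2.0)[2.64.2]')       -> 'pkgconfig(glib-2.0)'
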
class SpecFile():
    re_update_version = re.compile(r'^(?P<pre>Version[ \t]*:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_release = re.compile(r'^(?P<pre>Release[ \t]*:\s*)(?P<release>%mkrel [0-9.]+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
    re_update_patch = re.compile(r'^(?P<pre>Patch0*?)(?P<nr>[0-9]*)(?P<pre2>[ \t]*:\s*)(?P<patch>.+)(?P<post>\s*)\n', re.MULTILINE + re.IGNORECASE)

    re_br_part = re.compile(r'(?P<br>[^\s%{},<>=][^\s%{},<>=]*)\b(?P<post>\s*(?:(?P<operator>=|>=|<=|=<|=>|>|<)\s*(?P<version>[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+)\b)?)')
    #re_update_br = re.compile(r'^(?P<pre>BuildRequires:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+[^\s%{},]+?)?\s*\n)', re.MULTILINE + re.IGNORECASE)
    re_update_br = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+(?:[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+))?\s*\n)', re.MULTILINE + re.IGNORECASE)
    # re_update_br_unsplit = re.compile(r'^(?P<pre>BuildRequires:\s*)(?P<unsplit>[^\n,]+,[^\n]*)(?P<post>\s*\n)', re.MULTILINE + re.IGNORECASE)

    re_update_br_fix_operator = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*[^\n]*)(?P<operator>=<|=>)(?P<post>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
    re_update_br_unsplit = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<unsplit>(?:%s,?(?:[ \t\f\v]+|$)){2,})(?P<unsplitpost>\n)' % (re_br_part.pattern,), re.MULTILINE + re.IGNORECASE)
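
    # Examples of BuildRequires lines the patterns above should match
    # (illustrative, hypothetical package names):
    #   BuildRequires: gtk+3.0-devel          -> re_update_br
    #   BuildRequires: foo >= 1.2, bar, baz   -> re_update_br_unsplit (one item per line)
    #   BuildRequires: foo => 1.2             -> re_update_br_fix_operator (=> becomes >=)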

    def __init__(self, path, module=None):
        self.path = path
        self.cwd = os.path.dirname(path)
        self.module = module
        self._changes = collections.OrderedDict()
        self._should_rebuild = False
        self._changed_spec_file = False

    @property
    def changes(self):
        return ''.join(self._changes.keys()) if len(self._changes) == 1 else "\n".join(("- %s" % change for change in self._changes.keys()))

    @property
    def made_changes(self):
        return self._changed_spec_file

    @property
    def version(self):
        return subprocess.check_output(["rpm", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).decode("utf-8").splitlines()[0]

    @property
    def should_rebuild(self):
        return self._should_rebuild

    @property
    def release(self):
        return subprocess.check_output(["rpm", "--define", "%dist %nil", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path, "--queryformat", "%{RELEASE}\n"]).decode("utf-8").splitlines()[0]

    def _sources_and_patches(self, flag=None):
        os.chdir(self.cwd)
        rpm.delMacro("_topdir")
        rpm.addMacro("_topdir", os.path.join(self.cwd, '..'))
        trans_set = rpm.ts()
        spec = trans_set.parseSpec(self.path)
        try:
            srclist = spec.sources if isinstance(spec.sources, (list, tuple)) \
                            else spec.sources()
        except ValueError as exc:
            # Reraise this into a more specific exception
            raise SpecFileError from exc
        finally:
            # trans_set.parseSpec can change internal rpm macros (e.g. redefine
            # things like mkrel); reload the config to undo that
            rpm.reloadConfig()
        return dict((os.path.basename(name), [name, 0 if no == 2147483647 and flags == 2 else no]) for name, no, flags in srclist if flag is None or flags == flag)
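
    # Return value shape of _sources_and_patches (illustrative):
    #   {'gnome-shell-3.24.1.tar.xz': ['https://.../gnome-shell-3.24.1.tar.xz', 0],
    #    'fix-build.patch': ['fix-build.patch', 1]}
    # i.e. basename -> [original Source/Patch entry, Source/Patch number].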

    @property
    def patches(self):
        return self._sources_and_patches(flag=2)

    @property
    def sources(self):
        return self._sources_and_patches(flag=1)

    def clean_spec(self):
        re_rm_buildroot = r'^(?:\s*\[[^\n\]\[]+\][ \t]+\&\&[ \t]+)?(?:rm|\%__rm|\%\{__rm\}) *(?:-rf|-fr|-r) *"?(?:[%$]buildroot|[%$]\{buildroot\}|[%$]\{buildroot\}|\$RPM_BUILDROOT|\$RPM_BUILD_ROOT|\$\{RPM_BUILD_ROOT\}|\$RPM_BUILD_DIR)"?/?[ \t]*\n'
        re_clean_spec = [
            # remove %defattr
            ('remove defattr', None, re.compile(r'(?P<keeppre>^\%files(?:[ \t]+[^\n]*)?\n(?:^\%doc [^\n]+\n)?)^\%defattr\s*\(- *, *root *, *root *(?:, *-)?\)\s*\n', re.MULTILINE + re.IGNORECASE)),
            ('remove cleaning buildroot in install', None, re.compile(r'(?P<keeppre>^\%install(?:[ \t]+[^\n]*)?\n)' + re_rm_buildroot + r'\n?', re.MULTILINE + re.IGNORECASE)),
            ('remove clean section', None, re.compile(r'^\%clean[ \t]*\n(?:' + re_rm_buildroot + r')?\s*(?P<keeppost>(?:^#[^%\n]+\n)*^(?:\%files|\%post|\%pre|\%trigger|\%install|\%package|\%check|\%_font_pkg|$(?!.|\n)))', re.MULTILINE + re.IGNORECASE)),
            ('remove buildroot definition', None, re.compile(r'^BuildRoot[ \t]*:[^\n]+\n', re.MULTILINE + re.IGNORECASE)),
            ('remove unneeded setup option', None, re.compile(r'^(?P<keeppre>\%setup -q)(?: -n|n) (?:\%name|\%\{name\})-(?:\%version|\%\{version\})(?P<keeppost>\n)', re.MULTILINE + re.IGNORECASE)),
            ('https for download.gnome.org', r'\g<keeppre>https://\g<keeppost>', re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)http://(?P<keeppost>download.gnome.org/[^\n]+\n)', re.MULTILINE + re.IGNORECASE)),
            ('download.gnome.org instead of ftp.gnome.org', r'\g<keeppre>https://download.gnome.org\g<keeppost>', re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)(?:ftp|http|https)://ftp.gnome.org/pub/GNOME(?P<keeppost>/[^\n]+\n)', re.MULTILINE + re.IGNORECASE)),
            ('restrict what libraries are matched with major numbers', r'\g<keeppre>{,.*}', re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})\*$', re.MULTILINE)),
            ('keep library matching using two lines', r'\g<keeppre>\n\g<keeppre>.*', re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})$\n(?P=keeppre)\{,\.\*\}$', re.MULTILINE)),
            ('make use of autopatch', r'%autopatch -p1', re.compile(r'^%apply_patches$', re.MULTILINE)),
            ('change configure2_5x macro to configure', r'%configure', re.compile(r'^%configure2_5x\b', re.MULTILINE)),
            ('change make macro to make_build', r'%make_build', re.compile(r'^%make\b', re.MULTILINE), True),
            ('change find_lang --with-help into --with-gnome', r'\g<keeppre> --with-gnome\g<keeppost>', re.compile(r'^(?P<keeppre>\s*\%find_lang[^\\\n]+) --with-help(?P<keeppost>[^\\\n]*\n)', re.MULTILINE + re.IGNORECASE)),
            ('change find_lang remove duplicate with_gnome', None, re.compile(r'^(?P<keeppre>\%find_lang[^\\\n]+ --with-gnome) --with-gnome(?P<keeppost>[^\\\n]*\n)', re.MULTILINE + re.IGNORECASE)),
            # Use new Python macros
            ('use new Python macros', r'%py3_build', re.compile(r'^%{__python3} setup.py build$', re.MULTILINE)),
            ('use new Python macros', r'%py3_install', re.compile(r'^%{__python3} setup.py install$', re.MULTILINE)),
            ('use new Python macros', r'%{python3_version}', re.compile(r'%{py3ver}', re.MULTILINE)),
        ]
        re_convert_br = [
            ('remove py_requires', ('python',), re.compile(r'^\%(?:py_requires|\{py_requires\})[ \t]*\n', re.MULTILINE)),
            ('remove py_requires -d', ('python', 'python-devel'), re.compile(r'^\%(?:py_requires[ \t]+-d|\{py_requires[ \t]+-d\})[ \t]*\n', re.MULTILINE)),
        ]

        made_changes = False
        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()
            for reason, change_to, regexp, *extra in re_clean_spec:
                if extra:
                    should_rebuild = extra[0]
                else:
                    should_rebuild = False


                if change_to is None:
                    change_to = ""
                    if 'keeppre' in regexp.groupindex:
                        change_to += r'\g<keeppre>'
                    if 'keeppost' in regexp.groupindex:
                        change_to += r'\g<keeppost>'
                data, nr = regexp.subn(change_to, data)
                if nr:
                    made_changes = True
                    self._changes['SILENT %s' % reason] = True
                    if should_rebuild:
                        self._should_rebuild = True

            # Convert %py_requires and %py_requires -d
            #  - first figure out how a buildrequire is usually defined
            match = self.re_update_br.search(data)
            br_pre = match.group('pre') if match and match.group('pre') else "BuildRequires:\t"

            for reason, new_brs, regexp in re_convert_br:
                match = regexp.search(data)
                if match:
                    # Don't add multiple buildrequires
                    change_to = ""
                    brs_in_file = set()
                    for match2 in self.re_update_br.finditer(data):
                        if match2.group('br') in new_brs:
                            brs_in_file.add(match2.group('br'))

                    for br in set(new_brs) - brs_in_file:
                        change_to += ''.join((br_pre, br, "\n"))
                    data, nr = regexp.subn(change_to, data)
                    if nr:
                        made_changes = True
                        self._changes['SILENT %s' % reason] = True

            # Convert:
            #   %define name SOMETHING
            #   name: %{name}
            # Into
            #   name: SOMETHING
            converted_defines = []
            for search_for in ('name', 'version', 'release', 'summary', 'Summary', 'group'):
                search_for_ignore_case = ''.join(("[%s%s]" % (letter, letter.swapcase()) for letter in search_for))
                re_spec = re.compile(r'^(?P<keeppre>' + re.escape(search_for_ignore_case) + r'[ \t]*:[ \t]*)(?:\%' + re.escape(search_for) + r'|\%\{' + re.escape(search_for) + r'\})(?P<keeppost>[ \t]*\n)', re.MULTILINE)
                re_variable = re.compile(r'^(?P<keeppre>\%define[ \t]+' + re.escape(search_for) + r'[ \t]+(?P<definition>[^\n]+?))(?P<keeppost>[ \t]*\n)', re.MULTILINE)

                match = re_variable.search(data)
                if match and match.group('definition') and len(re_variable.findall(data)) == 1:
                    match2 = re_spec.search(data)
                    if match2:
                        data, nr = re_spec.subn(r'\g<keeppre>' + match.group('definition').replace('\\', '\\\\') + r'\g<keeppost>', data)
                        if nr:
                            made_changes = True
                            data, nr = re_variable.subn('', data)
                            converted_defines.append(search_for)

            if made_changes and converted_defines:
                data = data.lstrip()
                self._changes['SILENT remove variable definition(s) %s' % ", ".join(converted_defines)] = True

            made_changes, data = self._clean_spec_patches(made_changes, data)

            # Overwrite file with new version number
            if made_changes:
                self._changed_spec_file = True
                write_file(self.path, data)

        return made_changes

    def _clean_spec_patches(self, made_changes, data):
        re_autopatch = re.compile(r'^[ \t]*\%autopatch(?:[ \t]+-p(?P<strip>[0-9]+))?$', re.MULTILINE)

        re_patch_header = re.compile('^Patch(?P<nr>[0-9]*)[ \t]*:[ \t]*(?P<patch>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
        re_patch_any = re.compile(r'^[ \t]*\%patch(?P<nr>[0-9]*)', re.MULTILINE)
        re_patch_valid = re.compile(r'^[ \t+]*\%patch(?P<nr>[0-9]*)(?:[ \t]+-p(?P<strip>[0-9]+))?(?:[ \t]+-b[ \t]+\S+)?$\n?', re.MULTILINE)
        re_prep_patches = re.compile(r'^\%setup[^\n]+$(?:' + re_patch_valid.pattern + r'|^#[^%\n]+\n|^[ \t]*(?:%{_bindir}/|%_bindir)?autoreconf[ \t][^\n]+$|\s)+\n\%build', re.MULTILINE)

        give_patchnr = lambda match: (match.group('nr') if len(match.group('nr')) == 1 else match.group('nr').lstrip('0')) if match.group('nr') else "0"

        # Try to convert the individual %patch calls into a single %autopatch

        if re_autopatch.search(data):
            # Ignore if using %autopatch
            return made_changes, data

        patches = self.patches
        if not patches:
            return made_changes, data

        if len(patches) > 5:
            print("NOTICE: More than 5 patches, skipping package", file=sys.stderr)
            return made_changes, data

        if self.uses_apply_patches:
            return made_changes, data

        # XXX -- apparently this is really inefficient with e.g. apache
        match2 = re_prep_patches.search(data)
        patch_nrs_header = set((give_patchnr(match) for match in re_patch_header.finditer(data)))
        patch_nrs_any = set((give_patchnr(match) for match in re_patch_any.finditer(data)))
        patch_nrs_valid = set((give_patchnr(match) for match in re_patch_valid.finditer(match2.group(0)))) if match2 else set()

        if not patch_nrs_header:
            # XXX -- weird, self.patches should've returned 0 already
            return made_changes, data

        if not patch_nrs_header == patch_nrs_any == patch_nrs_valid:
            print("NOTICE: Unable to automatically convert %s patches into %%autopatch (header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any), len(patch_nrs_valid)), file=sys.stderr)
            return made_changes, data

        patch_flags = set((0 if match.group('strip') is None else match.group('strip') for match in re_patch_valid.finditer(match2.group(0))))

        if len(patch_flags) != 1:
            print("NOTICE: Unable to automatically convert patches into as different -p / strip levels used", file=sys.stderr)
            return made_changes, data

        # Whoot, we can convert!!
        change_to = "%%autopatch -p%s\n" % list(patch_flags)[0]
        prep, subs1 = re_patch_valid.subn(change_to.replace('\\', '\\\\'), match2.group(0), count=1)
        prep, subs2 = re_patch_valid.subn('', prep)
        if len(patch_nrs_valid) != subs1 + subs2:
            print("WARNING: Couldn't replace patches?!? Likely error in program logic", file=sys.stderr)
            return made_changes, data

        # First check if patches currently apply
        if not self.check_and_update_patches(check_only=True):
            return made_changes, data

        try:
            change_to = data.replace(match2.group(0), prep, 1)
            self._changed_spec_file = True
            write_file(self.path, change_to)

            # Validate patches still apply
            if self.check_and_update_patches(check_only=True):
                data = change_to
                self._changes['SILENT use autopatch'] = True
                made_changes = True
        finally:
            if not made_changes:
                write_file(self.path, data)

        return made_changes, data

    @property
    def buildrequires(self):
        rpm.delMacro("_topdir")
        rpm.addMacro("_topdir", os.path.join(self.cwd, '..'))
        ts = rpm.ts()
        try:
            spec = ts.parseSpec(self.path)
        except ValueError as exc:
            # Reraise this into a more specific exception
            raise SpecFileError from exc
        finally:
            # ts.parseSpec can change internal rpm macros (e.g. redefine
            # things like mkrel); reload the config to undo that
            rpm.reloadConfig()

        requires = spec.sourceHeader[rpm.RPMTAG_REQUIRES]
        require_flags = spec.sourceHeader[rpm.RPMTAG_REQUIREFLAGS]
        require_ver = spec.sourceHeader[rpm.RPMTAG_REQUIREVERSION]

        br = {}
        for req, flag, ver in itertools.zip_longest(requires, require_flags, require_ver):
            # bitmask other than 15 means the require is (probably?) a require for a trigger or script
            if flag & 15 != flag:
                continue

            ver_cmp = ""
            if flag & rpm.RPMSENSE_LESS:
                ver_cmp += '<'
            if flag & rpm.RPMSENSE_GREATER:
                ver_cmp += '>'
            if flag & rpm.RPMSENSE_EQUAL:
                ver_cmp += '='

            br[req] = (ver_cmp, ver)
        return br

    @property
    def uses_apply_patches(self):
        return subprocess.call(['grep', '-Eq', '^%(apply_patches|autopatch|autosetup)', '--', self.path]) == 0

    def _revert_changes(self):
        """Revert uncommited changes made to spec file"""
        self._changes.clear()
        subprocess.check_call(["svn", "revert", "-R", os.path.join(self.cwd, '..')])

    def remove_patch(self, patchnr, info_patchname=None):
        """Remove a patch from the spec file"""
        # Uses string as well as numeric comparisons to determine which patch to remove
        nrs = [str(patchnr), patchnr]
        if patchnr == 0:
            nrs.append('')

        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()

            data, nr = self.re_update_patch.subn(lambda match: '' if match.group('nr') in nrs or (match.group('nr').isdigit() and int(match.group('nr')) in nrs) else match.group(0), data)

            if not nr:
                print("ERROR: Could not remove patch nr %s!" % patchnr, file=sys.stderr)
                return False

            # Overwrite spec file with the patch removed
            self._changed_spec_file = True
            write_file(self.path, data)

            if info_patchname:
                self._changes['dropped merged patch %s' % info_patchname] = True
            else:
                self._changes['dropped merged patch %s' % patchnr] = True

        subprocess.check_call(['mgarepo', 'sync'], cwd=self.cwd)
        return True

    def check_and_update_patches(self, check_only=False):
        """Check if patches still apply

        Remove any merged patches"""

        LOGLINES = 15

        initial_patches = self.patches
        patches = initial_patches
        uses_apply_patches = self.uses_apply_patches if patches else False

        while True:
            try:
                # Check patches still apply
                subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
            except subprocess.CalledProcessError:
                logfile = os.path.join(self.cwd, 'log.%s' % os.path.splitext(os.path.basename(self.path))[0])

                failed_patch = None
                cmd = None
                cmd_output = []
                cmd_before = (None, None)
                # Determine the last command that failed
                if os.path.exists(logfile):
                    print(logfile)
                    with open(logfile, "r", encoding="utf-8") as f:
                        for line in line_input(f):
                            if line.startswith('+ '):
                                cmd_before = (cmd, cmd_before)
                                cmd = line[2:]
                                cmd_output = []
                            else:
                                cmd_output.append(line)

                cmd_parsed = shlex.split(cmd) if cmd else []
                cmd_before_parsed = shlex.split(cmd_before[0]) if cmd_before[0] else []

                if not check_only and uses_apply_patches and patches and cmd_parsed:
                    if os.path.basename(cmd_parsed[0]) in ('patch', 'cat'):
                        if os.path.exists(cmd_parsed[-1]):
                            failed_patch = os.path.basename(cmd_parsed[-1])
                        elif cmd_parsed[-1].startswith('-') and os.path.exists(cmd_before_parsed[-1]):
                            # for %autopatch as well as %patch
                            #+ /usr/bin/cat /home/src/pkgs/gnome-getting-started-docs/SOURCES/gs-browse-web-firefox.page.patch
                            #+ /usr/bin/patch -p1 -s
                            failed_patch = os.path.basename(cmd_before_parsed[-1])

                    # Patch is merged if there is at least one 'ignored' line and no 'FAILED' line anywhere
                    has_ignored = False
                    has_reversed = False
                    has_failed = False
                    for line in cmd_output:
                        if 'FAILED' in line or 'File to patch:' in line:
                            has_failed = True
                            break

                        if 'ignored' in line:
                            has_ignored = True
                        elif 'saving rejects to file' in line:
                            has_failed = True
                            break

                        if 'Reversed (or previously applied) patch detected' in line:
                            has_reversed = True

                    if has_ignored and has_reversed and not has_failed:
                        # If patch was merged, drop it from spec file and retry
                        print("INFO: Patch has been merged: %s" % failed_patch, file=sys.stdout)
                        if failed_patch in patches:
                            if self.remove_patch(patches[failed_patch][1], failed_patch):
                                # try again
                                patches = self.patches
                                continue

                        print("ERROR: Problem removing merged patch: %s" % failed_patch, file=sys.stderr)
                        return False

                if cmd and len(cmd_output) > LOGLINES:
                    print('+ %s' % cmd, file=sys.stdout)
                    print("\n".join(cmd_output), file=sys.stdout)
                elif os.path.exists(logfile):
                    subprocess.call(['tail', '-n', str(LOGLINES), logfile])

                if failed_patch:
                    print("ERROR: Problem applying patch: %s" % failed_patch, file=sys.stderr)
                elif cmd:
                    print("ERROR: Problem in %%prep phase command: %s" % cmd, file=sys.stderr)
                elif patches:
                    print("ERROR: Problem applying patches and/or %prep phase", file=sys.stderr)
                else:
                    print("ERROR: Problem in %prep phase", file=sys.stderr)
                return False
            finally:
                # the bm command doesn't clean up after itself, and the option it has to do so removes the spec file + sources!
                buildpath = os.path.join(self.cwd, '..', 'BUILD', '%s-%s' % (self.module, self.version))
                if os.path.exists(buildpath):
                    shutil.rmtree(buildpath, ignore_errors=True)

            return True

    def update_br(self, changes, change_description='update buildrequirements'):
        """Update buildrequirement"""

        # XXX - doesn't handle buildrequires with version numbers :-(
        made_changes = False

        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()

            # Change any "," in buildrequires into multiple lines
            data, nr = self.re_update_br_unsplit.subn(lambda match: ''.join((''.join((match.group('pre'), match2.group(0), match.group('unsplitpost'))) for match2 in self.re_br_part.finditer(match.group('unsplit')) if match2.group(0).strip() != '')), data)
            if nr:
                made_changes = True
                self._changes['SILENT one line per buildrequirement'] = True

            # Change =< and => operators into <= and >=
            data, nr = self.re_update_br_fix_operator.subn(lambda match: match.group(0).replace('=>', '>=').replace('=<', '<=') if self.re_update_br.match(match.group(0).replace('=>', '>=').replace('=<', '<=')) else match.group(0), data)
            if nr:
                made_changes = True
                self._changes['SILENT fix operator in buildrequires'] = True

            # Now update buildrequires if any
            data, nr = self.re_update_br.subn(lambda match: ''.join((match.group('pre'), changes[match.group('br')], match.group('post'))) if match.group('br') in changes else match.group(0), data)

            if nr:
                made_changes = True
                self._changes['SILENT %s' % change_description] = True
            elif len(changes) != 0:
                print("ERROR: Could not update buildrequires for %s" % self.module, file=sys.stderr)
                print(changes)
                return False

            # Overwrite file with updated buildrequire
            self._changed_spec_file = True
            write_file(self.path, data)

        return made_changes
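
    # Example `changes` mapping for update_br (hypothetical names):
    #   spec.update_br({'gtk+3.0-devel': 'pkgconfig(gtk+-3.0)'})
    # rewrites the matching BuildRequires line to use the new name.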


    MAX_JUDGEMENT = 5

    def ensure_no_local_changes(self, force=False):
        # XXX - os.path.join is hackish
        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.normpath(os.path.join(self.cwd, '..'))]).decode('utf-8')
        if svn_diff_output != '':
            print(svn_diff_output)
            print("ERROR: Package has uncommitted changes!", file=sys.stderr)
            if not force:
                return False

            # Forcing package submission: revert changes
            try:
                print("WARNING: Force used; reverting svn changes", file=sys.stderr)
                self._revert_changes()
            except subprocess.CalledProcessError:
                return False

        return True

    def _check_can_update(self, force):
        data = None
        if not self.ensure_no_local_changes(force):
            return None

        with open(self.path, "r", encoding="utf-8") as f:
            data = f.read()

            if data.count("%subrel") != 0:
                print("ERROR: %subrel found; don't know what to do!", file=sys.stderr)
                return None

            if data.count("%mkrel") != 1:
                print("ERROR: Multiple %mkrel found; don't know what to do!", file=sys.stderr)
                return None

        return data

    def update_release(self, release, reason, force=False):
        """Update release (usually for rebuilds)"""
        data = self._check_can_update(force)
        if data is None:
            return False

        # XXX - validate current release
        data, nr = self.re_update_release.subn(r'\g<pre>%%mkrel %s\g<post>' % release, data, 1)
        if nr != 1:
            print(data, file=sys.stdout)
            print("ERROR: Could not increase release!", file=sys.stderr)
            return False

        # Overwrite file with new release
        self._changed_spec_file = True
        write_file(self.path, data)

        self._changes['rebuild for %s' % reason] = True

        return True

    def update_version(self, version, force=False, max_judgement=MAX_JUDGEMENT):
        """Update version and reset release"""
        cur_version = self.version

        (judgement, msg) = judge_version_increase(cur_version, version, self.module)

        if judgement < 0:
            print("ERROR: %s!" % (msg), file=sys.stderr)
            return False

        if judgement < max_judgement:
            print("WARNING: %s!" % (msg))
            if not force:
                return False

        data = self._check_can_update(force)
        if data is None:
            return False

        data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
        if nr != 1:
            print("ERROR: Could not increase version!", file=sys.stderr)
            return False

        data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
        if nr != 1:
            print(data, file=sys.stdout)
            print("ERROR: Could not reset release!", file=sys.stderr)
            return False

        # Overwrite file with new version number
        self._changed_spec_file = True
        write_file(self.path, data)

        self._changes['new version %s' % version] = True

        # Verify that RPM also agrees that version number has changed
        if self.version != version:
            print("ERROR: Increased version to %s, but RPM doesn't agree!?!" % version, file=sys.stderr)
            return False

        # Try to download the new tarball various times and wait between attempts
        tries = 0
        while tries < SLEEP_TIMES:
            tries += 1
            if tries > 1:
                time.sleep(SLEEP_REPEAT * 2 ** (tries // 5))

            try:
                # Download new tarball
                subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
                # success, so exit loop
                break
            except subprocess.CalledProcessError as exc:
                # mgarepo sync returns 1 if the tarball cannot be downloaded
                if exc.returncode != 1:
                    self._revert_changes()
                    print("ERROR: Could not download tarball", file=sys.stderr)
                    return False
        else:
            # failed to download tarball
            self._revert_changes()
            print("ERROR: Could not download tarball", file=sys.stderr)
            return False

        return self.check_and_update_patches()

class Patch():
    """Do things with patches"""

    def __init__(self, path, show_path=False):
        """Path: path to patch (might not exist)"""
        self.path = path
        self.show_path = show_path

    def __str__(self):
        return self.path if self.show_path else os.path.basename(self.path)

    @property
    def svn_author(self):
        if not hasattr(self, '_svn_author'):
            try:
                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).decode('utf-8').strip("\n").splitlines()

                for line in contents:
                    if ' | ' not in line:
                        continue

                    fields = line.split(' | ')
                    if len(fields) >= 3:
                        self._svn_author = fields[1]
            except subprocess.CalledProcessError:
                pass

        if not hasattr(self, '_svn_author'):
            return None

        return self._svn_author


class Upstream():

    URL = "https://download.gnome.org/sources/"
    limit = None

    def __init__(self):
        urlopen = urllib.request.build_opener()

        good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')

        # Get the files
        usock = urlopen.open(self.URL)
        parser = URLLister()
        parser.feed(usock.read().decode('utf-8'))
        usock.close()
        parser.close()
        files = parser.urls

        tarballs = set((filename.replace('/', '') for filename in files if good_dir.search(filename)))
        if self.limit is not None:
            tarballs.intersection_update(self.limit)

        self.names = tarballs

    @classmethod
    @lru_cache(maxsize=4096)
    def versions(cls, module):
        versions = None

        url = '%s%s/cache.json' % (cls.URL, module)
        request = requests.get(url)
        json = request.json()
        if json is not None and len(json) > 2 and module in json[2]:
            versions = json[2][module]

        return versions
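
    # cache.json is the index GNOME publishes per module; entry 2 is assumed
    # to map module name -> list of version strings, e.g. (illustrative):
    #   [4, {...}, {'gnome-shell': ['3.24.0', '3.24.1']}, [...]]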

class Downstream():
    re_file = re.compile(r'^(?P<module>.*)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')
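
    # For example (illustrative), 'gnome-shell-3.24.1.tar.xz' parses as:
    #   module='gnome-shell', version='3.24.1', format='tar.xz'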

    MEDIA = "Core Release Source"
    # PKGROOT will be overwritten (command line option)
    PKGROOT = '~/pkgs'
    DISTRO = None
    SECTION = None

    def __init__(self):
        contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA], close_fds=True).decode("utf-8").strip("\n").splitlines()

        FILES = {}
        TARBALLS = {}
        PACKAGES = set()

        for line in contents:
            try:
                srpm, version, filename = line.split("|")
            except ValueError:
                print(line, file=sys.stderr)
                continue

            PACKAGES.add(srpm)

            if '.tar' in filename:
                match = self.re_file.match(filename)
                if match:
                    fileinfo = match.groupdict()
                    module = fileinfo['module']

                    if module not in TARBALLS:
                        TARBALLS[module] = {}

                    if srpm in TARBALLS[module]:
                        # srpm seen before, only keep the newest version
                        if version_cmp(version, TARBALLS[module][srpm]) == 1:
                            TARBALLS[module][srpm] = version
                    else:
                        TARBALLS[module][srpm] = version

            if srpm not in FILES:
                FILES[srpm] = set()
            FILES[srpm].add(filename)

        self._packages = PACKAGES
        self.tarballs = TARBALLS
        self.files = FILES

    @property
    def packages(self):
        return sorted(self._packages)

    _provide_to_alternate = {}
    @classmethod
    def alternative_provides(cls, search_for):
        """Give alternative provides for a certain provide

        Relies on urpmf. Results are cached. It will only provide alternatives if
        the alternative is only provided by one package. Meaning, if a pkgconfig(foo)
        is provided by 2 packages, the pkgconfig(foo) will NOT be given as an
        alternative.

        Sort of works like:
        $ urpmq --whatprovides $search_for --provides"""

        if not cls._provide_to_alternate:
            _provide_to_pkg = {}
            _pkg_to_provide = {}
            for myline in subprocess.check_output(['urpmf', "--qf", "%name\t%provides\t%arch", '.']).decode("utf-8").splitlines():
                pkgname, pkgprovide, pkgarch = myline.split("\t")
                if pkgarch in ('src', 'i586'):
                    continue
                if '-debug' in pkgprovide:
                    continue

                if "[" in pkgprovide and pkgprovide.endswith("]"):
                    pkgprovidepart = pkgprovide.rstrip("]").partition("[")
                else:
                    pkgprovidepart = pkgprovide.partition("[")

                if pkgprovidepart[0] in _provide_to_pkg:
                    _provide_to_pkg[pkgprovidepart[0]].add(pkgname)
                else:
                    _provide_to_pkg[pkgprovidepart[0]] = set((pkgname,))

                if pkgname in _pkg_to_provide:
                    _pkg_to_provide[pkgname].add(pkgprovidepart[0])
                else:
                    _pkg_to_provide[pkgname] = set((pkgprovidepart[0],))

            provide_has_single = set()
            for key, stash in _provide_to_pkg.items():
                if len(stash) == 1:
                    provide_has_single.add(key)


            for key in provide_has_single:
                # Ignore some of the provides to optimize memory usage somewhat
                #
                # WARNING: This might need to be changed depending on how this
                # function is used
                if '(' in key and not ')(' in key:
                    continue

                for pkgname in _provide_to_pkg[key]:
                    for pkgprovide in _pkg_to_provide[pkgname]:
                        if not '(' in pkgprovide:
                            continue

                        if not key in cls._provide_to_alternate:
                            cls._provide_to_alternate[key] = (set(), set())

                        if pkgprovide in provide_has_single:
                            cls._provide_to_alternate[key][0].add(pkgprovide)
                        else:
                            cls._provide_to_alternate[key][1].add(pkgprovide)

        if search_for in cls._provide_to_alternate:
            return cls._provide_to_alternate[search_for]

        return (set(), set())

    @classmethod
    def package_path(cls, package):
        return os.path.join(os.path.expanduser(Downstream.PKGROOT), package)

    @classmethod
    def package_spec(cls, package):
        path = cls.package_path(package)

        return SpecFile(os.path.join(path, "SPECS", "%s.spec" % package), module=package)


    @classmethod
    @retry(subprocess.CalledProcessError)
    def co(cls, package, cwd=None, spec_only=False):
        if cwd is None:
            cwd = os.path.expanduser(cls.PKGROOT)

        cmd = ['mgarepo', 'co']
        if cls.DISTRO:
            cmd.extend(('-d', cls.DISTRO))
        if spec_only:
            cmd.append('-s')
        cmd.append(package)
        return subprocess.check_call(cmd, stdin=subprocess.DEVNULL, cwd=cwd)

    @classmethod
    @retry(subprocess.CalledProcessError)
    def ci(cls, package, changes, cwd=None):
        if cwd is None:
            cwd = cls.package_path(package)

        cmd = ['mgarepo', 'ci', '-m', changes]
        return subprocess.check_call(cmd, cwd=cwd)

    @classmethod
    def submit(cls, package, cwd=None):
        if cwd is None:
            cwd = cls.package_path(package)

        cmd = ['mgarepo', 'submit']
        if cls.DISTRO:
            cmd.extend(('--define', 'section=core/updates_testing', '-t', cls.DISTRO))
        elif cls.SECTION:
            cmd.extend(('--define', 'section={0}'.format(cls.SECTION)))

        # retry submission various times, could be that some dependencies are being built at the same time
        @retry(subprocess.CalledProcessError, tries=10, delay=300, backoff=1.5)
        def _submit():
            subprocess.check_call(cmd, cwd=cwd)
        _submit()


    def get_downstream_from_upstream(self, upstream, version):
        if upstream not in self.tarballs:
            raise ValueError("No packages for upstream name: %s" % upstream)

        if len(self.tarballs[upstream]) == 1:
            return list(self.tarballs[upstream].keys())

        packages = {}
        for package in list(self.tarballs[upstream].keys()):
            # Checkout package to ensure the checkout reflects the latest changes
            try:
                self.co(package)
            except subprocess.CalledProcessError:
                raise ValueError("Multiple packages found and cannot checkout %s" % package)

            # Determine version from spec file
            try:
                packages[package] = self.package_spec(package).version
            except subprocess.CalledProcessError:
                raise ValueError("Multiple packages found and cannot determine version of %s" % package)

        # Return all packages reflecting the current version
        matches = [package for package in packages if packages[package] == version]
        if matches:
            return matches

        # Return all packages reflecting the version before the current version
        # - determine the newest version in any spec file
        latest_possible_version = get_latest_version(list(packages.values()))
        # - now get the latest version before the current version
        latest_version = get_latest_version(list(packages.values()), max_version=version)
        if latest_version is None:
            raise ValueError("Multiple packages found and all versions are newer than %s" % version)

        # - if the latest_possible_spec version is not the latest version, then
        #   ensure it follows get_safe_max_version
        if latest_version != latest_possible_version and version_cmp(get_safe_max_version(latest_version, upstream), \
                version) != 1:
            raise ValueError("Multiple packages found and unsafe version increase: %s (%s => %s)" % (upstream, latest_version, version))

        # - now really get the right packages
        matches = [package for package in packages if packages[package] == latest_version]
        if matches:
            return matches

        # Give up
        raise ValueError("Multiple packages found and cannot determine package for version %s" % version)

def write_file(path, data):
    with tempfile.NamedTemporaryFile(mode='w+t', dir=os.path.dirname(path), delete=False, encoding="utf-8") as fdst:
        fdst.write(data)
        fdst.flush()
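        # the temporary file was created in the target directory, so this
        # rename replaces the destination atomically on the same filesystem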
        os.rename(fdst.name, path)

def cmd_co_multi(args):
    package, what_to_print, options = args

    print(what_to_print)

    try:
        Downstream.co(package, spec_only=options.spec_only)
    except subprocess.CalledProcessError:
        pass

def cmd_co(options):
    if options.all:
        packages = ((package, package, options) for package in Downstream().packages)
    elif options.package:
        packages = ((package, package, options) for package in options.package)
    else:
        packages = ((l[0], "%s => %s" % (l[0], l[1]), options) for l in sorted(join_streams(auto_update=False)))

    if options.debug:
        for package in packages:
            cmd_co_multi(package)
    else:
        with concurrent.futures.ProcessPoolExecutor(max_workers=8) as executor:
            executor.map(cmd_co_multi, packages)

def join_streams(show_version=False, only_diff_version=False, auto_update=True):
    upstream = Upstream().names
    downstream = Downstream()

    matches = upstream & set(downstream.tarballs.keys())
    for module in matches:
        for package in list(downstream.tarballs[module].keys()):
            package_version = downstream.tarballs[module][package]
            spec_version = None
            cwd = Downstream.package_path(package)

            if show_version or only_diff_version:
                # ensure package is checked out
                if not os.path.exists(cwd):
                    try:
                        downstream.co(package)
                    except subprocess.CalledProcessError:
                        # XXX - ignoring packages which cannot be checked out
                        continue
                try:
                    spec_version = Downstream.package_spec(package).version
                except subprocess.CalledProcessError:
                    spec_version = 'N/A'

                # in case upstream version is newer, update checkout
                if auto_update and package_version != spec_version and version_cmp(package_version, spec_version) == 1:
                    try:
                        downstream.co(package)
                        spec_version = downstream.package_spec(package).version
                    except subprocess.CalledProcessError:
                        pass

            if only_diff_version and package_version == spec_version:
                continue

            yield (package, module, package_version, spec_version, downstream.files[package])

def cmd_group_owner(options):
    groups = set(options.group)

    output = [pkg.split("\t") for pkg in subprocess.check_output(["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]).decode("utf-8").splitlines()]
    if not output:
        return

    # Filter by groups
    output = [pkg for pkg in output if pkg[0] in groups]
    if not output:
        return

    packages = {}
    for group, name, sourcerpm, version, release in output:
        if group not in packages:
            packages[group] = {}

        source = sourcerpm if sourcerpm else name
        end = ".src.rpm"
        if source.endswith(end):
            source = source[:len(source) - len(end)]
        end = "-%s-%s" %(version, release)
        if source.endswith(end):
            source = source[:len(source) - len(end)]

        if source not in packages[group]:
            packages[group][source] = set()

        packages[group][source].add(name)


    maints = dict(line.rpartition(" ")[::2] for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).decode("utf-8").splitlines())

    def get_output(maints, packages):
        for package in list(packages.keys()):
            maint = maints.get(package, "?")

            yield "\t".join((maint, package, ",".join(sorted(packages[package]))))

    first = True
    for group in list(packages.keys()):
        if first:
            first = False
        else:
            print("")
            print("")
        print(group)
        print("")

        for line in sorted(get_output(maints, packages[group])):
            print(line)

def cmd_cleanup(options):
    root = os.path.expanduser(Downstream.PKGROOT)

#    packages = set(Downstream().packages)

    dirs = set((o for o in os.listdir(root) if os.path.isdir(os.path.join(root, o))))

#    dirs = dirs - packages

    import pysvn
    dirs = [o for o in dirs if os.path.exists(os.path.join(root, o, "SOURCES", 'sha1.lst'))]

    for path in dirs:
        try:
            binaries = set((l.split('  ', 1)[1] for l in open(os.path.join(root, path, 'SOURCES', 'sha1.lst')).read().splitlines()))
        except IndexError:
            print(os.path.join(root, path, 'SOURCES', 'sha1.lst'))
#            shutil.rmtree(os.path.join(root, path))
#            Downstream.co(path)
            continue

        vcs = pysvn.Client()
        stats = [stat for stat in vcs.status(os.path.join(root, path, 'SOURCES'), depth=pysvn.depth.immediates) if stat.text_status == pysvn.wc_status_kind.unversioned and os.path.basename(stat.path) not in binaries]

        if stats:
            print(path)
            print(", ".join(os.path.basename(stat.path) for stat in stats))
            print(stats)
            for stat in stats:
                if os.path.isfile(stat.path):
                    os.remove(stat.path)
                elif os.path.isdir(stat.path):
                    shutil.rmtree(stat.path)

def cmd_ls(options):
    streams = join_streams(show_version=options.show_version, only_diff_version=options.diff)
    if options.sort:
        # Sort packages on the line number in the file
        sort_helper = dict(list(zip(options.sort.read().splitlines(), itertools.count())))

        streams = sorted(streams, key=lambda a: (sort_helper.get(a[1], 9999), a[0]))
    else:
        streams = sorted(streams)

    for package, module, package_version, spec_version, downstream_files in streams:
        sys.stdout.write(package)
        if options.spec:
            sys.stdout.write('/SPECS/%s.spec' % package)
        if options.upstream:
            sys.stdout.write("\t%s" % module)
        if options.show_version:
            sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        print()

def cmd_check_version(options):
    streams = join_streams(show_version=True)

    for package, module, package_version, spec_version, downstream_files in streams:
        if package_version == spec_version:
            continue

        sys.stdout.write(package)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
        sys.stdout.write("\n")

def cmd_check_latest(options):
    streams = join_streams(show_version=True)

    for package, module, package_version, spec_version, downstream_files in streams:
        upgrade = set()
        sys.stdout.write(package)
        sys.stdout.write("\t%s\t%s" % (spec_version, package_version))

        safe_max_version = get_safe_max_version(spec_version, module=module)

        versions = Upstream.versions(module)
        if package_version != spec_version and spec_version != 'N/A' and package_version != 'N/A':
            upgrade.add('~')
        if versions:
            latest_version = get_latest_version(versions)
            safe_version = get_latest_version(versions, safe_max_version)

            cmp_latest = version_cmp(latest_version, spec_version)
            if cmp_latest < 0:
                latest_version = 'N/A'
                upgrade.add('l')
            elif cmp_latest > 0:
                upgrade.add('L')

            cmp_safe = version_cmp(safe_version, spec_version)
            if cmp_safe < 0:
                safe_version = 'N/A'
                upgrade.add('s')
            elif cmp_safe > 0:
                upgrade.add('S')

            sys.stdout.write("\t%s" % latest_version)
            sys.stdout.write("\t%s" % safe_version)
            sys.stdout.write("\t%s" % "".join(sorted(upgrade)))

        print()

        if 'S' in upgrade and options.submit and not Downstream.DISTRO:
            cmd = ['mgagnome', 'increase', package, safe_version]
            subprocess.call(cmd, cwd=Downstream.package_path(package))

def cmd_patches(options):
    root = os.path.expanduser(Downstream.PKGROOT)

    for package, module, package_version, spec_version, downstream_files in sorted(join_streams()):
        for filename in downstream_files:
            if '.patch' in filename or '.diff' in filename:

                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
                print("\t".join((module, package, str(p))))

def cmd_check_prep(options):
    spec = Downstream.package_spec(options.package)
    spec.check_and_update_patches()

def cmd_clean_spec_multi(args):
    options, package = args

    print(package)
    cwd = Downstream.package_path(package)

    path = os.path.join(cwd, "SPECS", "%s.spec" % package)
    if not os.path.exists(path):
        try:
            Downstream.co(package)
        except subprocess.CalledProcessError:
            print('WARNING: Cannot check package %s. Skipping.' % package, file=sys.stderr)
            return False

    if not os.path.exists(path):
        print('ERROR: Cannot find spec file for package %s' % package, file=sys.stderr)
        return False

    spec = Downstream.package_spec(package)

    try:
        if not spec.ensure_no_local_changes(options.force):
            return False
    except subprocess.CalledProcessError:
        # Package was probably not checked out or something
        print("ERROR: cannot clean spec file for %s" % package, file=sys.stderr)
        return False

    made_changes = False

    # Convert perl- and -devel buildrequires into perl() and pkgconfig() requires
    if options.convert_br:
        re_prov_get_version = re.compile(r'^[^(]+\([^)]+-(?P<version>[0-9]+\.[0-9][0-9.]*)\)$')
        br = spec.buildrequires
        no_alt = set()
        no_change = {}
        convert_brs = {
            'pkgconfig': {
                'desc': 'convert -devel buildrequires into pkgconfig',
                'check_br': lambda req: req.endswith('-devel'),
                'check_provide': lambda prov: prov.startswith('pkgconfig('),
                'basereqs': lambda req: [req[:-len('-devel')]],
                'basereq_no_version': lambda basereqs: [basereq.rstrip('1234567890.') for basereq in basereqs if basereq[-1] in '1234567890'],
                'versions_from_basereq': lambda basereqs: set((basereq[len(basereq.rstrip('01234567890.')):] for basereq in basereqs if basereq[-1] in '1234567890')),
                'versions_basereq_extra': lambda versions: set(("%s.0" % version for version in versions if '.' not in version)),
                'extra': lambda basereqs, versions: \
                    ['pkgconfig(%s)' % basereq for basereq in basereqs] +
                         ['pkgconfig(%s)' % basereq[len('lib'):] if basereq.startswith('lib') else 'pkgconfig(lib%s)' % basereq for basereq in basereqs] +
                         ['pkgconfig(%s-%s)' % (basereq, version) for basereq in basereqs for version in versions],
            },
            'perl': {
                'desc': 'convert perl- buildrequires into perl()',
                'check_br': lambda req: req.startswith('perl-'),
                'check_provide': lambda prov: prov.startswith('perl('),
                'basereqs': lambda req: [req[len('perl-'):]],
                'extra': lambda basereqs, versions: ['perl(%s)' % basereq.replace('-', '::') for basereq in basereqs],
            },
            # PySolFC.spec:BuildRequires:       python3-setuptools
            # $ rpm -q python3-setuptools --provides | grep python3dist
            # python3dist(setuptools)
            # python3dist(setuptools) = 46.1.3
            #
            # There's also provides such as:
            # python3.8dist(setuptools)
            # pythonegg(3)(setuptools)
            'python-pkg': {
                'disabled': True,
                'desc': 'convert python buildrequires into python3dist()',
                'check_br':  lambda req: req.startswith('python3-'),
                'check_provide': lambda prov: prov.startswith('python3dist('),
                'basereqs': lambda req: [req[len('python3-'):]],
                'extra': lambda basereqs, versions: ['python3dist(%s)' % basereq for basereq in basereqs],
            },
            'python-egg': {
                'desc': 'convert pythonegg(3) into python3dist()',
                'check_br':  lambda req: req.startswith('pythonegg(3)(') and req.endswith(')'),
                'check_provide': lambda prov: prov.startswith('python3dist('),
                'basereqs': lambda req: [req[len('pythonegg(3)('):-1]],
                'extra': lambda basereqs, versions: ['python3dist(%s)' % basereq for basereq in basereqs],
            },
        }
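        # The table above allows e.g. "BuildRequires: gtk+3.0-devel" to be
        # rewritten as "BuildRequires: pkgconfig(gtk+-3.0)" when that provide exists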

        for keys in convert_brs.values():
            if keys.get('disabled'):
                continue

            keys['changes'] = {}
            br_old = [r for r in list(br.keys()) if keys['check_br'](r)]
            if options.debug and br_old:
                pprint.pprint(br_old)
            for req in br_old:
                every_provides, every_ignored_provide = Downstream.alternative_provides(req)
                # XXX - document what clean_pkgconfig_prov is for
                #       maybe integrate clean_pkgconfig_prov in alternative_provides function?
                provides = [clean_pkgconfig_prov(prov) for prov in every_provides if keys['check_provide'](prov)]
                provides_ignored = [clean_pkgconfig_prov(prov) for prov in every_ignored_provide if keys['check_provide'](prov)]
                change_to = None
                check_for = []  # only filled by the heuristic matching below
                if len(provides) == 1 and not provides_ignored:
                    if options.debug:
                        print("NOTICE: Only one available option, using %s" % provides[0])

                    change_to = provides[0]
                elif provides and 'extra' in keys:
                    # Determine base require (e.g. gtk+3.0-devel --> gtk+3.0)
                    basereqs = keys['basereqs'](req)

                    # Determine version matches
                    versions = set()
                    if 'versions_from_basereq' in keys:
                        # Determine if the basereq has a version at the end (e.g. gtk+3.0 --> 3.0)
                        versions.update(keys['versions_from_basereq'](basereqs))
                        if versions and 'basereq_no_version' in keys:
                            basereqs.extend(keys['basereq_no_version'](basereqs))
                            # Make it unique again, but keep the order
                            #
                            # This is done so that e.g. python3-devel changes to pkgconfig(python3),
                            # even if pkgconfig(python) might be available
                            basereqs = list(distinct(basereqs))
                        if 'versions_basereq_extra' in keys:
                            versions.update(keys['versions_basereq_extra'](versions))

                    if not versions:
                        # In case no versions were retrieved from the basereq,
                        # match with any version found from the alternative
                        # provides (heuristic matching)
                        #
                        # This is only done as a last resort to avoid matching
                        # e.g. gtk+3.0-devel --> pkgconfig(gtk+2.0)
                        for prov in provides:
                            for match in re_prov_get_version.finditer(prov):
                                if options.debug:
                                    print("NOTICE: Heuristically adding version %s from provide %s" % (match.group('version'), prov))
                                versions.add(match.group('version'))

                    check_for = keys['extra'](basereqs, versions)

                    if options.debug and versions:
                        pprint.pprint(versions)

                    for check in check_for:
                        if check in provides:
                            if options.debug:
                                print("NOTICE: Matched: %s => %s" % (check, provides))
                            change_to = check
                            break

                if change_to is None and provides:
                    provides_no_versions = []
                    for prov in provides:
                        if re_prov_get_version.fullmatch(prov) is None:
                            provides_no_versions.append(prov)

                    if len(provides_no_versions) == 1 and not provides_ignored:
                        change_to = provides_no_versions[0]
                        if options.debug:
                            print("NOTICE: Only one available versionless option, using %s" % change_to)


                if provides:
                    if change_to is None:
                        no_change[req] = (provides, check_for)
                else:
                    no_alt.add(req)

                if change_to is not None:
                    keys['changes'][req] = change_to

        if not options.doit:
            if options.debug:
                for keys in convert_brs.values():
                    if keys.get('changes'):
                        pprint.pprint(keys['changes'])

            if no_alt:
                print("WARNING: no alternatives found for: %s" % ", ".join(sorted(no_alt)))

            if no_change and options.debug:
                pprint.pprint(no_change)
    else:
        convert_brs = {}

    keys_with_changes = [keys for keys in convert_brs.values() if keys.get('changes')]
# XXX - seems to cause false messages
#    if not keys_with_changes:
#        keys_with_changes.append({'changes': [], 'desc': 'unsplit BRs'})

    for keys in keys_with_changes:
        if spec.update_br(keys['changes'], change_description=keys['desc']):
            made_changes = True

    if spec.clean_spec():
        made_changes = True

    # If we made it this far, checkin the changes
    if made_changes:
        if options.doit:
            Downstream.ci(package, spec.changes, cwd=cwd)
            if spec.should_rebuild:
                cmd = ['mgagnome', 'rebuild', '-s', '-m', 'to test removal of deprecated macros', package]
                subprocess.call(cmd, cwd=cwd)
        else:
            # show the diff and undo all changes
            if spec.should_rebuild:
                print("NOTICE: Package should be rebuilt")
            print(spec.changes)
            spec.ensure_no_local_changes(force=True)

    return made_changes

def cmd_check_spec_multi(args):
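    """Verify that the spec file of a single package parses; worker for cmd_check_spec."""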
    options, package = args
    cwd = Downstream.package_path(package)

    path = os.path.join(cwd, "SPECS", "%s.spec" % package)
    if not os.path.exists(path):
        return False


    spec = Downstream.package_spec(package)
    try:
        spec.patches
    except SpecFileError:
        print('ERROR: Broken spec file for package %s' % package, file=sys.stderr)
        return False

    return True


def cmd_check_spec(options):
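    """Check the spec files of the selected (or all) packages."""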
    if options.all:
        packages = Downstream().packages
    else:
        packages = options.package if options.package else (stream[0] for stream in join_streams())


    if options.debug:
        for package in packages:
            cmd_check_spec_multi((options, package))
    else:
        workers = os.cpu_count() or 4
        with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
            executor.map(cmd_check_spec_multi, ((options, package) for package in packages))



def cmd_clean_spec(options):
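    """Clean the spec files of the selected (or all) packages, using worker processes unless --debug is given."""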
    if options.all:
        packages = Downstream().packages
    else:
        packages = options.package if options.package else (stream[0] for stream in join_streams())


    if options.debug:
        for package in packages:
            cmd_clean_spec_multi((options, package))
    else:
        workers = os.cpu_count() or 4
        # Hack: call alternative_provides once in the parent so its cache is
        # warm before the worker processes fork off
        if options.convert_br:
            Downstream.alternative_provides('XXXX')
        with concurrent.futures.ProcessPoolExecutor(max_workers=workers) as executor:
            executor.map(cmd_clean_spec_multi, ((options, package) for package in packages))

def cmd_new_release(options):
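    """Increase the release of each given package (a rebuild) and optionally submit it."""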
    success = True
    for pkg in options.package:
        # Determine the package name
        if options.upstream:
            try:
                package = Downstream().get_downstream_from_upstream(pkg, options.version)[0]
            except ValueError as exc:
                print("ERROR: %s" % exc, file=sys.stderr)
                success = False
                continue
        else:
            package = pkg

        cwd = Downstream.package_path(package)

        # Check out the package so the working copy reflects the latest changes
        try:
            Downstream.co(package)
        except subprocess.CalledProcessError:
            subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
            success = False
            continue

        # SpecFile class handles the actual version+release change
        # XXX - module should reflect upstream name, this gives it the package name
        spec = Downstream.package_spec(package)
        cur_release = spec.release
        try:
            new_release = int(cur_release)+1
        except ValueError:
            print("ERROR: Cannot increase the release for package %s" % pkg, file=sys.stderr)
            success = False
            continue
        cur_version = spec.version
        print("%s-%s => %s-%s" % (cur_version, cur_release, cur_version, new_release))

        # XXX - Duplicate check as should not revert changes if specfile has already been changed
        if not spec.ensure_no_local_changes(options.force):
            success = False
            continue

        if not spec.update_release(new_release, options.reason, force=options.force):
            subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd, stdout=subprocess.DEVNULL)
            success = False
            continue

        try:
            # If we made it this far, checkin the changes
            Downstream.ci(package, spec.changes, cwd=cwd)

            # Submit is optional
            if options.submit:
                Downstream.submit(package)
        except subprocess.CalledProcessError:
            success = False
            continue

    if not success:
        sys.exit(1)

def cmd_package_new_version(options):
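    """Update a package to a new upstream version, optionally verifying the tarball hash and submitting."""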
    # Determine the package name
    if options.upstream:
        try:
            package = Downstream().get_downstream_from_upstream(options.package, options.version)[0]
        except ValueError as exc:
            print("ERROR: %s" % exc, file=sys.stderr)
            sys.exit(1)
    else:
        package = options.package

    # Directories packages are located in
    cwd = Downstream.package_path(package)

    # Check out the package so the working copy reflects the latest changes
    try:
        Downstream.co(package)
    except subprocess.CalledProcessError:
        subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
        sys.exit(1)

    # SpecFile class handles the actual version+release change
    # XXX - module should reflect upstream name, this gives it the package name
    spec = Downstream.package_spec(package)
    print("%s => %s" % (spec.version, options.version))

    # XXX - Duplicate check as should not revert changes if specfile has already been changed
    if not spec.ensure_no_local_changes(options.force):
        sys.exit(1)

    if not spec.update_version(options.version, force=options.force):
        # XXX - hack to automatically revert changes when auto upgrading from ftp release list
        if options.hexdigest is not None:
            subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd, stdout=subprocess.DEVNULL)
        sys.exit(1)

    # Check hash, if given
    if options.hexdigest is not None:
        sources = [name for name, value in spec.sources.items() if '://' in value[0]]
        if not sources:
            print("ERROR: Cannot determine source file (for hash check)!", file=sys.stderr)
            sys.exit(1)

        # If there are multiple sources, try to see if there is a preferred name
        # --> needed for metacity hash check (multiple tarball sources)
        if len(sources) > 1:
            preferred_name = '%s-%s.tar.xz' % (package, options.version)
            if preferred_name in sources:
                sources = [preferred_name]

        for filename in sources:
            path = os.path.join(cwd, "SOURCES", filename)
            if not is_valid_hash(path, options.algo, options.hexdigest):
                print("ERROR: Hash file failed check for %s!" % path, file=sys.stderr)
                print("ERROR: Reverting changes!", file=sys.stderr)
                subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
                sys.exit(1)

    try:
        # If we made it this far, checkin the changes
        Downstream.ci(package, spec.changes, cwd=cwd)

        # Submit is optional
        if options.submit:
            Downstream.submit(package)

    except subprocess.CalledProcessError:
        sys.exit(1)

def cmd_parse_ftp_release_list(options):
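    """Read a GNOME ftp-release-list email from stdin and update the packages it announces.

    The mail is expected to carry X-Module-Name, X-Module-Version and
    X-Module-SHA256-tar.xz headers."""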
    def _send_reply_mail(contents, orig_msg, to_addr, packages=None, error=False):
        """Send an reply email"""
        contents.seek(0)
        msg = MIMEText(contents.read(), _charset='utf-8')

        if error:
            # XXX - ugly
            contents.seek(0)
            lastline = contents.read().rstrip().splitlines()[-1]
            # Remove things like "ERROR: " and so on from the last line
            lastline = re.sub(r'^(?:[^ :]+:\s+)+', '', lastline)
            # Remove things like "   - " (youri output from mgarepo submit)
            lastline = re.sub(r'^\s+-\s+', '', lastline)
            subjecterror = " (ERROR: %s)" % lastline if lastline else " (ERROR)"
        else:
            subjecterror = ""

        if packages:
            subject = "%s %s%s" % (", ".join(packages), orig_msg['X-Module-Version'], subjecterror)
        else:
            subject = "Re: %s%s" % (orig_msg['Subject'], subjecterror)

        msg['Subject'] = subject
        msg['To'] = to_addr
        msg["In-Reply-To"] = orig_msg["Message-ID"]
        msg["References"] = orig_msg["Message-ID"]

        # Call sendmail program directly so it doesn't matter if the service is running
        cmd = ['/usr/sbin/sendmail', '-oi', '--', to_addr]
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
        proc.communicate(msg.as_bytes())


    msg = email.message_from_file(sys.stdin)

    if options.mail:
        # Text mode, so the collected log can be fed to MIMEText and print() directly
        stdout = tempfile.TemporaryFile(mode='w+', encoding='utf-8')
        stderr = stdout
    else:
        stdout = sys.stdout
        stderr = sys.stderr

    # Message objects return None for missing headers instead of raising KeyError
    module = msg['X-Module-Name']
    version = msg['X-Module-Version']
    hexdigest = msg['X-Module-SHA256-tar.xz']
    if module is None or version is None or hexdigest is None:
        print("ERROR: Missing X-Module-* header in email", file=stderr)
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)

        sys.exit(1)

    try:
        packages = Downstream().get_downstream_from_upstream(module, version)
    except (ValueError, SpecFileError) as exc:
        print("ERROR: %s" % exc, file=stderr)
        if options.mail:
            _send_reply_mail(stdout, msg, options.mail, error=True)

        sys.exit(1)

    if options.wait or options.fork:
        # maildrop would abort and retry delivery after 5 minutes;
        # fork so the parent can exit immediately
        if os.fork() != 0:
            sys.exit(0)

    if options.wait:
        # wait SLEEP_INITIAL after the message was sent
        secs = SLEEP_INITIAL
        parsed_date = email.utils.parsedate_tz(msg['Date'])
        if parsed_date is not None:
            msg_time = email.utils.mktime_tz(parsed_date)
            secs = SLEEP_INITIAL - (time.time() - msg_time)

        if secs > 0:
            time.sleep(secs)

    error = False
    for package in packages:
        cmd = ['mgagnome', 'increase', '--hash', hexdigest]
        if options.submit:
            cmd.append('--submit')
        if options.force:
            cmd.append('--force')
        cmd.extend((package, version))
        if subprocess.call(cmd, stdout=stdout, stderr=stderr):
            error = True

    if options.mail:
        _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error)

def main():
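    """Parse the command line and dispatch to the chosen subcommand."""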
    description = """Mageia GNOME commands."""
    epilog = """Report bugs to Olav Vitters"""
    parser = argparse.ArgumentParser(description=description, epilog=epilog)
    parser.add_argument("-l", "--limit", type=argparse.FileType('r', 0),
                        dest="limit_upstream", metavar="FILE",
                        help="File containing upstream names")
    parser.add_argument("-p", "--root", action="store", dest="PKGROOT",
                        help="Package root directory")
    parser.add_argument("-d", "--distro", action="store", dest="distro",
                        help="Distribution release")
    parser.add_argument("--debug", action="store_true", dest="debug",
                        help="Use for debugging")
    parser.set_defaults(
        debug=False, PKGROOT="~/pkgs"
    )

    # SUBPARSERS
    subparsers = parser.add_subparsers(title='subcommands')
    subparser = subparsers.add_parser('check-latest', help='check for latest version of packages')
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Increase version for stable upgrades and submit")
    subparser.set_defaults(
        func=cmd_check_latest, submit=False
    )

    subparser = subparsers.add_parser('check-prep', help='check prep phase')
    subparser.add_argument("package", help="Package name")
    subparser.set_defaults(
        func=cmd_check_prep
    )

    subparser = subparsers.add_parser('check-spec', help='check if spec file is ok')
    subparser.add_argument("package", help="Package name", nargs='*')
    subparser.add_argument("-a", "--all", action="store_true", dest="all",
                           help="checkout all Downstream packages")
    subparser.set_defaults(
        func=cmd_check_spec
    )

    subparser = subparsers.add_parser('check-version', help='check if spec version and downstream version match')
    subparser.set_defaults(
        func=cmd_check_version
    )

    subparser = subparsers.add_parser('clean-spec', help='clean specfile')
    subparser.add_argument("package", help="Package name", nargs='*')
    subparser.add_argument("-d", "-s", action="store_true", dest="doit", help="submit the changes")
    subparser.add_argument("-f", "--force", action="store_true")
    subparser.add_argument("-a", "--all", action="store_true", dest="all",
                           help="checkout all Downstream packages")
    subparser.add_argument("--convert", action="store_true", dest="convert_br",
                           help="convert -buildrequirements to perl/pkgconfig if possible")
    subparser.set_defaults(
        func=cmd_clean_spec, doit=False, convert_br=False, all=False, force=False
    )

    subparser = subparsers.add_parser('cleanup', help='cleanup pkg directory')
    subparser.set_defaults(
        func=cmd_cleanup
    )

    subparser = subparsers.add_parser('co', help='checkout all GNOME packages')
    subparser.add_argument("-a", "--all", action="store_true", dest="all",
                           help="checkout all Downstream packages")
    subparser.add_argument("-s", action="store_true", dest="spec_only",
                           help="only checkout SPECS/ directory")
    subparser.add_argument("package", help="Package name", nargs='*')
    subparser.set_defaults(
        func=cmd_co, all=False
    )

    subparser = subparsers.add_parser('gnome-release-email', help='submit packages based on GNOME ftp-release-list email')
    subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
    subparser.add_argument("--fork", action="store_true",
                           help="Fork as quickly as possible")
    subparser.add_argument("-w", "--wait", action="store_true",
                           help="Wait before trying to retrieve the new version")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("-f", "--force", action="store_true",
                           help="Force submission")
    subparser.set_defaults(
        func=cmd_parse_ftp_release_list, force=False, wait=False, fork=False
    )

    subparser = subparsers.add_parser('group-owner', help='list packages by group')
    subparser.add_argument('group', metavar="GROUP", nargs='+')
    subparser.set_defaults(
        func=cmd_group_owner
    )

    subparser = subparsers.add_parser('increase', help='increase version number')
    subparser.add_argument("package", help="Package name")
    subparser.add_argument("version", help="Version number")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("--no-submit", action="store_false", dest="submit",
                           help="Do not commit changes and submit")
    subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms_available, dest="algo",
                           help="Hash algorithm")
    subparser.add_argument("--hash", dest="hexdigest",
                           help="Hexdigest of the hash")
    subparser.set_defaults(
        func=cmd_package_new_version, submit=argparse.SUPPRESS, upstream=False,
        hexdigest=None, algo="sha256", force=False
    )

    subparser = subparsers.add_parser('packages', help='list all GNOME packages')
    subparser.add_argument("-m", "--m", action="store_true", dest="upstream",
                           help="Show upstream module")
    subparser.add_argument("--version", action="store_true", dest="show_version",
                           help="Show version numbers")
    subparser.add_argument("--diff", action="store_true", dest="diff",
                           help="Only show packages with different version")
    subparser.add_argument("--sort", type=argparse.FileType('r', 0),
                           dest="sort", metavar="FILE",
                           help="Sort packages according to order in given FILE")
    subparser.add_argument("--spec", action="store_true", dest="spec",
                           help="Give spec file location")
    subparser.set_defaults(
        func=cmd_ls, upstream=False, show_version=False, diff=False
    )

    subparser = subparsers.add_parser('patches', help='list all GNOME patches')
    subparser.add_argument("-p", "--path", action="store_true", dest="path",
                           help="Show full path to patch")
    subparser.set_defaults(
        func=cmd_patches, path=False
    )

    subparser = subparsers.add_parser('rebuild', help='increase release')
    subparser.add_argument("package", help="Package name", nargs="*")
    subparser.add_argument("-m", "--reason", dest="reason", required=True, help="Reason for the rebuild")
    subparser.add_argument("-f", "--force", action="store_true", dest="force",
                           help="Override warnings, just do it")
    subparser.add_argument("-u", "--upstream", action="store_true", dest="upstream",
                           help="Package name reflects the upstream name")
    subparser.add_argument("-s", "--submit", action="store_true", dest="submit",
                           help="Commit changes and submit")
    subparser.add_argument("--no-submit", action="store_false", dest="submit",
                           help="Do not commit changes and submit")
    subparser.set_defaults(
        func=cmd_new_release, submit=argparse.SUPPRESS, upstream=False,
        force=False
    )

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(2)

    options = parser.parse_args()
    if options.limit_upstream:
        Upstream.limit = set(options.limit_upstream.read().strip("\n").splitlines())

    if not hasattr(options, 'submit'):
        options.submit = not options.distro

    Downstream.PKGROOT = options.PKGROOT
    if options.distro:
        Downstream.PKGROOT = os.path.join(options.PKGROOT, options.distro)
        Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source,Core {0} Updates Testing Source".format(options.distro)
        Downstream.DISTRO = options.distro

    try:
        options.func(options)
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(1)
    except EOFError:
        print('EOF')
        sys.exit(1)
    except IOError as exc:
        if exc.errno != errno.EPIPE:
            raise
        sys.exit(0)

if __name__ == "__main__":
    os.environ['PYTHONUNBUFFERED'] = '1'
    main()