From 9be921d7c924d94f488cbfa0066a4334f84d89bb Mon Sep 17 00:00:00 2001 From: Olav Vitters Date: Wed, 29 Apr 2020 14:46:17 +0200 Subject: pylint: fix too long lines, missing docstrings, etc --- mgagnome | 594 +++++++++++++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 446 insertions(+), 148 deletions(-) diff --git a/mgagnome b/mgagnome index 03843fe..ef87d7e 100755 --- a/mgagnome +++ b/mgagnome @@ -5,7 +5,7 @@ # https://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin # Written by Olav Vitters -from functools import wraps, lru_cache +from functools import wraps, lru_cache, cached_property # basic modules: import os @@ -62,8 +62,8 @@ SLEEP_INITIAL = 180 SLEEP_REPEAT = 30 SLEEP_TIMES = 30 -re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*') -re_version = re.compile(r'([-.]|\d+|[^-.\d]+)') +_re_majmin = re.compile(r'^([0-9]+\.[0-9]+).*') +_re_version = re.compile(r'([-.]|\d+|[^-.\d]+)') @@ -144,6 +144,7 @@ MAJOR_VERSIONS = { } def get_majmin(version, module=None): + """Return a tuple with major and minor versions given a version""" nrs = version.split('.') if module and module.lower() in MAJOR_VERSIONS: @@ -161,7 +162,10 @@ def get_majmin(version, module=None): def get_safe_max_version(version, module=None): - match = re_majmin.match(version) + """Provide the maximum version a module could safely be increased to + + This assumed the module is uses symantic versioning""" + match = _re_majmin.match(version) if version is None or not match: return None @@ -197,7 +201,7 @@ def judge_version_increase(version_old, version_new, module=None): return (-3, "Version %s is older than current version %s!" 
                 % (version_new, version_old))
 
     # Version is newer, but we don't want to see if it follows the GNOME versioning scheme
-    majmins = [get_majmin(ver, module) for ver in versions if re_majmin.match(ver) is not None]
+    majmins = [get_majmin(ver, module) for ver in versions if _re_majmin.match(ver) is not None]
 
     if len(majmins) == 1:
         return (-1, "Version number scheme changes: %s" % (", ".join(versions)))
@@ -244,6 +248,7 @@
     return (4, "Stable version increase")
 
 def line_input(file):
+    """Iterate over a file and ignore any newline"""
     for line in file:
         if line[-1] == '\n':
             yield line[:-1]
@@ -251,6 +256,7 @@ def line_input(file):
         yield line
 
 def distinct(iterable, keyfunc=None):
+    """Iterate over an iterable and only return unique/distinct iterables"""
    seen = set()
     for item in iterable:
         key = item if keyfunc is None else keyfunc(item)
@@ -283,6 +289,9 @@ def call_editor(filename):
     return True
 
 class URLLister(HTMLParser):
+    """Parse links from HTML"""
+    # pylint: disable=abstract-method
+
     def reset(self):
         HTMLParser.reset(self)
         self.urls = []
@@ -293,17 +302,17 @@ class URLLister(HTMLParser):
         if href:
             self.urls.extend(href)
 
-def is_valid_hash(path, algo, hexdigest):
+def _is_valid_hash(path, algo, hexdigest):
     if algo not in hashlib.algorithms_available:
         raise ValueError("Unknown hash algorithm: %s" % algo)
 
     local_hash = getattr(hashlib, algo)()
 
-    with open(path, 'rb') as fp:
-        data = fp.read(32768)
+    with open(path, 'rb') as fp_file:
+        data = fp_file.read(32768)
         while data:
             local_hash.update(data)
-            data = fp.read(32768)
+            data = fp_file.read(32768)
 
     return local_hash.hexdigest() == hexdigest
 
@@ -318,18 +327,46 @@ class SpecFile():
-    re_update_version = re.compile(r'^(?P<pre>Version[ \t]*:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
-    re_update_release = re.compile(r'^(?P<pre>Release[ \t]*:\s*)(?P<release>%mkrel [0-9.]+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
-    re_update_patch = re.compile(r'^(?P<pre>Patch0*?)(?P<nr>[0-9]*)(?P<pre2>[ \t]*:\s*)(?P<patch>.+)(?P<post>\s*)\n', re.MULTILINE + re.IGNORECASE)
-
-    re_br_part = re.compile(r'(?P<br>[^\s%{},<>=][^\s%{},<>=]*)\b(?P<post>\s*(?:(?P<operator>=|>=|<=|=<|=>|>|<)\s*(?P<version>[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+)\b)?)')
-
-    #re_update_br = re.compile(r'^(?P<pre>BuildRequires:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+[^\s%{},]+?)?\s*\n)', re.MULTILINE + re.IGNORECASE)
-    re_update_br = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<br>[^\s%{},]+?)(?P<post>\s*(?:(?:[<>]=?|=)\s+(?:[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+))?\s*\n)', re.MULTILINE + re.IGNORECASE)
-# re_update_br_unsplit = re.compile(r'^(?P<pre>BuildRequires:\s*)(?P<unsplit>[^\n,]+,[^\n]*)(?P<unsplitpost>\s*\n)', re.MULTILINE + re.IGNORECASE)
-
-    re_update_br_fix_operator = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*[^\n]*)(?P<operator>=<|=>)(?P<post>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
-    re_update_br_unsplit = re.compile(r'^(?P<pre>BuildRequires[ \t]*:\s*)(?P<unsplit>(?:%s,?(?:[ \t\f\v]+|$)){2,})(?P<unsplitpost>\n)' % (re_br_part.pattern,), re.MULTILINE + re.IGNORECASE)
+    """Functions related to a spec file"""
+    re_update_version = re.compile(
+        r'^(?P
Version[ \t]*:\s*)(?P.+)(?P\s*)$',
+        re.MULTILINE + re.IGNORECASE
+    )
+    re_update_release = re.compile(
+        r'^(?P
Release[ \t]*:\s*)(?P%mkrel [0-9.]+)(?P\s*)$',
+        re.MULTILINE + re.IGNORECASE
+    )
+    re_update_patch = re.compile(
+        r'^(?P
Patch0*?)(?P[0-9]*)(?P[ \t]*:\s*)(?P.+)(?P\s*)\n',
+        re.MULTILINE + re.IGNORECASE
+    )
+    re_br_part = re.compile(
+        r'(?P
[^\s%{},<>=][^\s%{},<>=]*)\b(?P\s*(?:(?P=|>=|<=|=<|=>|>|<)\s*' \ + r'(?P[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+)\b)?)' + ) + #re_update_br = re.compile( + # r'^(?P
BuildRequires:\s*)(?P
[^\s%{},]+?)(?P\s*(?:(?:[<>]=?|=)\s+[^\s%{},]+?)?\s*\n)', + # re.MULTILINE + re.IGNORECASE + #) + re_update_br = re.compile( + r'^(?P
BuildRequires[ \t]*:\s*)(?P
[^\s%{},]+?)' \ + r'(?P\s*(?:(?:[<>]=?|=)\s+(?:[^\s%{},]+|\%\{[^\s{%}]+\}|\%[^\s%{},]+))?\s*\n)', + re.MULTILINE + re.IGNORECASE + ) + #re_update_br_unsplit = re.compile( + # r'^(?P
BuildRequires:\s*)(?P[^\n,]+,[^\n]*)(?P\s*\n)',
+    #    re.MULTILINE + re.IGNORECASE
+    #)
+
+    re_update_br_fix_operator = re.compile(
+        r'^(?P
BuildRequires[ \t]*:\s*[^\n]*)(?P=<|=>)(?P[^\n]+)\n',
+        re.MULTILINE + re.IGNORECASE
+    )
+    re_update_br_unsplit = re.compile(
+        r'^(?P
BuildRequires[ \t]*:\s*)(?P(?:%s,?(?:[ \t\f\v]+|$)){2,})' \
+        r'(?P\n)' % (re_br_part.pattern,),
+        re.MULTILINE + re.IGNORECASE
+    )
 
     def __init__(self, path, module=None):
         self.path = path
@@ -341,23 +378,34 @@ class SpecFile():
 
     @property
     def changes(self):
-        return ''.join(self._changes.keys()) if len(self._changes) == 1 else "\n".join(("- %s" % change for change in self._changes.keys()))
+        """Description of the changes made to the spec file"""
+        return ''.join(self._changes.keys()) \
+            if len(self._changes) == 1 \
+            else "\n".join(("- %s" % change for change in self._changes.keys()))
 
     @property
     def made_changes(self):
+        """Determine if the spec file was changed"""
         return self._changed_spec_file
 
     @property
     def version(self):
-        return subprocess.check_output(["rpm", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path, "--queryformat", "%{VERSION}\n"]).decode("utf-8").splitlines()[0]
+        """Provide the version as parsed by rpm"""
+        cmd = ["rpm", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path,
+               "--queryformat", "%{VERSION}\n"]
+        return subprocess.check_output(cmd).decode("utf-8").splitlines()[0]
 
     @property
     def should_rebuild(self):
+        """Determine if the package should be rebuilt"""
         return self._should_rebuild
 
     @property
     def release(self):
-        return subprocess.check_output(["rpm", "--define", "%dist %nil", "--define", "_topdir %s" % os.path.join(self.cwd, ".."), "--specfile", self.path, "--queryformat", "%{RELEASE}\n"]).decode("utf-8").splitlines()[0]
+        """Provide the release as parsed by rpm"""
+        cmd = ["rpm", "--define", "%dist %nil", "--define", "_topdir %s" % os.path.join(self.cwd, ".."),
+               "--specfile", self.path, "--queryformat", "%{RELEASE}\n"]
+        return subprocess.check_output(cmd).decode("utf-8").splitlines()[0]
 
     def _sources_and_patches(self, flag=None):
         os.chdir(self.cwd)
@@ -375,34 +423,100 @@ class SpecFile():
             # trans_set.parseSpec can affect changing of internal macros, e.g. redefining things like mkrel and so on
             # reload the config to fix this
             rpm.reloadConfig()
-        return dict((os.path.basename(name), [name, 0 if no == 2147483647 and flags == 2 else no]) for name, no, flags in srclist if flag is None or flags == flag)
+        return dict(
+            (os.path.basename(name), [name, 0 if no == 2147483647 and flags == 2 else no])
+            for name, no, flags in srclist if flag is None or flags == flag
+        )
 
     @property
     def patches(self):
+        """Return the patches"""
         return self._sources_and_patches(flag=2)
 
     @property
     def sources(self):
+        """Return the sources"""
         return self._sources_and_patches(flag=1)
 
     def clean_spec(self):
-        re_rm_buildroot = r'^(?:\s*\[[^\n\]\[]+\][ \t]+\&\&[ \t]+)?(?:rm|\%__rm|\%\{__rm\}) *(?:-rf|-fr|-r) *"?(?:[%$]buildroot|[%$]\{buildroot\}|[%$]\{buildroot\}|\$RPM_BUILDROOT|\$RPM_BUILD_ROOT|\$\{RPM_BUILD_ROOT\}|\$RPM_BUILD_DIR)"?/?[ \t]*\n'
+        """Clean the spec file of deprecated statements"""
+
+        re_rm_buildroot = r'^(?:\s*\[[^\n\]\[]+\][ \t]+\&\&[ \t]+)?(?:rm|\%__rm|\%\{__rm\}) *' \
+            r'(?:-rf|-fr|-r) *"?(?:[%$]buildroot|[%$]\{buildroot\}|[%$]\{buildroot\}|' \
+            r'\$RPM_BUILDROOT|\$RPM_BUILD_ROOT|\$\{RPM_BUILD_ROOT\}|\$RPM_BUILD_DIR)"?/?[ \t]*\n'
+
         re_clean_spec = [
             # remove %defattr
-            ('remove defattr', None, re.compile(r'(?P<keeppre>^\%files(?:[ \t]+[^\n]*)?\n(?:^\%doc [^\n]+\n)?)^\%defattr\s*\(- *, *root *, *root *(?:, *-)?\)\s*\n', re.MULTILINE + re.IGNORECASE)),
-            ('remove cleaning buildroot in install', None, re.compile(r'(?P<keeppre>^\%install(?:[ \t]+[^\n]*)?\n)' + re_rm_buildroot + r'\n?', re.MULTILINE + re.IGNORECASE)),
-            ('remove clean section', None, re.compile(r'^\%clean[ \t]*\n(?:' + re_rm_buildroot + r')?\s*(?P<keeppost>(?:^#[^%\n]+\n)*^(?:\%files|\%post|\%pre|\%trigger|\%install|\%package|\%check|\%_font_pkg|$(?!.|\n)))', re.MULTILINE + re.IGNORECASE)),
-            ('remove buildroot definition', None, re.compile(r'^BuildRoot[ \t]*:[^\n]+\n', re.MULTILINE + re.IGNORECASE)),
-            ('remove unneeded setup option', None, re.compile(r'^(?P<keeppre>\%setup -q)(?: -n|n) (?:\%name|\%\{name\})-(?:\%version|\%\{version\})(?P<keeppost>\n)', re.MULTILINE + re.IGNORECASE)),
-            ('https for download.gnome.org', r'\g<keeppre>https://\g<keeppost>', re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)http://(?P<keeppost>download.gnome.org/[^\n]+\n)', re.MULTILINE + re.IGNORECASE)),
-            ('download.gnome.org instead of ftp.gnome.org', r'\g<keeppre>https://download.gnome.org\g<keeppost>', re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)(?:ftp|http|https)://ftp.gnome.org/pub/GNOME(?P<keeppost>/[^\n]+\n)', re.MULTILINE + re.IGNORECASE)),
-            ('restrict what libraries are matched with major numbers', r'\g<keeppre>{,.*}', re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})\*$', re.MULTILINE)),
-            ('keep library matching using two lines', r'\g<keeppre>\n\g<keeppre>.*', re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})$\n(?P=keeppre)\{,\.\*\}$', re.MULTILINE)),
+            (
+                'remove defattr',
+                None,
+                re.compile(r'(?P<keeppre>^\%files(?:[ \t]+[^\n]*)?\n(?:^\%doc [^\n]+\n)?)'
+                           r'^\%defattr\s*\(- *, *root *, *root *(?:, *-)?\)\s*\n',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'remove cleaning buildroot in install',
+                None,
+                re.compile(r'(?P<keeppre>^\%install(?:[ \t]+[^\n]*)?\n)' + re_rm_buildroot + r'\n?',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'remove clean section',
+                None,
+                re.compile(r'^\%clean[ \t]*\n(?:' + re_rm_buildroot +
+                           r')?\s*(?P<keeppost>(?:^#[^%\n]+\n)*^(?:\%files|\%post|\%pre|\%trigger|'
+                           r'\%install|\%package|\%check|\%_font_pkg|$(?!.|\n)))',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'remove buildroot definition',
+                None,
+                re.compile(r'^BuildRoot[ \t]*:[^\n]+\n', re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'remove unneeded setup option',
+                None,
+                re.compile(r'^(?P<keeppre>\%setup -q)(?: -n|n) (?:\%name|\%\{name\})-'
+                           r'(?:\%version|\%\{version\})(?P<keeppost>\n)',
+                           re.MULTILINE + re.IGNORECASE)),
+            (
+                'https for download.gnome.org',
+                r'\g<keeppre>https://\g<keeppost>',
+                re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)http://(?P<keeppost>download.gnome.org/[^\n]+\n)',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'download.gnome.org instead of ftp.gnome.org',
+                r'\g<keeppre>https://download.gnome.org\g<keeppost>',
+                re.compile(r'^(?P<keeppre>Source[0-9]*[ \t]*:[^\n]+)(?:ftp|http|https):' +
+                           r'//ftp.gnome.org/pub/GNOME(?P<keeppost>/[^\n]+\n)',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'restrict what libraries are matched with major numbers',
+                r'\g<keeppre>{,.*}',
+                re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})\*$', re.MULTILINE)
+            ),
+            (
+                'keep library matching using two lines',
+                r'\g<keeppre>\n\g<keeppre>.*',
+                re.compile(r'^(?P<keeppre>%{_libdir}[^\n]+})$\n(?P=keeppre)\{,\.\*\}$', re.MULTILINE)
+            ),
             ('make use of autopatch', r'%autopatch -p1', re.compile(r'^%apply_patches$', re.MULTILINE)),
             ('change configure2_5x macro to configure', r'%configure', re.compile(r'^%configure2_5x\b', re.MULTILINE)),
             ('change make macro to make_build', r'%make_build', re.compile(r'^%make\b', re.MULTILINE), True),
-            ('change find_lang --with-help into --with-gnome', r'\g<keeppre> --with-gnome\g<keeppost>', re.compile(r'^(?P<keeppre>\s*\%find_lang[^\\\n]+) --with-help(?P<keeppost>[^\\\n]*\n)', re.MULTILINE + re.IGNORECASE)),
-            ('change find_lang remove duplicate with_gnome', None, re.compile(r'^(?P<keeppre>\%find_lang[^\\\n]+ --with-gnome) --with-gnome(?P<keeppost>[^\\\n]*\n)', re.MULTILINE + re.IGNORECASE)),
+            (
+                'change find_lang --with-help into --with-gnome',
+                r'\g<keeppre> --with-gnome\g<keeppost>',
+                re.compile(r'^(?P<keeppre>\s*\%find_lang[^\\\n]+) --with-help(?P<keeppost>[^\\\n]*\n)',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
+            (
+                'change find_lang remove duplicate with_gnome',
+                None,
+                re.compile(r'^(?P<keeppre>\%find_lang[^\\\n]+ --with-gnome) --with-gnome(?P<keeppost>[^\\\n]*\n)',
+                           re.MULTILINE + re.IGNORECASE)
+            ),
             # Use new Python macros
             ('use new Python macros', r'%py3_build', re.compile(r'^%{__python3} setup.py build$', re.MULTILINE)),
 
@@ -411,13 +525,21 @@ class SpecFile():
             ('use new Python macros', r'%{python3_version}', re.compile(r'%{py3ver}', re.MULTILINE)),
         ]
         re_convert_br = [
-            ('remove py_requires', ('python',), re.compile(r'^\%(?:py_requires|\{py_requires\})[ \t]*\n', re.MULTILINE)),
-            ('remove py_requires -d', ('python', 'python-devel'), re.compile(r'^\%(?:py_requires[ \t]+-d|\{py_requires[ \t]+-d\})[ \t]*\n', re.MULTILINE)),
+            (
+                'remove py_requires',
+                ('python',),
+                re.compile(r'^\%(?:py_requires|\{py_requires\})[ \t]*\n', re.MULTILINE)
+            ),
+            (
+                'remove py_requires -d',
+                ('python', 'python-devel'),
+                re.compile(r'^\%(?:py_requires[ \t]+-d|\{py_requires[ \t]+-d\})[ \t]*\n', re.MULTILINE)
+            ),
         ]
 
         made_changes = False
-        with open(self.path, "r", encoding="utf-8") as f:
-            data = f.read()
+        with open(self.path, "r", encoding="utf-8") as fp_spec:
+            data = fp_spec.read()
             for reason, change_to, regexp, *extra in re_clean_spec:
                 if extra:
                     should_rebuild = extra[0]
@@ -468,14 +590,25 @@ class SpecFile():
             converted_defines = []
             for search_for in ('name', 'version', 'release', 'summary', 'Summary', 'group'):
                 search_for_ignore_case = ''.join(("[%s%s]" % (letter, letter.swapcase()) for letter in search_for))
-                re_spec = re.compile(r'^(?P<keeppre>' + re.escape(search_for_ignore_case) + r'[ \t]*:[ \t]*)(?:\%' + re.escape(search_for) + r'|\%\{' + re.escape(search_for) + r'\})(?P<keeppost>[ \t]*\n)', re.MULTILINE)
-                re_variable = re.compile(r'^(?P<keeppre>\%define[ \t]+'+ re.escape(search_for) + r'[ \t]+(?P<definition>[^\n]+?))(?P<keeppost>[ \t]*\n)', re.MULTILINE)
+                re_spec = re.compile(
+                    r'^(?P<keeppre>' + re.escape(search_for_ignore_case) + r'[ \t]*:[ \t]*)(?:\%' +
+                    re.escape(search_for) + r'|\%\{' + re.escape(search_for) +
+                    r'\})(?P<keeppost>[ \t]*\n)', re.MULTILINE
+                )
+                re_variable = re.compile(
+                    r'^(?P<keeppre>\%define[ \t]+' + re.escape(search_for) +
+                    r'[ \t]+(?P<definition>[^\n]+?))(?P<keeppost>[ \t]*\n)',
+                    re.MULTILINE
+                )
 
                 match = re_variable.search(data)
                 if match and match.group('definition') and len(re_variable.findall(data)) == 1:
                     match2 = re_spec.search(data)
                     if match2:
-                        data, subs = re_spec.subn(r'\g<keeppre>' + match.group('definition').replace('\\', '\\\\') + r'\g<keeppost>', data)
+                        data, subs = re_spec.subn(
+                            r'\g<keeppre>' + match.group('definition').replace('\\', '\\\\') + r'\g<keeppost>',
+                            data
+                        )
                         if subs:
                             made_changes = True
                             data, subs = re_variable.subn('', data)
@@ -497,12 +630,24 @@ class SpecFile():
     def _clean_spec_patches(self, made_changes, data):
         re_autopatch = re.compile(r'^[ \t]*\%autopatch(?:[ \t]+-p(?P<strip>[0-9]+))?$', re.MULTILINE)
 
-        re_patch_header = re.compile('^Patch(?P<nr>[0-9]*)[ \t]*:[ \t]*(?P<filename>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
+        re_patch_header = re.compile(
+            '^Patch(?P<nr>[0-9]*)[ \t]*:[ \t]*(?P<filename>[^\n]+)\n',
+            re.MULTILINE + re.IGNORECASE
+        )
         re_patch_any = re.compile(r'^[ \t]*\%patch(?P<nr>[0-9]*)', re.MULTILINE)
-        re_patch_valid = re.compile(r'^[ \t+]*\%patch(?P<nr>[0-9]*)(?:[ \t]+-p(?P<strip>[0-9]+))?(?:[ \t]+-b[ \t]+\S+)?$\n?', re.MULTILINE)
-        re_prep_patches = re.compile(r'^\%setup[^\n]+$(?:' + re_patch_valid.pattern + r'|^#[^%\n]+\n|^[ \t]*(?:%{_bindir}/|%_bindir)?autoreconf[ \t][^\n]+$|\s)+\n\%build', re.MULTILINE)
-
-        give_patchnr = lambda match: (match.group('nr') if len(match.group('nr')) == 1 else match.group('nr').lstrip('0')) if match.group('nr') else "0"
+        re_patch_valid = re.compile(
+            r'^[ \t+]*\%patch(?P<nr>[0-9]*)(?:[ \t]+-p(?P<strip>[0-9]+))?(?:[ \t]+-b[ \t]+\S+)?$\n?',
+            re.MULTILINE
+        )
+        re_prep_patches = re.compile(
+            r'^\%setup[^\n]+$(?:' + re_patch_valid.pattern +
+            r'|^#[^%\n]+\n|^[ \t]*(?:%{_bindir}/|%_bindir)?autoreconf[ \t][^\n]+$|\s)+\n\%build',
+            re.MULTILINE
+        )
+
+        give_patchnr = lambda match: (
+            match.group('nr') if len(match.group('nr')) == 1 else match.group('nr').lstrip('0')
+            ) if match.group('nr') else "0"
 
         # Make use of %apply_patches
 
@@ -518,27 +663,37 @@ class SpecFile():
             print("NOTICE: More than 5 patches, skipping package", file=sys.stderr)
             return made_changes, data
 
-        if self.uses_apply_patches:
+        if self.uses_autopatch:
             return made_changes, data
 
         # XXX -- apparently this is really inefficient with e.g. apache
         match2 = re_prep_patches.search(data)
         patch_nrs_header = set((give_patchnr(match) for match in re_patch_header.finditer(data)))
         patch_nrs_any = set((give_patchnr(match) for match in re_patch_any.finditer(data)))
-        patch_nrs_valid = set((give_patchnr(match) for match in re_patch_valid.finditer(match2.group(0)))) if match2 else set()
+        patch_nrs_valid = set(
+            (give_patchnr(match) for match in re_patch_valid.finditer(match2.group(0)))
+            ) if match2 else set()
 
         if not patch_nrs_header:
             # XXX -- weird, self.patches should've returned 0 already
             return made_changes, data
 
         if not patch_nrs_header == patch_nrs_any == patch_nrs_valid:
-            print("NOTICE: Unable to automatically convert %s patches into %%autopatch (header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any), len(patch_nrs_valid)), file=sys.stderr)
+            print("NOTICE: Unable to automatically convert %s patches into %%autopatch "
+                  "(header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any),
+                                                        len(patch_nrs_valid)),
+                  file=sys.stderr \
+            )
             return made_changes, data
 
-        patch_flags = set((0 if match.group('strip') is None else match.group('strip') for match in re_patch_valid.finditer(match2.group(0))))
+        patch_flags = set((
+            0 if match.group('strip') is None else match.group('strip')
+            for match in re_patch_valid.finditer(match2.group(0))
+        ))
 
         if len(patch_flags) != 1:
-            print("NOTICE: Unable to automatically convert patches into as different -p / strip levels used", file=sys.stderr)
+            print("NOTICE: Unable to automatically convert spec file to use autopatch as"
+                  "different -p / strip levels are used", file=sys.stderr)
             return made_changes, data
 
         # Whoot, we can convert!!
@@ -571,6 +726,10 @@ class SpecFile():
 
     @property
     def buildrequires(self):
+        """Get the BuildRequires of this spec file
+
+        Parses the spec file to do this, so any macros are expanded"""
+
         rpm.delMacro("_topdir")
         rpm.addMacro("_topdir", os.path.join(self.cwd, '..'))
         trans_set = rpm.ts()
@@ -606,7 +765,8 @@ class SpecFile():
         return buildreqs
 
     @property
-    def uses_apply_patches(self):
+    def uses_autopatch(self):
+        """Check if autopatch (or similar) is used"""
         return subprocess.call(['grep', '-Eq', '^%(apply_patches|autopatch|autosetup)', '--', self.path]) == 0
 
     def _revert_changes(self):
@@ -621,11 +781,14 @@ class SpecFile():
         if patchnr == 0:
             nrs.append('')
 
-        with open(self.path, "r", encoding="utf-8") as f:
-            data = f.read()
-
-            data, subs = self.re_update_patch.subn(lambda match: '' if match.group('nr') in nrs or (match.group('nr').isdigit() and int(match.group('nr')) in nrs) else match.group(0), data)
+        with open(self.path, "r", encoding="utf-8") as fp_spec:
+            data = fp_spec.read()
 
+            data, subs = self.re_update_patch.subn(
+                lambda match: '' if match.group('nr') in nrs or (match.group('nr').isdigit()
+                                                                 and int(match.group('nr')) in nrs)
+                else match.group(0), data
+            )
             if not subs:
                 print("ERROR: Could not remove patch nr %s!" % patchnr, file=sys.stderr)
                 return False
@@ -649,7 +812,7 @@ class SpecFile():
 
         initial_patches = self.patches
         patches = initial_patches
-        uses_apply_patches = self.uses_apply_patches if patches else False
+        uses_autopatch = self.uses_autopatch if patches else False
 
         while True:
             try:
@@ -665,8 +828,8 @@ class SpecFile():
                 # Determine the last command that failed
                 if os.path.exists(logfile):
                     print(logfile)
-                    with open(logfile, "r", encoding="utf-8") as f:
-                        for line in line_input(f):
+                    with open(logfile, "r", encoding="utf-8") as fp_logfile:
+                        for line in line_input(fp_logfile):
                             if line.startswith('+ '):
                                 cmd_before = (cmd, cmd_before)
                                 cmd = line[2:]
@@ -677,14 +840,14 @@ class SpecFile():
                 cmd_parsed = shlex.split(cmd) if cmd else []
                 cmd_before_parsed = shlex.split(cmd_before[0]) if cmd_before[0] else []
 
-                if not check_only and uses_apply_patches and patches and cmd_parsed:
+                if not check_only and uses_autopatch and patches and cmd_parsed:
                     if os.path.basename(cmd_parsed[0]) in ('patch', 'cat'):
                         if os.path.exists(cmd_parsed[-1]):
                             failed_patch = os.path.basename(cmd_parsed[-1])
                         elif cmd_parsed[-1].startswith('-') and os.path.exists(cmd_before_parsed[-1]):
-                            # for %autopatch as well as %patch
-                            #+ /usr/bin/cat /home/src/pkgs/gnome-getting-started-docs/SOURCES/gs-browse-web-firefox.page.patch
-                            #+ /usr/bin/patch -p1 -s
+# for %autopatch as well as %patch
+#+ /usr/bin/cat /home/src/pkgs/gnome-getting-started-docs/SOURCES/gs-browse-web-firefox.page.patch
+#+ /usr/bin/patch -p1 -s
                             failed_patch = os.path.basename(cmd_before_parsed[-1])
 
                     # Patch is merged if there is at least one 'ignored' line and no 'FAILED' line anywhere
@@ -746,23 +909,35 @@ class SpecFile():
         # XXX - doesn't handle buildrequires with version numbers :-(
         made_changes = False
 
-        with open(self.path, "r", encoding="utf-8") as f:
-            data = f.read()
+        with open(self.path, "r", encoding="utf-8") as fp_spec:
+            data = fp_spec.read()
 
             # Change any "," in buildrequires into multiple lines
-            data, subs = self.re_update_br_unsplit.subn(lambda match: ''.join((''.join((match.group('pre'), match2.group(0), match.group('unsplitpost'))) for match2 in self.re_br_part.finditer(match.group('unsplit')) if match.group(0).strip() != '')), data)
+            data, subs = self.re_update_br_unsplit.subn(
+                lambda match: ''.join((''.join((match.group('pre'), match2.group(0), match.group('unsplitpost')))
+                                       for match2 in self.re_br_part.finditer(match.group('unsplit'))
+                                       if match.group(0).strip() != '')),
+                data
+            )
             if subs:
                 made_changes = True
                 self._changes['SILENT one line per buildrequirement'] = True
 
             # Change =< and => operators into <= and >=
-            data, subs = self.re_update_br_fix_operator.subn(lambda match: match.group(0).replace('=>', '>=').replace('=<', '<') if self.re_update_br.match(match.group(0).replace('=>', '>=').replace('=<', '<')) else match.group(0), data)
+            data, subs = self.re_update_br_fix_operator.subn(
+                lambda match: match.group(0).replace('=>', '>=').replace('=<', '<')
+                if self.re_update_br.match(match.group(0).replace('=>', '>=').replace('=<', '<'))
+                else match.group(0), data
+            )
             if subs:
                 made_changes = True
                 self._changes['SILENT fix operator in buildrequires'] = True
 
             # Now update buildrequires if any
-            data, subs = self.re_update_br.subn(lambda match: ''.join((match.group('pre'), changes[match.group('br')], match.group('post'))) if match.group('br') in changes else match.group(0), data)
+            data, subs = self.re_update_br.subn(
+                lambda match: ''.join((match.group('pre'), changes[match.group('br')], match.group('post')))
+                if match.group('br') in changes else match.group(0), data
+            )
 
             if subs:
                 made_changes = True
@@ -782,8 +957,14 @@ class SpecFile():
     MAX_JUDGEMENT = 5
 
     def ensure_no_local_changes(self, force=False):
+        """Check if the repository has any local changes
+
+        Can optionally force a clean checkout (by reverting any local
+        changes). This should possibily be moved to Downstream class, or move
+        this into a new Package class"""
         # XXX - os.path.join is hackish
-        svn_diff_output = subprocess.check_output(["svn", "diff", os.path.normpath(os.path.join(self.cwd, '..'))]).decode('utf-8')
+        cmd = ["svn", "diff", os.path.normpath(os.path.join(self.cwd, '..'))]
+        svn_diff_output = subprocess.check_output(cmd).decode('utf-8')
         if svn_diff_output != '':
             print(svn_diff_output)
             print("ERROR: Package has uncommitted changes!", file=sys.stderr)
@@ -804,8 +985,8 @@ class SpecFile():
         if not self.ensure_no_local_changes(force):
             return None
 
-        with open(self.path, "r", encoding="utf-8") as f:
-            data = f.read()
+        with open(self.path, "r", encoding="utf-8") as fp_spec:
+            data = fp_spec.read()
 
             if data.count("%subrel") != 0:
                 print("ERROR: %subrel found; don't know what to do!", file=sys.stderr)
@@ -916,29 +1097,35 @@ class Patch():
     def __str__(self):
         return self.path if self.show_path else os.path.basename(self.path)
 
-    @property
+    @cached_property
     def svn_author(self):
-        if not hasattr(self, '_svn_author'):
-            try:
-                contents = subprocess.check_output(['svn', 'log', '-q', "--", self.path], close_fds=True).strip("\n").decode('utf-8').splitlines()
+        """Return the svn author of this patch
 
-                for line in contents:
-                    if ' | ' not in line:
-                        continue
+        Makes use of svn log."""
+        try:
+            cmd = ['svn', 'log', '-q', "--", self.path]
+            contents = subprocess.check_output(cmd, close_fds=True).strip("\n").decode('utf-8').splitlines()
 
-                    fields = line.split(' | ')
-                    if len(fields) >= 3:
-                        self._svn_author = fields[1]
-            except subprocess.CalledProcessError:
-                pass
+            for line in contents:
+                if ' | ' not in line:
+                    continue
 
-        if not hasattr(self, '_svn_author'):
-            return None
+                fields = line.split(' | ')
+                if len(fields) >= 3:
+                    return fields[1]
+        except subprocess.CalledProcessError:
+            pass
 
-        return self._svn_author
+        return None
 
 
 class Upstream():
+    """Class handling anything related to upstream (meaning: GNOME)
+
+    For now this is limited to:
+      - finding the module names
+      - finding latest version of a module"""
+
 
     URL = "https://download.gnome.org/sources/"
     limit = None
@@ -965,6 +1152,7 @@ class Upstream():
     @classmethod
     @lru_cache(maxsize=4096)
     def versions(cls, module):
+        """Return the possible versions for a given module"""
         versions = None
 
         url = '%s%s/cache.json' % (cls.URL, module)
@@ -976,7 +1164,13 @@ class Upstream():
         return versions
 
 class Downstream():
-    re_file = re.compile(r'^(?P<module>.*)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$')
+    """Class handling anything related to downstream (meaning: Mageia)"""
+
+
+    re_file = re.compile(
+        r'^(?P<module>.*)[_-](?:(?P<oldversion>([0-9]+[\.])*[0-9]+)-)?(?P<version>([0-9]+[\.\-])*[0-9]+)' \
+        r'\.(?P<format>(?:tar\.|diff\.)?[a-z][a-z0-9]*)$'
+    )
+    )
 
     MEDIA = "Core Release Source"
     # PKGROOT will be overwritten (command line option)
@@ -985,13 +1179,14 @@ class Downstream():
     SECTION = None
 
     def __init__(self):
-        contents = subprocess.check_output(['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA], close_fds=True).decode("utf-8").strip("\n").splitlines()
+        cmd = ['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA]
+        contents = subprocess.check_output(cmd, close_fds=True).decode("utf-8").strip("\n").splitlines()
 
         srpm_files = {}
         module_srpm_tarballs = {}
         packages = set()
 
-        for line in  contents:
+        for line in contents:
             try:
                 srpm, version, filename = line.split("|")
             except SpecFileError:
@@ -1026,6 +1221,7 @@ class Downstream():
 
     @property
     def packages(self):
+        """Return all downstream package names"""
         return sorted(self._packages)
 
     _provide_to_alternate = {}
@@ -1044,7 +1240,8 @@ class Downstream():
         if not cls._provide_to_alternate:
             _provide_to_pkg = {}
             _pkg_to_provide = {}
-            for myline in subprocess.check_output(['urpmf', "--qf", "%name\t%provides\t%arch", '.']).decode("utf-8").splitlines():
+            cmd = ['urpmf', "--qf", "%name\t%provides\t%arch", '.']
+            for myline in subprocess.check_output(cmd).decode("utf-8").splitlines():
                 pkgname, pkgprovide, pkgarch = myline.split("\t")
                 if pkgarch in ('src', 'i586'):
                     continue
@@ -1100,10 +1297,14 @@ class Downstream():
 
     @classmethod
     def package_path(cls, package):
+        """Provide the local checkout path for a given package
+
+        Package might not be checked out yet!"""
         return os.path.join(os.path.expanduser(Downstream.PKGROOT), package)
 
     @classmethod
     def package_spec(cls, package):
+        """Return the SpecFile for a given package"""
         path = cls.package_path(package)
 
         return SpecFile(os.path.join(path, "SPECS", "%s.spec" % package), module=package)
@@ -1111,7 +1312,8 @@ class Downstream():
 
     @classmethod
     @retry(subprocess.CalledProcessError)
-    def co(cls, package, cwd=None, spec_only=False):
+    def checkout(cls, package, cwd=None, spec_only=False):
+        """Check out a package from the repository"""
         if cwd is None:
             cwd = os.path.expanduser(cls.PKGROOT)
 
@@ -1125,7 +1327,9 @@ class Downstream():
 
     @classmethod
     @retry(subprocess.CalledProcessError)
-    def ci(cls, package, changes, cwd=None):
+    def checkin(cls, package, changes, cwd=None):
+        """Check in changes to the repository"""
+
         if cwd is None:
             cwd = cls.package_path(package)
 
@@ -1134,6 +1338,10 @@ class Downstream():
 
     @classmethod
     def submit(cls, package, cwd=None):
+        """Submit a package to the buildsystem
+
+        If a specific distro release is chosen, the section will be set to
+        core/updates_testing."""
         if cwd is None:
             cwd = cls.package_path(package)
 
@@ -1151,6 +1359,13 @@ class Downstream():
 
 
     def get_downstream_from_upstream(self, upstream, version):
+        """Provide the downstream package(s) for a given upstream module name
+        and version
+
+        This will raise a ValueError exception in case a good match cannot be
+        found"""
+
+
         if upstream not in self.tarballs:
             raise ValueError("No packages for upstream name: %s" % upstream)
 
@@ -1161,7 +1376,7 @@ class Downstream():
         for package in list(self.tarballs[upstream].keys()):
             # Checkout package to ensure the checkout reflects the latest changes
             try:
-                self.co(package)
+                self.checkout(package)
             except subprocess.CalledProcessError:
                 raise ValueError("Multiple packages found and cannot checkout %s" % package)
 
@@ -1188,7 +1403,9 @@ class Downstream():
         #   ensure it follows get_safe_max_version
         if latest_version != latest_possible_version and version_cmp(get_safe_max_version(latest_version, upstream), \
                 version) != 1:
-            raise ValueError("Multiple packages found and unsafe version increase: %s (%s => %s)" % (upstream, latest_version, version))
+            raise ValueError("Multiple packages found and unsafe version increase: %s (%s => %s)" % (
+                upstream, latest_version, version) \
+            )
 
         # - now really get the right packages
         matches = [package for package in packages if packages[package] == latest_version]
@@ -1199,22 +1416,26 @@ class Downstream():
         raise ValueError("Multiple packages found and cannot determine package for version %s" % version)
 
 def write_file(path, data):
+    """Write to a file by creating a temporary file and renaming that to the
+    new file"""
+
     with tempfile.NamedTemporaryFile(mode='w+t', dir=os.path.dirname(path), delete=False, encoding="utf-8") as fdst:
         fdst.write(data)
         fdst.flush()
         os.rename(fdst.name, path)
 
-def cmd_co_multi(args):
+def _cmd_checkout_multi(args):
     package, what_to_print, options = args
 
     print(what_to_print)
 
     try:
-        Downstream.co(package, spec_only=options.spec_only)
+        Downstream.checkout(package, spec_only=options.spec_only)
     except subprocess.CalledProcessError:
         pass
 
-def cmd_co(options):
+def cmd_checkout(options):
+    """Check out various packages in parallel"""
     if options.all:
         packages = ((package, package, options) for package in Downstream().packages)
     elif options.package:
@@ -1224,12 +1445,20 @@ def cmd_co(options):
 
     if options.debug:
         for package in packages:
-            cmd_co_multi(package)
+            _cmd_checkout_multi(package)
     else:
         with concurrent.futures.ProcessPoolExecutor(max_workers=8) as executor:
-            executor.map(cmd_co_multi, packages)
+            executor.map(_cmd_checkout_multi, packages)
 
 def join_streams(show_version=False, only_diff_version=False, auto_update=True):
+    """Links upstream modules with downstream packages
+
+    To figure out the downstream name of an upstream package, the Downstream
+    class has logic to extract the name from a tarball file.
+
+    If information is requested whereby the spec file is needed this function
+    will automatically checkout any package"""
+
     upstream = Upstream().names
     downstream = Downstream()
 
@@ -1244,7 +1473,7 @@ def join_streams(show_version=False, only_diff_version=False, auto_update=True):
                 # ensure package is checked out
                 if not os.path.exists(cwd):
                     try:
-                        downstream.co(package)
+                        downstream.checkout(package)
                     except subprocess.CalledProcessError:
                         # XXX - ignoring packages which cannot be checked out
                         continue
@@ -1256,7 +1485,7 @@ def join_streams(show_version=False, only_diff_version=False, auto_update=True):
                 # in case upstream version is newer, update checkout
                 if auto_update and package_version != spec_version and version_cmp(package_version, spec_version) == 1:
                     try:
-                        downstream.co(package)
+                        downstream.checkout(package)
                         spec_version = downstream.package_spec(package).version
                     except subprocess.CalledProcessError:
                         pass
@@ -1267,9 +1496,13 @@ def join_streams(show_version=False, only_diff_version=False, auto_update=True):
             yield (package, module, package_version, spec_version, downstream.files[package])
 
 def cmd_group_owner(options):
+    """Show the packages of a rpm group"""
+
+
     groups = set(options.group)
 
-    output = [pkg.split("\t") for pkg in subprocess.check_output(["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]).decode("utf-8").splitlines()]
+    cmd = ["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]
+    output = [pkg.split("\t") for pkg in subprocess.check_output(cmd).decode("utf-8").splitlines()]
     if not output:
         return
 
@@ -1284,12 +1517,9 @@ def cmd_group_owner(options):
             packages[group] = {}
 
         source = sourcerpm if sourcerpm else name
-        end = ".src.rpm"
-        if source.endswith(end):
-            source = source[:len(source) - len(end)]
-        end = "-%s-%s" %(version, release)
-        if source.endswith(end):
-            source = source[:len(source) - len(end)]
+        for end in (".src.rpm", "-%s-%s" % (version, release)):
+            if source.endswith(end):
+                source = source[:len(source) - len(end)]
 
         if source not in packages[group]:
             packages[group][source] = set()
@@ -1297,7 +1527,10 @@ def cmd_group_owner(options):
         packages[group][source].add(name)
 
 
-    maints = {line.rpartition(" ")[::2] for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).decode("utf-8").splitlines()}
+    maints = {
+        line.rpartition(" ")[::2]
+        for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).decode("utf-8").splitlines()
+    }
 
     def get_output(source, maints, packages):
         for package in list(packages.keys()):
@@ -1319,6 +1552,16 @@ def cmd_group_owner(options):
             print(line)
 
 def cmd_cleanup(_):
+    """Clean up the package root
+
+    Removes things such as:
+      - unneeded files in SOURCES (e.g. old tarballs)
+
+    In future will also:
+      - clean old directories in BUILDDIR
+      - clean old files in RPMS and SRPMS"""
+
+
     root = os.path.expanduser(Downstream.PKGROOT)
 
 #    packages = set(Downstream().packages)
@@ -1327,20 +1570,23 @@ def cmd_cleanup(_):
 
 #    dirs = dirs - packages
 
-    import pysvn
+    import pysvn # pylint: disable=import-outside-toplevel
     dirs = [o for o in dirs if os.path.exists(os.path.join(root, o, "SOURCES", 'sha1.lst'))]
 
     for path in dirs:
         try:
-            binaries = set((l.split('  ', 1)[1] for l in open(os.path.join(root, path, 'SOURCES', 'sha1.lst')).read().splitlines()))
+            binaries = set((l.split('  ', 1)[1]
+                            for l in open(os.path.join(root, path, 'SOURCES', 'sha1.lst')).read().splitlines()))
         except IndexError:
             print(os.path.join(root, path, 'SOURCES', 'sha1.lst'))
 #            shutil.rmtree(os.path.join(root, path))
-#            Downstream.co(path)
+#            Downstream.checkout(path)
             continue
 
         vcs = pysvn.Client()
-        stats = [stat for stat in vcs.status(os.path.join(root, path, 'SOURCES'), depth=pysvn.depth.immediates) if stat.text_status == pysvn.wc_status_kind.unversioned and os.path.basename(stat.path) not in binaries]
+        stats = [stat for stat in vcs.status(os.path.join(root, path, 'SOURCES'), depth=pysvn.depth.immediates) # pylint: disable=no-member
+                 if stat.text_status == pysvn.wc_status_kind.unversioned # pylint: disable=no-member
+                 and os.path.basename(stat.path) not in binaries]  # pylint: disable=no-member
 
         if stats:
             print(path)
@@ -1353,6 +1599,10 @@ def cmd_cleanup(_):
                     shutil.rmtree(stat.path)
 
 def cmd_ls(options):
+    """Show upstream module names, downstream package names
+
+    Optionally can even show version numbers"""
+
     streams = join_streams(show_version=options.show_version, only_diff_version=options.diff)
     if options.sort:
         # Sort packages on the line number in the file
@@ -1373,6 +1623,12 @@ def cmd_ls(options):
         print()
 
 def cmd_check_version(_):
+    """Check for version mismatches between what's available in the spec
+    version and as a source rpm.
+
+    Example usage: locally check for failed builds"""
+
+
     streams = join_streams(show_version=True)
 
     for package, _, package_version, spec_version, _ in streams:
@@ -1384,6 +1640,8 @@ def cmd_check_version(_):
         sys.stdout.write("\n")
 
 def cmd_check_latest(options):
+    """Check if a package has a newer version upstream"""
+
     streams = join_streams(show_version=True)
 
     for package, module, package_version, spec_version, _ in streams:
@@ -1425,20 +1683,24 @@ def cmd_check_latest(options):
             subprocess.call(cmd, cwd=Downstream.package_path(package))
 
 def cmd_patches(options):
+    """List files with extension .patch or .diff as found in the source rpms"""
     root = os.path.expanduser(Downstream.PKGROOT)
 
     for package, module, _, _, downstream_files in sorted(join_streams()):
         for filename in downstream_files:
             if '.patch' in filename or '.diff' in filename:
 
-                p = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
-                print("\t".join((module, package, str(p))))
+                this_patch = Patch(os.path.join(root, package, "SOURCES", filename), show_path=options.path)
+                print("\t".join((module, package, str(this_patch))))
 
 def cmd_check_prep(options):
+    """Check if the %prep stage can be run successfully
+
+    This runs the %prep stage without dependencies"""
     spec = Downstream.package_spec(options.package)
     spec.check_and_update_patches()
 
-def cmd_clean_spec_multi(args):
+def _cmd_clean_spec_multi(args):
     options, package = args
 
     print(package)
@@ -1447,7 +1709,7 @@ def cmd_clean_spec_multi(args):
     path = os.path.join(cwd, "SPECS", "%s.spec" % package)
     if not os.path.exists(path):
         try:
-            Downstream.co(package)
+            Downstream.checkout(package)
         except subprocess.CalledProcessError:
             print('WARNING: Cannot check package %s. Skipping.' % package, file=sys.stderr)
             return False
@@ -1479,12 +1741,17 @@ def cmd_clean_spec_multi(args):
                 'check_br': lambda req: req.endswith('-devel'),
                 'check_provide': lambda prov: prov.startswith('pkgconfig('),
                 'basereqs': lambda req: [req[:-len('-devel')]],
-                'basereq_no_version': lambda basereqs: [basereq.rstrip('1234567890.') for basereq in basereqs if basereq[-1] in '1234567890'],
-                'versions_from_basereq': lambda basereqs: set((basereq[len(basereq.rstrip('01234567890.')):] for basereq in basereqs if basereq[-1] in '1234567890')),
-                'versions_basereq_extra': lambda versions: set(("%s.0" % version for version in versions if '.' not in version)),
+                'basereq_no_version': lambda basereqs: [basereq.rstrip('1234567890.') for basereq in basereqs
+                                                        if basereq[-1] in '1234567890'],
+                'versions_from_basereq': lambda basereqs: set((basereq[len(basereq.rstrip('01234567890.')):]
+                                                               for basereq in basereqs if basereq[-1] in '1234567890')),
+                'versions_basereq_extra': lambda versions: set(("%s.0" % version for version in versions
+                                                                if '.' not in version)),
                 'extra': lambda basereqs, versions: \
                     ['pkgconfig(%s)' % basereq for basereq in basereqs] +
-                         ['pkgconfig(%s)' % basereq[len('lib'):] if basereq.startswith('lib') else 'pkgconfig(lib%s)' % basereq for basereq in basereqs] +
+                         ['pkgconfig(%s)' % basereq[len('lib'):]
+                          if basereq.startswith('lib') else 'pkgconfig(lib%s)' % basereq
+                          for basereq in basereqs] +
                          ['pkgconfig(%s-%s)' % (basereq, version) for basereq in basereqs for version in versions],
             },
             'perl': {
@@ -1505,14 +1772,14 @@ def cmd_clean_spec_multi(args):
             'python-pkg': {
                 'disabled': True,
                 'desc': 'convert python buildrequires into python3dist()',
-                'check_br':  lambda req: req.startswith('python3-'),
+                'check_br': lambda req: req.startswith('python3-'),
                 'check_provide': lambda prov: prov.startswith('python3dist('),
                 'basereqs': lambda req: [req[len('python3-'):]],
                 'extra': lambda basereqs, versions: ['python3dist(%s)' % basereq for basereq in basereqs],
             },
             'python-egg': {
                 'desc': 'convert pythonegg(3) into python3dist()',
-                'check_br':  lambda req: req.startswith('pythonegg(3)(') and req.endswith(')'),
+                'check_br': lambda req: req.startswith('pythonegg(3)(') and req.endswith(')'),
                 'check_provide': lambda prov: prov.startswith('python3dist('),
                 'basereqs': lambda req: [req[len('pythonegg(3)('):-1]],
                 'extra': lambda basereqs, versions: ['python3dist(%s)' % basereq for basereq in basereqs],
@@ -1531,8 +1798,12 @@ def cmd_clean_spec_multi(args):
                 every_provides, every_ignored_provide = Downstream.alternative_provides(req)
                 # XXX - document what clean_pkgconfig_prov is for
                 #       maybe integrate clean_pkgconfig_prov in alternative_provides function?
-                provides = [clean_pkgconfig_prov(prov) for prov in every_provides if keys['check_provide'](prov)]
-                provides_ignored = [clean_pkgconfig_prov(prov) for prov in every_ignored_provide if keys['check_provide'](prov)]
+                provides = [clean_pkgconfig_prov(prov)
+                            for prov in every_provides
+                            if keys['check_provide'](prov)]
+                provides_ignored = [clean_pkgconfig_prov(prov)
+                                    for prov in every_ignored_provide
+                                    if keys['check_provide'](prov)]
                 change_to = None
                 if len(provides) == 1 and not provides_ignored:
                     if options.debug:
@@ -1568,7 +1839,8 @@ def cmd_clean_spec_multi(args):
                         for prov in provides:
                             for match in re_prov_get_version.finditer(prov):
                                 if options.debug:
-                                    print("NOTICE: Heuristically adding version %s from provide %s" % (match.group('version'), prov))
+                                    print("NOTICE: Heuristically adding version %s from provide %s" \
+                                            % (match.group('version'), prov))
                                 versions.add(match.group('version'))
 
                     check_for = keys['extra'](basereqs, versions)
@@ -1633,7 +1905,7 @@ def cmd_clean_spec_multi(args):
     # If we made it this far, checkin the changes
     if made_changes:
         if options.doit:
-            Downstream.ci(package, spec.changes, cwd=cwd)
+            Downstream.checkin(package, spec.changes, cwd=cwd)
             if spec.should_rebuild:
                 cmd = ['mgagnome', 'rebuild', '-s', '-m', 'to test removal of deprecated macros', package]
                 subprocess.call(cmd, cwd=cwd)
@@ -1646,7 +1918,7 @@ def cmd_clean_spec_multi(args):
 
     return made_changes
 
-def cmd_check_spec_multi(args):
+def _cmd_check_spec_multi(args):
     _, package = args
     cwd = Downstream.package_path(package)
 
@@ -1666,6 +1938,7 @@ def cmd_check_spec_multi(args):
 
 
 def cmd_check_spec(options):
+    """Check if the spec files can be parsed by rpm"""
     if options.all:
         packages = Downstream().packages
     else:
@@ -1674,15 +1947,21 @@ def cmd_check_spec(options):
 
     if options.debug:
         for package in packages:
-            cmd_check_spec_multi((options, package))
+            _cmd_check_spec_multi((options, package))
     else:
         workers = os.cpu_count() or 4
         with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
-            executor.map(cmd_check_spec_multi, ((options, package) for package in packages))
+            executor.map(_cmd_check_spec_multi, ((options, package) for package in packages))
 
 
 
 def cmd_clean_spec(options):
+    """Clean spec files
+
+    Updates spec files to remove known deprecated statements from spec files
+
+    Can optionally also change the BuildRequires"""
+
     if options.all:
         packages = Downstream().packages
     else:
@@ -1691,16 +1970,20 @@ def cmd_clean_spec(options):
 
     if options.debug:
         for package in packages:
-            cmd_clean_spec_multi((options, package))
+            _cmd_clean_spec_multi((options, package))
     else:
         workers = os.cpu_count() or 4
         # Hack: warm alternative provides cache
         if options.convert_br:
             Downstream.alternative_provides('XXXX')
         with concurrent.futures.ProcessPoolExecutor(max_workers=workers) as executor:
-            executor.map(cmd_clean_spec_multi, ((options, package) for package in packages))
+            executor.map(_cmd_clean_spec_multi, ((options, package) for package in packages))
 
 def cmd_new_release(options):
+    """Increase the release of a package (source rpm) and submit that to the buildsystem
+
+    Also called a rebuild"""
+
     success = True
     for pkg in options.package:
         # Determine the package name
@@ -1718,7 +2001,7 @@ def cmd_new_release(options):
 
         # Checkout package to ensure the checkout reflects the latest changes
         try:
-            Downstream.co(package)
+            Downstream.checkout(package)
         except subprocess.CalledProcessError:
             subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
             success = False
@@ -1749,7 +2032,7 @@ def cmd_new_release(options):
 
         try:
             # If we made it this far, checkin the changes
-            Downstream.ci(package, spec.changes, cwd=cwd)
+            Downstream.checkin(package, spec.changes, cwd=cwd)
 
             # Submit is optional
             if options.submit:
@@ -1762,6 +2045,10 @@ def cmd_new_release(options):
         sys.exit(1)
 
 def cmd_package_new_version(options):
+    """Increase the version number of a package (source rpm)
+
+    Can optionally submit the package to the buildsystem."""
+
     # Determine the package name
     if options.upstream:
         try:
@@ -1777,7 +2064,7 @@ def cmd_package_new_version(options):
 
     # Checkout package to ensure the checkout reflects the latest changes
     try:
-        Downstream.co(package)
+        Downstream.checkout(package)
     except subprocess.CalledProcessError:
         subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
         sys.exit(1)
@@ -1813,7 +2100,7 @@ def cmd_package_new_version(options):
 
         for filename in sources:
             path = os.path.join(cwd, "SOURCES", filename)
-            if not is_valid_hash(path, options.algo, options.hexdigest):
+            if not _is_valid_hash(path, options.algo, options.hexdigest):
                 print("ERROR: Hash file failed check for %s!" % path, file=sys.stderr)
                 print("ERROR: Reverting changes!", file=sys.stderr)
                 subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
@@ -1821,7 +2108,7 @@ def cmd_package_new_version(options):
 
     try:
         # If we made it this far, checkin the changes
-        Downstream.ci(package, spec.changes, cwd=cwd)
+        Downstream.checkin(package, spec.changes, cwd=cwd)
 
         # Submit is optional
         if options.submit:
@@ -1831,6 +2118,10 @@ def cmd_package_new_version(options):
         sys.exit(1)
 
 def cmd_parse_ftp_release_list(options):
+    """Submit packages by parsing the GNOME ftp-release-list email
+
+    """
+
     def _send_reply_mail(contents, orig_msg, to_addr, packages=None, error=False):
         """Send an reply email"""
         contents.seek(0)
@@ -1861,11 +2152,11 @@ def cmd_parse_ftp_release_list(options):
         # Call sendmail program directly so it doesn't matter if the service is running
         cmd = ['/usr/sbin/sendmail', '-oi', '--']
         cmd.extend([to_addr])
-        p = subprocess.Popen(cmd, stdin=subprocess.PIPE)
-        p.stdin.write(msg.as_bytes())
-        p.stdin.flush()
-        p.stdin.close()
-        p.wait()
+        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
+        proc.stdin.write(msg.as_bytes())
+        proc.stdin.flush()
+        proc.stdin.close()
+        proc.wait()
 
 
     msg = email.message_from_file(sys.stdin)
@@ -1930,6 +2221,11 @@ def cmd_parse_ftp_release_list(options):
         _send_reply_mail(stdout, msg, options.mail, packages=packages, error=error)
 
 def main():
+    """Parse arguments and call main subfunction
+
+    The subfunctions are the functions whose name starts with cmd_"""
+
+
     description = """Mageia GNOME commands."""
     epilog = """Report bugs to Olav Vitters"""
     parser = argparse.ArgumentParser(description=description, epilog=epilog)
@@ -1998,10 +2294,11 @@ def main():
                            help="only checkout SPECS/ directory")
     subparser.add_argument("package", help="Package name", nargs='*')
     subparser.set_defaults(
-        func=cmd_co, all=False
+        func=cmd_checkout, all=False
     )
 
-    subparser = subparsers.add_parser('gnome-release-email', help='submit packages based on GNOME ftp-release-list email')
+    subparser = subparsers.add_parser('gnome-release-email',
+                                      help='submit packages based on GNOME ftp-release-list email')
     subparser.add_argument("-m", "--mail", help="Email address to send the progress to")
     subparser.add_argument("--fork", action="store_true",
                            help="Fork as quickly as possible")
@@ -2094,7 +2391,8 @@ def main():
     Downstream.PKGROOT = options.PKGROOT
     if options.distro:
         Downstream.PKGROOT = os.path.join(options.PKGROOT, options.distro)
-        Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source,Core {0} Updates Testing Source".format(options.distro)
+        Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source," \
+                "Core {0} Updates Testing Source".format(options.distro)
         Downstream.DISTRO = options.distro
 
     try:
-- 
cgit v1.2.1