From aeb84f81397f0608da98d12c019fc23c4b038475 Mon Sep 17 00:00:00 2001 From: Olav Vitters Date: Thu, 23 Apr 2020 17:33:08 +0200 Subject: pylint: remove various unused variables, avoid len check, etc --- mgagnome | 81 +++++++++++++++++++++++++++++----------------------------------- 1 file changed, 36 insertions(+), 45 deletions(-) diff --git a/mgagnome b/mgagnome index d4c19bf..0361850 100755 --- a/mgagnome +++ b/mgagnome @@ -1,4 +1,5 @@ #!/usr/bin/python3 -u +"""Mageia GNOME commands.""" # A lot of the code comes from ftpadmin, see # https://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin @@ -165,10 +166,10 @@ def get_safe_max_version(version, module=None): min_nr = int(majmin[1]) - if min_nr % 2 == 0: - return "%s.%d" % (majmin[0], min_nr + 1) - else: - return "%s.%d" % (majmin[0], min_nr + 2) + # Add 2 for stable releases, 1 for unstable + min_nr += 1 if min_nr % 2 == 0 else 2 + + return "%s.%d" % (majmin[0], min_nr) def judge_version_increase(version_old, version_new, module=None): """Judge quality of version increase: @@ -278,7 +279,7 @@ def call_editor(filename): return True -class urllister(HTMLParser): +class URLLister(HTMLParser): def reset(self): HTMLParser.reset(self) self.urls = [] @@ -311,10 +312,9 @@ def clean_pkgconfig_prov(prov): class SpecFileError(Exception): """Used for problems in the spec file""" - pass -class SpecFile(object): +class SpecFile(): re_update_version = re.compile(r'^(?P
<pre>Version[ \t]*:\s*)(?P<version>.+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
     re_update_release = re.compile(r'^(?P<pre>Release[ \t]*:\s*)(?P<release>%mkrel [0-9.]+)(?P<post>\s*)$', re.MULTILINE + re.IGNORECASE)
     re_update_patch = re.compile(r'^(?P<pre>Patch0*?)(?P<nr>[0-9]*)(?P<pre2>[ \t]*:\s*)(?P<patch>.+)(?P<post>\s*)\n', re.MULTILINE + re.IGNORECASE)
@@ -416,7 +416,7 @@ class SpecFile(object):
         with open(self.path, "r", encoding="utf-8") as f:
             data = f.read()
             for reason, change_to, regexp, *extra in re_clean_spec:
-                if len(extra):
+                if extra:
                     should_rebuild = extra[0]
                 else:
                     should_rebuild = False
@@ -480,7 +480,7 @@ class SpecFile(object):
                 data = data.lstrip()
                 self._changes['SILENT remove variable definition(s) %s' % ", ".join(converted_defines)] = True
 
-            made_changes, data = self._clean_spec_patches(f, made_changes, data)
+            made_changes, data = self._clean_spec_patches(made_changes, data)
 
             # Overwrite file with new version number
             if made_changes:
@@ -489,7 +489,7 @@ class SpecFile(object):
 
         return made_changes
 
-    def _clean_spec_patches(self, f, made_changes, data):
+    def _clean_spec_patches(self, made_changes, data):
         re_autopatch = re.compile(r'^[ \t]*\%autopatch(?:[ \t]+-p(?P<strip>[0-9]+))?$', re.MULTILINE)
 
     re_patch_header = re.compile('^Patch(?P<nr>[0-9]*)[ \t]*:[ \t]*(?P<patch>[^\n]+)\n', re.MULTILINE + re.IGNORECASE)
@@ -619,12 +619,9 @@ class SpecFile(object):
         with open(self.path, "r", encoding="utf-8") as f:
             data = f.read()
 
-            len_before = len(data)
-
             data, nr = self.re_update_patch.subn(lambda mo: '' if mo.group('nr') in nrs or (mo.group('nr').isdigit() and int(mo.group('nr')) in nrs) else mo.group(0), data)
 
-            # XXX - pretty hacky
-            if len(data) == len_before:
+            if not nr:
                 print("ERROR: Could not remove patch nr %s!" % patchnr, file=sys.stderr)
                 return False
 
@@ -746,28 +743,23 @@ class SpecFile(object):
 
         with open(self.path, "r", encoding="utf-8") as f:
             data = f.read()
-            data_before = data
 
             # Change any "," in buildrequires into multiple lines
             data, nr = self.re_update_br_unsplit.subn(lambda mo: ''.join((''.join((mo.group('pre'), mo2.group(0), mo.group('unsplitpost'))) for mo2 in self.re_br_part.finditer(mo.group('unsplit')) if mo.group(0).strip() != '')), data)
-            if data_before != data:
+            if nr:
                 made_changes = True
                 self._changes['SILENT one line per buildrequirement'] = True
-                data_before = data
 
             # Change =< and => operators into <= and >=
-            # XXX - pretty ugly
             data, nr = self.re_update_br_fix_operator.subn(lambda mo: mo.group(0).replace('=>', '>=').replace('=<', '<=') if self.re_update_br.match(mo.group(0).replace('=>', '>=').replace('=<', '<=')) else mo.group(0), data)
-            if data_before != data:
+            if nr:
                 made_changes = True
                 self._changes['SILENT fix operator in buildrequires'] = True
-                data_before = data
 
             # Now update buildrequires if any
             data, nr = self.re_update_br.subn(lambda mo: ''.join((mo.group('pre'), changes[mo.group('br')], mo.group('post'))) if mo.group('br') in changes else mo.group(0), data)
 
-            # XXX - very hacky because of multiple changes, could miss out on a change
-            if data_before != data:
+            if nr:
                 made_changes = True
                 self._changes['SILENT %s' % change_description] = True
             elif len(changes) != 0:
@@ -822,8 +814,6 @@ class SpecFile(object):
 
     def update_release(self, release, reason, force=False):
         """Update release (usually for rebuilds)"""
-        cur_release = self.release
-
         data = self._check_can_update(force)
         if data is None:
             return False
@@ -907,7 +897,7 @@ class SpecFile(object):
 
         return self.check_and_update_patches()
 
-class Patch(object):
+class Patch():
     """Do things with patches"""
 
     re_dep3 = re.compile(r'^(?:#\s*)?(?P
[-A-Za-z0-9]+?):\s*(?P.*)$') @@ -1048,7 +1038,7 @@ class Patch(object): return self._svn_author -class Upstream(object): +class Upstream(): URL = "https://download.gnome.org/sources/" limit = None @@ -1061,7 +1051,7 @@ class Upstream(object): # Get the files usock = urlopen.open(self.URL) - parser = urllister() + parser = URLLister() parser.feed(usock.read().decode('utf-8')) usock.close() parser.close() @@ -1089,7 +1079,7 @@ class Upstream(object): return cls._cache_versions[module] -class Downstream(object): +class Downstream(): re_file = re.compile(r'^(?P.*)[_-](?:(?P([0-9]+[\.])*[0-9]+)-)?(?P([0-9]+[\.\-])*[0-9]+)\.(?P(?:tar\.|diff\.)?[a-z][a-z0-9]*)$') MEDIA = "Core Release Source" @@ -1155,7 +1145,7 @@ class Downstream(object): Sort of works like: $ urpmq --whatprovides $search_for --provides""" - if not len(cls._provide_to_alternate): + if not cls._provide_to_alternate: provide_has_multiple_pkgs = set() _provide_to_pkg = {} _pkg_to_provide = {} @@ -1261,7 +1251,7 @@ class Downstream(object): # Return all packages reflecting the current version matches = [package for package in packages if packages[package] == version] - if len(matches): + if matches: return matches # Return all packages reflecting the version before the current version @@ -1280,7 +1270,7 @@ class Downstream(object): # - now really get the right packages matches = [package for package in packages if packages[package] == latest_version] - if len(matches): + if matches: return matches # Give up @@ -1305,7 +1295,7 @@ def cmd_co_multi(args): def cmd_co(options, parser): if options.all: packages = ((package, package, options) for package in Downstream().packages) - elif len(options.package): + elif options.package: packages = ((package, package, options) for package in options.package) else: packages = ((l[0], "%s => %s" % (l[0], l[1]), options) for l in sorted(join_streams(auto_update=False))) @@ -1425,7 +1415,7 @@ def cmd_cleanup(options, parser): c2 = pysvn.Client() stats = [stat for stat in 
c2.status(os.path.join(root, path, 'SOURCES'), depth=pysvn.depth.immediates) if stat.text_status == pysvn.wc_status_kind.unversioned and os.path.basename(stat.path) not in binaries] - if len(stats): + if stats: print(path) print(", ".join(os.path.basename(stat.path) for stat in stats)) print(stats) @@ -1625,7 +1615,7 @@ def cmd_clean_spec_multi(args): change_to = None if len(provides_alt) == 1: change_to = provides_alt[0] - elif len(provides_alt) and 'extra' in keys: + elif provides_alt and 'extra' in keys: # Determine base require (e.g. gtk+3.0-devel --> gtk+3.0) basereqs = keys['basereqs'](req) @@ -1634,7 +1624,7 @@ def cmd_clean_spec_multi(args): if 'versions_from_basereq' in keys: # Determine if the basereq has a version at the end (e.g. gtk+3.0 --> 3.0) versions.update(keys['versions_from_basereq'](basereqs)) - if len(versions) and 'basereq_no_version' in keys: + if versions and 'basereq_no_version' in keys: basereqs.extend(keys['basereq_no_version'](basereqs)) # Make it unique again, but keep the order # @@ -1644,7 +1634,7 @@ def cmd_clean_spec_multi(args): if 'versions_basereq_extra' in keys: versions.update(keys['versions_basereq_extra'](versions)) - if not len(versions): + if not versions: # In case no versions were retrieved from the basereq, # match with any version found from the alternative # provides (heuristic matching) @@ -1667,7 +1657,7 @@ def cmd_clean_spec_multi(args): change_to = check break - if change_to is None and len(provides_alt): + if change_to is None and provides_alt: provides_alt_no_versions = [] for prov in provides_alt: if re_prov_get_version.fullmatch(prov) is None: @@ -1677,7 +1667,7 @@ def cmd_clean_spec_multi(args): change_to = provides_alt_no_versions[0] - if len(provides_alt): + if provides_alt: if change_to is None: no_change[req] = (provides_alt, check_for) else: no_alt.add(req) @@ -1719,6 +1709,8 @@ def cmd_clean_spec_multi(args): print(s.changes) s.ensure_no_local_changes(force=True) + return made_changes + def 
cmd_check_spec_multi(args): options, package = args cwd = Downstream.package_path(package) @@ -1742,14 +1734,13 @@ def cmd_check_spec(options, parser): if options.all: packages = Downstream().packages else: - packages = options.package if len(options.package) else (l[0] for l in join_streams()) + packages = options.package if options.package else (l[0] for l in join_streams()) if options.debug: for package in packages: cmd_check_spec_multi((options, package)) else: - import os workers = os.cpu_count() or 4 with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor: executor.map(cmd_check_spec_multi, ((options, package) for package in packages)) @@ -1760,14 +1751,13 @@ def cmd_clean_spec(options, parser): if options.all: packages = Downstream().packages else: - packages = options.package if len(options.package) else (l[0] for l in join_streams()) + packages = options.package if options.package else (l[0] for l in join_streams()) if options.debug: for package in packages: cmd_clean_spec_multi((options, package)) else: - import os workers = os.cpu_count() or 4 # Hack: warm alternative provides cache if options.convert_br: @@ -1884,9 +1874,9 @@ def cmd_package_new_version(options, parser): # Check hash, if given if options.hexdigest is not None: sources = [name for name, value in s.sources.items() if '://' in value[0]] - if not len(sources): + if not sources: print("ERROR: Cannot determine source file (for hash check)!", file=sys.stderr) - sys.stderr(1) + sys.exit(1) # If there are multiple sources, try to see if there is a preferred name # --> needed for metacity hash check (multiple tarball sources) @@ -1917,7 +1907,8 @@ def cmd_package_new_version(options, parser): # retry submission various times, could be that some dependencies are being built at the same time @retry(subprocess.CalledProcessError, tries=10, delay=300, backoff=1.5) - def _submit(): subprocess.check_call(cmd, cwd=cwd) + def _submit(): + subprocess.check_call(cmd, cwd=cwd) _submit() 
except subprocess.CalledProcessError: sys.exit(1) -- cgit v1.2.1