summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorOlav Vitters <olav@vitters.nl>2020-05-07 16:49:50 +0200
committerOlav Vitters <olav@vitters.nl>2020-05-07 16:49:50 +0200
commit70908038e1729c264ff92c5ce3892f93a28f0bbe (patch)
tree4092bef171884569942d5b00d096771fea3304de
parent9dded9b45bfd08168c960112d0f26db740c2ed23 (diff)
downloadmgagnome-70908038e1729c264ff92c5ce3892f93a28f0bbe.tar
mgagnome-70908038e1729c264ff92c5ce3892f93a28f0bbe.tar.gz
mgagnome-70908038e1729c264ff92c5ce3892f93a28f0bbe.tar.bz2
mgagnome-70908038e1729c264ff92c5ce3892f93a28f0bbe.tar.xz
mgagnome-70908038e1729c264ff92c5ce3892f93a28f0bbe.zip
make use of pathlib
-rwxr-xr-xmgagnome134
1 files changed, 66 insertions, 68 deletions
diff --git a/mgagnome b/mgagnome
index 4440a2e..8c80bd5 100755
--- a/mgagnome
+++ b/mgagnome
@@ -9,7 +9,7 @@ from functools import wraps, lru_cache, cached_property
# basic modules:
import os
-import os.path
+from pathlib import Path
import sys
import re
import subprocess
@@ -46,9 +46,6 @@ import shlex
import concurrent.futures
-# for merging comments in order
-import collections
-
# for debugging output
import pprint
import logging
@@ -285,7 +282,7 @@ def _is_valid_hash(path, algo, hexdigest):
local_hash = getattr(hashlib, algo)()
- with open(path, 'rb') as fp_file:
+ with path.open('rb') as fp_file:
data = fp_file.read(32768)
while data:
local_hash.update(data)
@@ -353,12 +350,14 @@ class SpecFile():
if package:
self.cwd = package.path
else:
- self.cwd = os.path.dirname(path)
+ self.cwd = path.parent
# If spec file is located in SPECS directory, cwd should be 1
# directory higher
- if os.path.basename(self.cwd) == 'SPECS':
- self.cwd = os.path.dirname(self.cwd)
- self._changes = collections.OrderedDict()
+ if self.cwd.name == 'SPECS':
+ self.cwd = self.cwd.parent
+
+ # WARNING: Requires Python 3.7+ as that version guarantees dict ordering
+ self._changes = dict()
self._should_rebuild = False
self._changed_spec_file = False
@@ -379,7 +378,7 @@ class SpecFile():
"""Provide the version as parsed by rpm"""
cmd = ["rpm", "--define", "_topdir %s" % self.cwd, "--specfile", self.path,
"--queryformat", "%{VERSION}\n"]
- return subprocess.check_output(cmd).decode("utf-8").splitlines()[0]
+ return subprocess.check_output(cmd, encoding='utf-8').splitlines()[0]
@property
def should_rebuild(self):
@@ -391,7 +390,7 @@ class SpecFile():
"""Provide the release as parsed by rpm"""
cmd = ["rpm", "--define", "%dist %nil", "--define", "_topdir %s" % self.cwd,
"--specfile", self.path, "--queryformat", "%{RELEASE}\n"]
- return subprocess.check_output(cmd).decode("utf-8").splitlines()[0]
+ return subprocess.check_output(cmd, encoding='utf-8').splitlines()[0]
def _sources_and_patches(self, flag=None):
os.chdir(self.cwd)
@@ -410,7 +409,7 @@ class SpecFile():
# reload the config to fix this
rpm.reloadConfig()
return dict(
- (os.path.basename(name), [name, 0 if no == 2147483647 and flags == 2 else no])
+ (Path(name).name, [name, 0 if no == 2147483647 and flags == 2 else no])
for name, no, flags in srclist if flag is None or flags == flag
)
@@ -610,7 +609,7 @@ class SpecFile():
"""Clean the spec file of deprecated statements"""
made_changes = False
- with open(self.path, "r", encoding="utf-8") as fp_spec:
+ with self.path.open('r', encoding='utf-8') as fp_spec:
data = fp_spec.read()
made_changes, data = self._clean_spec_regexp(made_changes, data)
@@ -1002,7 +1001,7 @@ class SpecFile():
if patchnr == 0:
nrs.append('')
- with open(self.path, "r", encoding="utf-8") as fp_spec:
+ with self.path.open('r', encoding='utf-8') as fp_spec:
data = fp_spec.read()
data, subs = self.re_update_patch.subn(
@@ -1040,9 +1039,8 @@ class SpecFile():
# Check patches still apply
subprocess.check_call(['bm', '-p', '--nodeps'], cwd=self.cwd)
except subprocess.CalledProcessError:
- logfile = os.path.join(
- os.path.dirname(self.path),
- 'log.%s' % os.path.splitext(os.path.basename(self.path))[0]
+ logfile = self.path.parent.joinpath(
+            'log.%s' % self.path.stem
)
failed_patch = None
@@ -1050,9 +1048,9 @@ class SpecFile():
cmd_output = []
cmd_before = (None, None)
# Determine the last command that failed
- if os.path.exists(logfile):
+ if logfile.exists():
print(logfile)
- with open(logfile, "r", encoding="utf-8") as fp_logfile:
+ with logfile.open('r', encoding='utf-8') as fp_logfile:
for line in line_input(fp_logfile):
if line.startswith('+ '):
cmd_before = (cmd, cmd_before)
@@ -1065,14 +1063,14 @@ class SpecFile():
cmd_before_parsed = shlex.split(cmd_before[0]) if cmd_before[0] else []
if not check_only and uses_autopatch and patches and cmd_parsed:
- if os.path.basename(cmd_parsed[0]) in ('patch', 'cat'):
- if os.path.exists(cmd_parsed[-1]):
- failed_patch = os.path.basename(cmd_parsed[-1])
- elif cmd_parsed[-1].startswith('-') and os.path.exists(cmd_before_parsed[-1]):
+ if Path(cmd_parsed[0]).name in ('patch', 'cat'):
+ if Path(cmd_parsed[-1]).exists():
+ failed_patch = Path(cmd_parsed[-1]).name
+ elif cmd_parsed[-1].startswith('-') and Path(cmd_before_parsed[-1]).exists():
# for %autopatch as well as %patch
#+ /usr/bin/cat /home/src/pkgs/gnome-getting-started-docs/SOURCES/gs-browse-web-firefox.page.patch
#+ /usr/bin/patch -p1 -s
- failed_patch = os.path.basename(cmd_before_parsed[-1])
+ failed_patch = Path(cmd_before_parsed[-1]).name
# Patch is merged if there is at least one 'ignored' line and no 'FAILED' line anywhere
has_ignored = False
@@ -1107,7 +1105,7 @@ class SpecFile():
if cmd and len(cmd_output) > lines_in_log:
print('+ %s' % cmd, file=sys.stdout)
print("\n".join(cmd_output), file=sys.stdout)
- elif os.path.exists(logfile):
+ elif logfile.exists():
subprocess.call(['tail', '-n', str(lines_in_log), logfile])
if failed_patch:
@@ -1121,8 +1119,8 @@ class SpecFile():
return False
finally:
# bm command doesn't cleanup after itself and the option it has to do that removes spec file + sources!
- buildpath = os.path.join(self.cwd, 'BUILD', '%s-%s' % (self.module, self.version))
- if os.path.exists(buildpath):
+ buildpath = self.cwd.joinpath('BUILD', '%s-%s' % (self.module, self.version))
+ if buildpath.exists():
shutil.rmtree(buildpath, ignore_errors=True)
return True
@@ -1133,7 +1131,7 @@ class SpecFile():
# XXX - doesn't handle buildrequires with version numbers :-(
made_changes = False
- with open(self.path, "r", encoding="utf-8") as fp_spec:
+ with self.path.open('r', encoding='utf-8') as fp_spec:
data = fp_spec.read()
# Change any "," in buildrequires into multiple lines
@@ -1187,7 +1185,7 @@ class SpecFile():
changes). This should possibily be moved to Downstream class, or move
this into a new Package class"""
cmd = ["svn", "diff", self.cwd]
- svn_diff_output = subprocess.check_output(cmd).decode('utf-8')
+ svn_diff_output = subprocess.check_output(cmd, encoding='utf-8')
if svn_diff_output != '':
print(svn_diff_output)
print("ERROR: Package has uncommitted changes!", file=sys.stderr)
@@ -1208,7 +1206,7 @@ class SpecFile():
if not self.ensure_no_local_changes(force):
return None
- with open(self.path, "r", encoding="utf-8") as fp_spec:
+ with self.path.open('r', encoding='utf-8') as fp_spec:
data = fp_spec.read()
if data.count("%subrel") != 0:
@@ -1318,7 +1316,7 @@ class Patch():
self.show_path = show_path
def __str__(self):
- return self.path if self.show_path else os.path.basename(self.path)
+        return str(self.path) if self.show_path else self.path.name
@cached_property
def svn_author(self):
@@ -1327,7 +1325,7 @@ class Patch():
Makes use of svn log."""
try:
cmd = ['svn', 'log', '-q', "--", self.path]
- contents = subprocess.check_output(cmd, close_fds=True).strip("\n").decode('utf-8').splitlines()
+ contents = subprocess.check_output(cmd, close_fds=True, encoding='utf-8').strip("\n").splitlines()
for line in contents:
if ' | ' not in line:
@@ -1402,13 +1400,13 @@ class Downstream():
MEDIA = "Core Release Source"
# PKGROOT will be overwritten (command line option)
- PKGROOT = '~/pkgs'
+ PKGROOT = Path('~/pkgs')
DISTRO = None
SECTION = None
def __init__(self):
cmd = ['urpmf', '--qf', '%name|%version|%files', '.', "--media", self.MEDIA]
- contents = subprocess.check_output(cmd, close_fds=True).decode("utf-8").strip("\n").splitlines()
+ contents = subprocess.check_output(cmd, close_fds=True, encoding='utf-8').strip("\n").splitlines()
srpm_files = {}
module_srpm_tarballs = {}
@@ -1474,7 +1472,7 @@ class Downstream():
_provide_to_pkg = {}
_pkg_to_provide = {}
cmd = ['urpmf', "--qf", "%name\t%provides\t%arch", '.']
- for myline in subprocess.check_output(cmd).decode("utf-8").splitlines():
+ for myline in subprocess.check_output(cmd, encoding='utf-8').splitlines():
pkgname, pkgprovide, pkgarch = myline.split("\t")
if pkgarch in ('src', 'i586'):
continue
@@ -1598,26 +1596,25 @@ class Package:
"""Provide the local checkout path for a given package
Package might not be checked out yet!"""
- return os.path.join(os.path.expanduser(self._downstream.PKGROOT), self.name)
+ return self._downstream.PKGROOT.expanduser().joinpath(self.name)
@cached_property
def spec(self):
"""Return the SpecFile for a given package"""
- path = self.path
return SpecFile(self.spec_path, package=self, module=self.name)
@cached_property
def spec_path(self):
- path = self.path
+ """Return the expected location of the SpecFile"""
- return os.path.join(path, "SPECS", "%s.spec" % self.name)
+ return self.path.joinpath("SPECS", "%s.spec" % self.name)
def ensure_checkout(self):
"""Ensure that the package is checked out"""
- if not os.path.exists(self.spec_path):
+ if not self.spec_path.exists():
try:
- package.checkout()
+ self.checkout()
except subprocess.CalledProcessError:
print('WARNING: Cannot checkout package %s. Skipping.' % self.name, file=sys.stderr)
return False
@@ -1629,7 +1626,7 @@ class Package:
"""Check out a package from the repository"""
downstream = self._downstream
if cwd is None:
- cwd = os.path.expanduser(downstream.PKGROOT)
+ cwd = downstream.PKGROOT.expanduser()
cmd = ['mgarepo', 'co']
if downstream.DISTRO:
@@ -1675,10 +1672,10 @@ def write_file(path, data):
"""Write to a file by creating a temporary file and renaming that to the
new file"""
- with tempfile.NamedTemporaryFile(mode='w+t', dir=os.path.dirname(path), delete=False, encoding="utf-8") as fdst:
+ with tempfile.NamedTemporaryFile(mode='w+t', dir=path.parent, delete=False, encoding='utf-8') as fdst:
fdst.write(data)
fdst.flush()
- os.rename(fdst.name, path)
+ os.replace(fdst.name, path)
def _cmd_checkout_multi(args):
package_name, what_to_print, options = args
@@ -1728,12 +1725,13 @@ def join_streams(show_version=False, only_diff_version=False, auto_update=True):
if show_version or only_diff_version:
# ensure package is checked out
- if not os.path.exists(cwd):
+ if not cwd.exists():
try:
package.checkout()
except subprocess.CalledProcessError:
# XXX - ignoring packages which cannot be checked out
continue
+
try:
spec_version = package.spec.version
except subprocess.CalledProcessError:
@@ -1759,7 +1757,7 @@ def cmd_group_owner(options):
groups = set(options.group)
cmd = ["urpmf", "-F|", "--qf", "%group\t%name\t%sourcerpm\t%version\t%release", "."]
- output = [pkg.split("\t") for pkg in subprocess.check_output(cmd).decode("utf-8").splitlines()]
+ output = [pkg.split("\t") for pkg in subprocess.check_output(cmd, encoding='utf-8').splitlines()]
if not output:
return
@@ -1786,7 +1784,7 @@ def cmd_group_owner(options):
maints = {
line.rpartition(" ")[::2]
- for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).decode("utf-8").splitlines()
+ for line in subprocess.check_output(["mgarepo", "maintdb", "get"], encoding='utf-8').splitlines()
}
def get_output(source, maints, packages):
@@ -1819,41 +1817,42 @@ def cmd_cleanup(_):
- clean old files in RPMS and SRPMS"""
- root = os.path.expanduser(Downstream.PKGROOT)
+ root = Downstream.PKGROOT.expanduser()
# packages = set(Downstream().packages)
- dirs = set((o for o in os.listdir(root) if os.path.isdir(os.path.join(root, o))))
+ dirs = set((path for path in root.glob('*') if path.is_dir()))
# dirs = dirs - packages
import pysvn # pylint: disable=import-outside-toplevel
- dirs = [o for o in dirs if os.path.exists(os.path.join(root, o, "SOURCES", 'sha1.lst'))]
+ dirs = [path for path in dirs if path.joinpath('SOURCES', 'sha1.lst').exists()]
for path in dirs:
try:
binaries = set((l.split(' ', 1)[1]
- for l in open(os.path.join(root, path, 'SOURCES', 'sha1.lst')).read().splitlines()))
+ for l in path.joinpath('SOURCES', 'sha1.lst').open().read().splitlines()))
except IndexError:
- print(os.path.join(root, path, 'SOURCES', 'sha1.lst'))
-# shutil.rmtree(os.path.join(root, path))
+            print('ERROR: Problem parsing the sha1.lst of package %s' % path.name, file=sys.stderr)
+# shutil.rmtree(path)
# Downstream.package(path).checkout()
continue
vcs = pysvn.Client()
- stats = [stat for stat in vcs.status(os.path.join(root, path, 'SOURCES'), depth=pysvn.depth.immediates) # pylint: disable=no-member
+ stats = [stat for stat in vcs.status(path.joinpath('SOURCES'), depth=pysvn.depth.immediates) # pylint: disable=no-member
if stat.text_status == pysvn.wc_status_kind.unversioned # pylint: disable=no-member
- and os.path.basename(stat.path) not in binaries] # pylint: disable=no-member
+                 and Path(stat.path).name not in binaries] # pylint: disable=no-member
if stats:
print(path)
- print(", ".join(os.path.basename(stat.path) for stat in stats))
+ print(", ".join(Path(stat.path).name for stat in stats))
print(stats)
for stat in stats:
- if os.path.isfile(stat.path):
- os.remove(stat.path)
- elif os.path.isdir(stat.path):
- shutil.rmtree(stat.path)
+ stat_path = Path(stat.path)
+ if stat_path.is_file():
+ stat_path.unlink()
+ elif stat_path.is_dir():
+ shutil.rmtree(stat_path)
def cmd_ls(options):
"""Show upstream module names, downstream package names
@@ -1941,13 +1940,13 @@ def cmd_check_latest(options):
def cmd_patches(options):
"""List files with extension .patch or .diff as found in the source rpms"""
- root = os.path.expanduser(Downstream.PKGROOT)
+ root = Downstream.PKGROOT.expanduser()
for package_name, module, _, _, downstream_files in sorted(join_streams()):
for filename in downstream_files:
if '.patch' in filename or '.diff' in filename:
- this_patch = Patch(os.path.join(root, package_name, "SOURCES", filename), show_path=options.path)
+ this_patch = Patch(root.joinpath(package_name, 'SOURCES', filename), show_path=options.path)
print("\t".join((module, package_name, str(this_patch))))
def cmd_check_prep(options):
@@ -2002,9 +2001,8 @@ def _cmd_clean_spec_multi(args):
def _cmd_check_spec_multi(args):
_, package_name = args
package = Downstream.package(package_name)
- cwd = package.path
- if not os.path.exists(package.spec_path):
+ if not package.spec_path.exists():
return False
spec = package.spec
@@ -2182,7 +2180,7 @@ def cmd_package_new_version(options):
sources = [preferred_name]
for filename in sources:
- path = os.path.join(cwd, "SOURCES", filename)
+ path = cwd.joinpath("SOURCES", filename)
if not _is_valid_hash(path, options.algo, options.hexdigest):
print("ERROR: Hash file failed check for %s!" % path, file=sys.stderr)
print("ERROR: Reverting changes!", file=sys.stderr)
@@ -2315,14 +2313,14 @@ def main():
parser.add_argument("-l", "--limit", type=argparse.FileType('r', 0),
dest="limit_upstream", metavar="FILE",
help="File containing upstream names")
- parser.add_argument("-p", "--root", action="store", dest="PKGROOT",
+ parser.add_argument("-p", "--root", type=Path, action="store", dest="PKGROOT",
help="Package root directory")
parser.add_argument("-d", "--distro", action="store", dest="distro",
help="Distribution release")
parser.add_argument("--debug", action="store_true", dest="debug",
help="Use for debugging")
parser.set_defaults(
- debug=False, PKGROOT="~/pkgs"
+ debug=False, PKGROOT=Downstream.PKGROOT
)
# SUBPARSERS
@@ -2473,7 +2471,7 @@ def main():
Downstream.PKGROOT = options.PKGROOT
if options.distro:
- Downstream.PKGROOT = os.path.join(options.PKGROOT, options.distro)
+ Downstream.PKGROOT = options.PKGROOT.joinpath(options.distro)
Downstream.MEDIA = "Core Release {0} Source,Core {0} Updates Source," \
"Core {0} Updates Testing Source".format(options.distro)
Downstream.DISTRO = options.distro