author	Olav Vitters <olav@vitters.nl>	2015-01-23 14:57:51 +0100
committer	Olav Vitters <olav@vitters.nl>	2015-01-23 14:57:51 +0100
commit	cd2502fa439d6e67fc6b566b7c21c6cf9cd41e8d (patch)
tree	febb9886237939e6f9d5658d5d23bed4cad9273f
parent	f1ae5b9a79e72356c10d1b0b65a6b6993a082b83 (diff)
convert to python3
-rwxr-xr-x	mgagnome	175
1 file changed, 88 insertions, 87 deletions
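The commit converts the mgagnome helper script from Python 2 to Python 3. The hunks below apply the usual 2-to-3 idioms: print becomes a function (file=sys.stderr replacing print >>sys.stderr), "except E, e" becomes "except E as e", <> becomes !=, long() becomes int(), urllib2/urlparse/sgmllib are replaced by urllib.request, urllib.parse and html.parser, and dict iterators (iteritems, itervalues) give way to items()/values(). A minimal, self-contained sketch of the same idioms (illustrative only, not code from the repository):

import sys
import urllib.request, urllib.error, urllib.parse   # replaces urllib2 / urlparse

def fetch(url):
    try:
        return urllib.request.urlopen(url).read()
    except urllib.error.URLError as e:               # Python 3 spelling of "except E, e"
        print("ERROR: %s" % e, file=sys.stderr)      # print() function with file= keyword
        return None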
diff --git a/mgagnome b/mgagnome
index 3b94371..b286dd7 100755
--- a/mgagnome
+++ b/mgagnome
@@ -1,4 +1,4 @@
-#!/usr/bin/python -u
+#!/usr/bin/python3 -u
# A lot of the code comes from ftpadmin, see
# https://git.gnome.org/browse/sysadmin-bin/tree/ftpadmin
@@ -29,9 +29,9 @@ import bz2
import lzma # pyliblzma
# getting links from HTML document:
-from sgmllib import SGMLParser
-import urllib2
-import urlparse
+from html.parser import HTMLParser
+import urllib.request, urllib.error, urllib.parse
+import urllib.parse
# for checking hashes
import hashlib
@@ -94,12 +94,12 @@ def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
while mtries > 1:
try:
return f(*args, **kwargs)
- except ExceptionToCheck, e:
+ except ExceptionToCheck as e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
- print msg
+ print(msg)
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
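The hunk above shows only the inner loop of the retry helper; a hedged sketch of the full decorator it appears to implement (the decorator plumbing around the loop is assumed, only the loop body matches the diff):

import functools
import time

def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
    def deco_retry(f):
        @functools.wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)   # final attempt; exceptions propagate
        return f_retry
    return deco_retry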
@@ -166,7 +166,7 @@ def get_safe_max_version(version, module=None):
majmin = get_majmin(version, module)
- min_nr = long(majmin[1])
+ min_nr = int(majmin[1])
if min_nr % 2 == 0:
return "%s.%d" % (majmin[0], min_nr + 1)
@@ -222,7 +222,7 @@ def judge_version_increase(version_old, version_new, module=None):
return (1, "Major version number increase")
# Minor indicates stable/unstable
- devstate = (long(majmins[0][1]) % 2 == 0, long(majmins[1][1]) % 2 == 0)
+ devstate = (int(majmins[0][1]) % 2 == 0, int(majmins[1][1]) % 2 == 0)
# Upgrading to unstable is weird
if not devstate[1]:
@@ -263,7 +263,7 @@ def call_editor(filename):
for editor in editors:
try:
ret = subprocess.call([editor, filename])
- except OSError, e:
+ except OSError as e:
if e.errno == 2:
continue
raise
@@ -273,15 +273,16 @@ def call_editor(filename):
return True
-class urllister(SGMLParser):
+class urllister(HTMLParser):
def reset(self):
- SGMLParser.reset(self)
+ HTMLParser.reset(self)
self.urls = []
- def start_a(self, attrs):
- href = [v for k, v in attrs if k=='href']
- if href:
- self.urls.extend(href)
+ def handle_starttag(self, tag, attrs):
+ if tag == 'a':
+ href = [v for k, v in attrs if k=='href']
+ if href:
+ self.urls.extend(href)
class XzTarFile(tarfile.TarFile):
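For context, a self-contained version of the converted link collector from the hunk above; the feed() call and the file name at the bottom are illustrative, not part of the commit:

from html.parser import HTMLParser

class urllister(HTMLParser):
    def reset(self):
        HTMLParser.reset(self)
        self.urls = []

    def handle_starttag(self, tag, attrs):
        # SGMLParser offered a per-tag start_a() hook; HTMLParser delivers
        # every start tag here, so anchors must be filtered explicitly.
        if tag == 'a':
            href = [v for k, v in attrs if k == 'href']
            if href:
                self.urls.extend(href)

parser = urllister()
parser.feed('<a href="mgagnome-1.0.tar.xz">tarball</a>')   # hypothetical page snippet
print(parser.urls)   # ['mgagnome-1.0.tar.xz']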
@@ -505,21 +506,21 @@ class SpecFile(object):
return made_changes, data
if not (patch_nrs_header == patch_nrs_any == patch_nrs_valid):
- print >>sys.stderr, "NOTICE: Unable to automatically convert %s patches into %%autopatch (header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any), len(patch_nrs_valid))
+ print("NOTICE: Unable to automatically convert %s patches into %%autopatch (header/patch/valid: %s, %s, %s)" % (self.module, len(patch_nrs_header), len(patch_nrs_any), len(patch_nrs_valid)), file=sys.stderr)
return made_changes, data
patch_flags = set([mo.group('strip') for mo in re_patch_valid.finditer(mo2.group(0))])
if len(patch_flags) != 1:
- print >>sys.stderr, "NOTICE: Unable to automatically convert patches into as different -p / strip levels used"
+ print("NOTICE: Unable to automatically convert patches into as different -p / strip levels used", file=sys.stderr)
return made_changes, data
# Whoot, we can convert!!
change_to = "%%autopatch -p%s\n" % list(patch_flags)[0]
prep, n1 = re_patch_valid.subn(change_to.replace('\\', '\\\\'), mo2.group(0), count=1)
prep, n2 = re_patch_valid.subn('', prep)
- if len(patch_nrs_valid) <> n1 + n2:
- print >>sys.stderr, "WARNING: Couldn't replace patches?!? Likely error in program logic"
+ if len(patch_nrs_valid) != n1 + n2:
+ print("WARNING: Couldn't replace patches?!? Likely error in program logic", file=sys.stderr)
return made_changes, data
# First check if patches currently apply
@@ -554,9 +555,9 @@ class SpecFile(object):
require_ver = spec.sourceHeader[rpm.RPMTAG_REQUIREVERSION]
br = {}
- for req, flag, ver in itertools.izip_longest(requires, require_flags, require_ver):
+ for req, flag, ver in itertools.zip_longest(requires, require_flags, require_ver):
# bitmask other than 15 means the require is (probably?) a require for a trigger or script
- if flag & 15 <> flag: continue
+ if flag & 15 != flag: continue
ver_cmp = ""
if (flag & rpm.RPMSENSE_LESS): ver_cmp += '<'
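The bitmask test above keeps only plain requires and then builds a comparison operator from the RPMSENSE_* flags; a small sketch of that mapping (only the '<' case is visible in the hunk, the '>' and '=' handling is assumed):

import rpm

def sense_to_operator(flag):
    ver_cmp = ""
    if flag & rpm.RPMSENSE_LESS:
        ver_cmp += '<'
    if flag & rpm.RPMSENSE_GREATER:
        ver_cmp += '>'
    if flag & rpm.RPMSENSE_EQUAL:
        ver_cmp += '='
    return ver_cmp   # e.g. '>=' for a "Requires: foo >= 1.0" entry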
@@ -587,7 +588,7 @@ class SpecFile(object):
# XXX - pretty hacky
if len(data) == len_before:
- print >>sys.stderr, "ERROR: Could not remove patch nr %s!" % nr
+ print("ERROR: Could not remove patch nr %s!" % nr, file=sys.stderr)
return False
# Overwrite file with new version number
@@ -622,7 +623,7 @@ class SpecFile(object):
cmd_before = (None, None)
# Determine the last command that failed
if os.path.exists(logfile):
- print logfile
+ print(logfile)
with open(logfile, "r") as f:
for line in line_input(f):
if line.startswith('+ '):
@@ -657,7 +658,7 @@ class SpecFile(object):
if has_ignored and not has_failed:
# If patch was merged, drop it from spec file and rety
- print >>sys.stdout, "INFO: Patch has been merged: %s" % failed_patch
+ print("INFO: Patch has been merged: %s" % failed_patch, file=sys.stdout)
if failed_patch in patches:
if self.remove_patch(patches[failed_patch][1]):
# try again
@@ -666,19 +667,19 @@ class SpecFile(object):
sys.exit(1)
if cmd and len(cmd_output) > LOGLINES:
- print >>sys.stdout, '+ %s' % cmd
- print >>sys.stdout, "\n".join(cmd_output)
+ print('+ %s' % cmd, file=sys.stdout)
+ print("\n".join(cmd_output), file=sys.stdout)
elif os.path.exists(logfile):
subprocess.call(['tail', '-n', str(LOGLINES), logfile])
if failed_patch:
- print >>sys.stderr, "ERROR: Problem applying patch: %s" % failed_patch
+ print("ERROR: Problem applying patch: %s" % failed_patch, file=sys.stderr)
elif cmd:
- print >>sys.stderr, "ERROR: Problem in %%prep phase command: %s" % cmd
+ print("ERROR: Problem in %%prep phase command: %s" % cmd, file=sys.stderr)
elif patches:
- print >>sys.stderr, "ERROR: Problem applying patches and/or %prep phase"
+ print("ERROR: Problem applying patches and/or %prep phase", file=sys.stderr)
else:
- print >>sys.stderr, "ERROR: Problem in %prep phase"
+ print("ERROR: Problem in %prep phase", file=sys.stderr)
return False
finally:
# bm command doesn't cleanup after itself and the option it has to do that removes spec file + sources!
@@ -721,7 +722,7 @@ class SpecFile(object):
made_changes = True
self._changes.append('SILENT %s' % change_description)
elif len(changes) != 0:
- print >>sys.stderr, "ERROR: Could not update buildrequires!"
+ print("ERROR: Could not update buildrequires!", file=sys.stderr)
return False
# Overwrite file with new version number
@@ -736,14 +737,14 @@ class SpecFile(object):
# XXX - os.path.join is hackish
svn_diff_output = subprocess.check_output(["svn", "diff", os.path.normpath(os.path.join(self.cwd, '..'))])
if svn_diff_output != '':
- print svn_diff_output
- print >>sys.stderr, "ERROR: Package has uncommitted changes!"
+ print(svn_diff_output)
+ print("ERROR: Package has uncommitted changes!", file=sys.stderr)
if not force:
return False
# Forcing package submission: revert changes
try:
- print >>sys.stderr, "WARNING: Force used; reverting svn changes"
+ print("WARNING: Force used; reverting svn changes", file=sys.stderr)
self._revert_changes()
except subprocess.CalledProcessError:
return False
@@ -758,11 +759,11 @@ class SpecFile(object):
(judgement, msg) = judge_version_increase(cur_version, version, self.module)
if judgement < 0:
- print >>sys.stderr, "ERROR: %s!" % (msg)
+ print("ERROR: %s!" % (msg), file=sys.stderr)
return False
if judgement < max_judgement:
- print "WARNING: %s!" % (msg)
+ print("WARNING: %s!" % (msg))
if not force: return False
if not self.ensure_no_local_changes(force):
@@ -772,21 +773,21 @@ class SpecFile(object):
data = f.read()
if data.count("%subrel") != 0:
- print >>sys.stderr, "ERROR: %subrel found; don't know what to do!"
+ print("ERROR: %subrel found; don't know what to do!", file=sys.stderr)
return False
if data.count("%mkrel") != 1:
- print >>sys.stderr, "ERROR: Multiple %mkrel found; don't know what to do!"
+ print("ERROR: Multiple %mkrel found; don't know what to do!", file=sys.stderr)
return False
data, nr = self.re_update_version.subn(r'\g<pre>%s\g<post>' % version, data, 1)
if nr != 1:
- print >>sys.stderr, "ERROR: Could not increase version!"
+ print("ERROR: Could not increase version!", file=sys.stderr)
return False
data, nr = self.re_update_release.subn(r'\g<pre>%mkrel 1\g<post>', data, 1)
if nr != 1:
- print >>sys.stderr, "ERROR: Could not reset release!"
+ print("ERROR: Could not reset release!", file=sys.stderr)
return False
# Overwrite file with new version number
@@ -796,7 +797,7 @@ class SpecFile(object):
# Verify that RPM also agrees that version number has changed
if self.version != version:
- print >>sys.stderr, "ERROR: Increased version to %s, but RPM doesn't agree!?!" % version
+ print("ERROR: Increased version to %s, but RPM doesn't agree!?!" % version, file=sys.stderr)
return False
@@ -810,16 +811,16 @@ class SpecFile(object):
subprocess.check_call(['mgarepo', 'sync', '-d'], cwd=self.cwd)
# success, so exit loop
break
- except subprocess.CalledProcessError, e:
+ except subprocess.CalledProcessError as e:
# mgarepo sync returns 1 if the tarball cannot be downloaded
if e.returncode != 1:
self._revert_changes()
- print >>sys.stderr, "ERROR: Could not download tarball"
+ print("ERROR: Could not download tarball", file=sys.stderr)
return False
else:
# failed to download tarball
self._revert_changes()
- print >>sys.stderr, "ERROR: Could not download tarball"
+ print("ERROR: Could not download tarball", file=sys.stderr)
return False
return self.check_and_update_patches()
@@ -865,9 +866,9 @@ class Patch(object):
# XXX - wrap this at 80 chars
add_line = True
- print >>fdst, "%s: %s" % (header, "" if data is None else data)
+ print("%s: %s" % (header, "" if data is None else data), file=fdst)
- if add_line: print >>fdst, ""
+ if add_line: print("", file=fdst)
# Now copy any other data and the patch
shutil.copyfileobj(fsrc, fdst)
@@ -972,7 +973,7 @@ class Upstream(object):
_cache_versions = {}
def __init__(self):
- urlopen = urllib2.build_opener()
+ urlopen = urllib.request.build_opener()
good_dir = re.compile('^[-A-Za-z0-9_+.]+/$')
@@ -1024,7 +1025,7 @@ class Downstream(object):
try:
srpm, version, filename = line.split("|")
except ValueError:
- print >>sys.stderr, line
+ print(line, file=sys.stderr)
continue
PACKAGES.add(srpm)
@@ -1098,13 +1099,13 @@ class Downstream(object):
raise ValueError("No packages for upstream name: %s" % upstream)
if len(self.tarballs[upstream]) == 1:
- return self.tarballs[upstream].keys()
+ return list(self.tarballs[upstream].keys())
# Directories packages are located in
root = os.path.expanduser(self.PKGROOT)
packages = {}
- for package in self.tarballs[upstream].keys():
+ for package in list(self.tarballs[upstream].keys()):
cwd = os.path.join(root, package)
# Checkout package to ensure the checkout reflects the latest changes
@@ -1126,9 +1127,9 @@ class Downstream(object):
# Return all packages reflecting the version before the current version
# - determine the newest version in any spec file
- latest_possible_version = get_latest_version(packages.values())
+ latest_possible_version = get_latest_version(list(packages.values()))
# - now get the latest version before the current version
- latest_version = get_latest_version(packages.values(), max_version=version)
+ latest_version = get_latest_version(list(packages.values()), max_version=version)
if latest_version is None:
raise ValueError("Multiple packages found and all versions are newer than %s" % version)
@@ -1155,7 +1156,7 @@ def write_file(path, data):
def cmd_co_multi(args):
package, what_to_print = args
- print what_to_print
+ print(what_to_print)
try:
Downstream.co(package)
@@ -1180,7 +1181,7 @@ def join_streams(show_version=False, only_diff_version=False):
matches = upstream & set(downstream.tarballs.keys())
for module in matches:
- for package in downstream.tarballs[module].keys():
+ for package in list(downstream.tarballs[module].keys()):
package_version = downstream.tarballs[module][package]
spec_version = None
if show_version or only_diff_version:
@@ -1242,28 +1243,28 @@ def cmd_group_owner(options, parser):
maints = dict([line.rpartition(" ")[::2] for line in subprocess.check_output(["mgarepo", "maintdb", "get"]).splitlines()])
def get_output(source, maints, packages):
- for source in packages.keys():
+ for source in list(packages.keys()):
maint = maints.get(source, "?")
yield "\t".join((maint, source, ",".join(sorted(packages[source]))))
first = True
- for group in packages.keys():
+ for group in list(packages.keys()):
if first:
first = False
else:
- print ""
- print ""
- print group
- print ""
+ print("")
+ print("")
+ print(group)
+ print("")
for line in sorted(get_output(source, maints, packages[group])):
- print line
+ print(line)
def cmd_ls(options, parser):
streams = join_streams(show_version=options.show_version, only_diff_version=options.diff)
if options.sort:
- SORT=dict(zip(options.sort.read().splitlines(), itertools.count()))
+ SORT=dict(list(zip(options.sort.read().splitlines(), itertools.count())))
streams = sorted(streams, key=lambda a: (SORT.get(a[1], 9999), a[0]))
else:
@@ -1273,7 +1274,7 @@ def cmd_ls(options, parser):
sys.stdout.write(package)
if options.upstream: sys.stdout.write("\t%s" % module)
if options.show_version: sys.stdout.write("\t%s\t%s" % (spec_version, package_version))
- print
+ print()
def cmd_check_version(options, parser):
streams = join_streams(show_version=True)
@@ -1319,7 +1320,7 @@ def cmd_check_latest(options, parser):
sys.stdout.write("\t%s" % safe_version)
sys.stdout.write("\t%s" % "".join(sorted(upgrade)))
- print
+ print()
if 'S' in upgrade and options.submit and not Downstream.DISTRO:
cmd = ['mgagnome', 'increase', package, safe_version]
@@ -1339,7 +1340,7 @@ def cmd_patches(options, parser):
forwarded = p.dep3['headers'].get('Forwarded', "no")
if p.dep3['valid']:
valid="VALID"
- print "\t".join((module, package, str(p), forwarded, valid))
+ print("\t".join((module, package, str(p), forwarded, valid)))
def cmd_dep3(options, parser):
p = Patch(options.patch)
@@ -1358,12 +1359,12 @@ def cmd_clean_spec_multi(args):
# Directories packages are located in
root = os.path.expanduser(Downstream.PKGROOT)
- print package
+ print(package)
cwd = os.path.join(root, package)
path = os.path.join(cwd, "SPECS", "%s.spec" % package)
if not os.path.exists(path):
- print >>sys.stderr, 'ERROR: Cannot find spec file for package %s' % package
+ print('ERROR: Cannot find spec file for package %s' % package, file=sys.stderr)
return False
s = SpecFile(path, module=package)
@@ -1373,7 +1374,7 @@ def cmd_clean_spec_multi(args):
return False
except subprocess.CalledProcessError:
# Package was probably not checked out or something
- print >>sys.stderr, "ERROR: cannot clean spec file for %s" % package
+ print("ERROR: cannot clean spec file for %s" % package, file=sys.stderr)
return False
made_changes=False
@@ -1405,9 +1406,9 @@ def cmd_clean_spec_multi(args):
}
}
- for convert_br, keys in convert_brs.iteritems():
+ for convert_br, keys in convert_brs.items():
keys['changes'] = {}
- br_old = [r for r in br.keys() if keys['check_br'](r)]
+ br_old = [r for r in list(br.keys()) if keys['check_br'](r)]
for req in br_old:
provides = Downstream.alternative_provides(req)
provides_alt = [clean_pkgconfig_prov(prov) for prov in provides if keys['check_provide'](prov)]
@@ -1433,14 +1434,14 @@ def cmd_clean_spec_multi(args):
if not options.doit:
import pprint
- for keys in convert_brs.items():
+ for keys in list(convert_brs.items()):
if 'changes' in keys and keys['changes']: pprint.pprint(keys['changes'])
- if no_alt: print "WARNING: no alternatives found for: %s" % ", ".join(sorted(no_alt))
+ if no_alt: print("WARNING: no alternatives found for: %s" % ", ".join(sorted(no_alt)))
if no_change: pprint.pprint(no_change)
else:
convert_brs = {}
- keys_with_changes = [keys for keys in convert_brs.itervalues() if 'changes' in keys and keys['changes']]
+ keys_with_changes = [keys for keys in convert_brs.values() if 'changes' in keys and keys['changes']]
if not keys_with_changes:
keys_with_changes.append( {'changes': [], 'desc': 'unsplit BRs'})
@@ -1457,7 +1458,7 @@ def cmd_clean_spec_multi(args):
Downstream.ci(package, s.changes, cwd=cwd)
else:
# show the diff and undo all changes
- print s.changes
+ print(s.changes)
s.ensure_no_local_changes(force=True)
def cmd_clean_spec(options, parser):
@@ -1474,8 +1475,8 @@ def cmd_package_new_version(options, parser):
if options.upstream:
try:
package = Downstream().get_downstream_from_upstream(options.package, options.version)[0]
- except ValueError, e:
- print >>sys.stderr, "ERROR: %s" % e
+ except ValueError as e:
+ print("ERROR: %s" % e, file=sys.stderr)
sys.exit(1)
else:
package = options.package
@@ -1493,15 +1494,15 @@ def cmd_package_new_version(options, parser):
# SpecFile class handles the actual version+release change
# XXX - module should reflect upstream name, this gives it the package name
s = SpecFile(os.path.join(cwd, "SPECS", "%s.spec" % package), module=package)
- print "%s => %s" % (s.version, options.version)
+ print("%s => %s" % (s.version, options.version))
if not s.update(options.version, force=options.force):
sys.exit(1)
# Check hash, if given
if options.hexdigest is not None:
- sources = [name for name, value in s.sources.iteritems() if '://' in value[0]]
+ sources = [name for name, value in s.sources.items() if '://' in value[0]]
if not len(sources):
- print >>sys.stderr, "ERROR: Cannot determine source file (for hash check)!"
+ print("ERROR: Cannot determine source file (for hash check)!", file=sys.stderr)
sys.stderr(1)
# If there are multiple sources, try to see if there is a preferred name
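is_valid_hash itself lies outside this hunk; a minimal sketch of what the call above presumably does, using only hashlib (an assumed implementation, the real helper may differ):

import hashlib

def is_valid_hash(path, algo, hexdigest):
    h = hashlib.new(algo)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return h.hexdigest() == hexdigest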
@@ -1514,8 +1515,8 @@ def cmd_package_new_version(options, parser):
for filename in sources:
path = os.path.join(cwd, "SOURCES", filename)
if not is_valid_hash(path, options.algo, options.hexdigest):
- print >>sys.stderr, "ERROR: Hash file failed check for %s!" % path
- print >>sys.stderr, "ERROR: Reverting changes!"
+ print("ERROR: Hash file failed check for %s!" % path, file=sys.stderr)
+ print("ERROR: Reverting changes!", file=sys.stderr)
subprocess.call(['svn', 'revert', '-R', cwd], cwd=cwd)
sys.exit(1)
@@ -1587,15 +1588,15 @@ def cmd_parse_ftp_release_list(options, parser):
module = msg['X-Module-Name']
version = msg['X-Module-Version']
hexdigest = msg['X-Module-SHA256-tar.xz']
- except KeyError, e:
- print >>stderr, "ERROR: %s" % e
+ except KeyError as e:
+ print("ERROR: %s" % e, file=stderr)
if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
sys.exit(1)
try:
packages = Downstream().get_downstream_from_upstream(module, version)
- except ValueError, e:
- print >>stderr, "ERROR: %s" % e
+ except ValueError as e:
+ print("ERROR: %s" % e, file=stderr)
if options.mail: _send_reply_mail(stdout, msg, options.mail, error=True)
sys.exit(1)
@@ -1713,7 +1714,7 @@ def main():
help="Commit changes and submit")
subparser.add_argument( "--no-submit", action="store_false", dest="submit",
help="Do not commit changes and submit")
- subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms, dest="algo",
+ subparser.add_argument("-a", "--algorithm", choices=hashlib.algorithms_available, dest="algo",
help="Hash algorithm")
subparser.add_argument("--hash", dest="hexdigest",
help="Hexdigest of the hash")
@@ -1767,7 +1768,7 @@ def main():
except EOFError:
print('EOF')
sys.exit(1)
- except IOError, e:
+ except IOError as e:
if e.errno != errno.EPIPE:
raise
sys.exit(0)