diff --git a/fedmsg.d/base.py b/fedmsg.d/base.py index c213c00..2aa221c 100644 --- a/fedmsg.d/base.py +++ b/fedmsg.d/base.py @@ -17,38 +17,38 @@ # # Authors: Ralph Bean # -config = dict( +config = { # Set this to dev if you're hacking on fedmsg or an app. # Set to stg or prod if running in the Fedora Infrastructure - environment="dev", + 'environment': "dev", # Default is 0 - high_water_mark=0, - io_threads=1, + 'high_water_mark': 0, + 'io_threads': 1, ## For the fedmsg-hub and fedmsg-relay. ## # We almost always want the fedmsg-hub to be sending messages with zmq as # opposed to amqp or stomp. - zmq_enabled=True, + 'zmq_enabled': True, # When subscribing to messages, we want to allow splats ('*') so we tell # the hub to not be strict when comparing messages topics to subscription # topics. - zmq_strict=False, + 'zmq_strict': False, # Number of seconds to sleep after initializing waiting for sockets to sync - post_init_sleep=0.5, + 'post_init_sleep': 0.5, # Wait a whole second to kill all the last io threads for messages to # exit our outgoing queue (if we have any). This is in milliseconds. 
- zmq_linger=1000, + 'zmq_linger': 1000, # See the following # - http://tldp.org/HOWTO/TCP-Keepalive-HOWTO/overview.html # - http://api.zeromq.org/3-2:zmq-setsockopt - zmq_tcp_keepalive=1, - zmq_tcp_keepalive_cnt=3, - zmq_tcp_keepalive_idle=60, - zmq_tcp_keepalive_intvl=5, -) + 'zmq_tcp_keepalive': 1, + 'zmq_tcp_keepalive_cnt': 3, + 'zmq_tcp_keepalive_idle': 60, + 'zmq_tcp_keepalive_intvl': 5, +} diff --git a/fedmsg.d/logging.py b/fedmsg.d/logging.py index 833cd37..156ffb0 100644 --- a/fedmsg.d/logging.py +++ b/fedmsg.d/logging.py @@ -2,49 +2,49 @@ # See the following for constraints on this format http://bit.ly/Xn1WDn bare_format = "[%(asctime)s][%(name)10s %(levelname)7s] %(message)s" -config = dict( - logging=dict( - version=1, - formatters=dict( - bare={ +config = { + 'logging': { + 'version': 1, + 'formatters': { + 'bare': { "datefmt": "%Y-%m-%d %H:%M:%S", "format": bare_format }, - ), - handlers=dict( - console={ + }, + 'handlers': { + 'console': { "class": "logging.StreamHandler", "formatter": "bare", "level": "DEBUG", "stream": "ext://sys.stdout", } - ), - loggers=dict( - fedmsg={ + }, + 'loggers': { + 'fedmsg': { "level": "DEBUG", "propagate": False, "handlers": ["console"], }, - moksha={ + 'moksha': { "level": "DEBUG", "propagate": False, "handlers": ["console"], }, - pkgdb2client={ + 'pkgdb2client': { "level": "INFO", "propagate": False, "handlers": ["console"], }, - modules={ + 'modules': { "level": "DEBUG", "propagate": False, "handlers": ["console"], }, - pdcupdater={ + 'pdcupdater': { "level": "DEBUG", "propagate": False, "handlers": ["console"], }, - ), - ), -) + }, + }, +} diff --git a/fedmsg.d/pdcupdater-example.py b/fedmsg.d/pdcupdater-example.py index abf5d09..d23c0f4 100644 --- a/fedmsg.d/pdcupdater-example.py +++ b/fedmsg.d/pdcupdater-example.py @@ -72,13 +72,13 @@ 'pdcupdater.ContainerRPMInclusionDepChainHandler.container_build_user': 'containerbuild', # Augment the base fedmsg logging config to also handle pdcupdater loggers. 
- 'logging': dict( - loggers=dict( - pdcupdater={ + 'logging': { + 'loggers': { + 'pdcupdater': { "level": "DEBUG", "propagate": False, "handlers": ["console"], }, - ) - ) + }, + }, } diff --git a/fedmsg.d/ssl.py b/fedmsg.d/ssl.py index 2e701e5..25cf612 100644 --- a/fedmsg.d/ssl.py +++ b/fedmsg.d/ssl.py @@ -18,29 +18,28 @@ # Authors: Ralph Bean # import os -import socket SEP = os.path.sep here = os.getcwd() -config = dict( - sign_messages=False, - validate_signatures=False, +config = { + 'sign_messages': False, + 'validate_signatures': False, # Use these implementations to sign and validate messages - crypto_backend='x509', - crypto_validate_backends=['x509'], + 'crypto_backend': 'x509', + 'crypto_validate_backends': ['x509'], - ssldir="/etc/pki/fedmsg", - crl_location="https://fedoraproject.org/fedmsg/crl.pem", - crl_cache="/var/run/fedmsg/crl.pem", - crl_cache_expiry=10, + 'ssldir': "/etc/pki/fedmsg", + 'crl_location': "https://fedoraproject.org/fedmsg/crl.pem", + 'crl_cache': "/var/run/fedmsg/crl.pem", + 'crl_cache_expiry': 10, - ca_cert_location="https://fedoraproject.org/fedmsg/ca.crt", - ca_cert_cache="/var/run/fedmsg/ca.crt", - ca_cert_cache_expiry=0, # Never expires + 'ca_cert_location': "https://fedoraproject.org/fedmsg/ca.crt", + 'ca_cert_cache': "/var/run/fedmsg/ca.crt", + 'ca_cert_cache_expiry': 0, # Never expires - certnames={ + 'certnames': { # In prod/stg, map hostname to the name of the cert in ssldir. # Unfortunately, we can't use socket.getfqdn() #"app01.stg": "app01.stg.phx2.fedoraproject.org", @@ -49,7 +48,7 @@ # A mapping of fully qualified topics to a list of cert names for which # a valid signature is to be considered authorized. Messages on topics not # listed here are considered automatically authorized. - routing_policy={ + 'routing_policy': { # Only allow announcements from production if they're signed by a # certain certificate. 
"org.fedoraproject.prod.announce.announcement": [ @@ -62,5 +61,5 @@ # When this is False, only messages that have a topic in the routing_policy # but whose cert names aren't in the associated list are dropped; messages # whose topics do not appear in the routing_policy are not dropped. - routing_nitpicky=False, -) + 'routing_nitpicky': False, +} diff --git a/pdcupdater/commands.py b/pdcupdater/commands.py index 837306f..ac1550f 100644 --- a/pdcupdater/commands.py +++ b/pdcupdater/commands.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import logging import logging.config import sys @@ -36,11 +34,11 @@ def retry(): def _initialize_basics(pdc): """ Gotta have these before we can really do anything... """ - arches = [dict(name=name) for name in ["armhfp", "i386", "x86_64"]] + arches = [{'name': name} for name in ["armhfp", "i386", "x86_64"]] pdc_arches = list(pdc.get_paged(pdc['arches']._)) for arch in arches: if arch not in pdc_arches: - log.info("Creating arch %r." % arch['name']) + log.info("Creating arch %r.", arch['name']) pdc['arches']._(arch) @@ -52,8 +50,8 @@ def initialize(): _initialize_basics(pdc) handlers = pdcupdater.handlers.load_handlers(config) for handler in handlers: - log.info("Calling .initialize() on %r" % handler) - pdc.set_comment("Initialized via %r" % handler) + log.info("Calling .initialize() on %r", handler) + pdc.set_comment(f"Initialized via {handler!r}") try: handler.initialize(pdc) except beanbag.bbexcept.BeanBagException as e: @@ -70,7 +68,7 @@ def audit(): results = {} for handler in handlers: name = type(handler).__name__ - log.info('Performing audit for %s' % name) + log.info('Performing audit for %s', name) results[name] = handler.audit(pdc) verbose = False @@ -79,7 +77,7 @@ def audit(): def _print_audit_report(results, verbose): fail = False - for key, values in list(results.items()): + for key, values in results.items(): present, absent = values fail = fail or present or absent @@ -88,30 +86,33 @@ def 
_print_audit_report(results, verbose): else: print("WARNING - audit script detected something is wrong.") - print("\nSummary") - print("=======\n") + print() + print("Summary") + print("=======") + print() - for key, values in list(results.items()): + for key, values in results.items(): present, absent = values if not present and not absent: - print(( "- [x]", key)) + print(f"- [x] {key}") else: - print(("- [!]", key)) - print((" ", len(present), "extra entries in PDC unaccounted for")) - print((" ", len(absent), "entries absent from PDC")) + print(f"- [!] {key}") + print(f" {len(present)} extra entries in PDC unaccounted for") + print(f" {len(absent)} entries absent from PDC") - print("\nDetails") + print() + print("Details") print("=======") limit = 100 - for key, values in list(results.items()): + for key, values in results.items(): present, absent = values if not present and not absent: continue print() print(key) - print(("-" * len(key))) + print("-" * len(key)) print() if not present: @@ -121,16 +122,16 @@ def _print_audit_report(results, verbose): print() if verbose or len(present) < limit: for value in present: - print(("-", value)) + print(f"- {value}") if isinstance(present, dict): - print((" ", present[value])) + print(f" {present[value]}") else: present = list(present) for value in present[:limit]: - print(("-", value)) + print(f"- {value}") if isinstance(present, dict): - print((" ", present[value])) - print(("- (plus %i more... truncated.)" % (len(present) - limit))) + print(f" {present[value]}") + print(f"- (plus {len(present) - limit} more... 
truncated.)") print() if not absent: @@ -140,16 +141,16 @@ def _print_audit_report(results, verbose): print() if verbose or len(absent) < limit: for value in absent: - print("-", value) + print(f"- {value}") if isinstance(absent, dict): - print(" ", absent[value]) + print(f" {absent[value]}") else: absent = list(absent) for value in absent[:limit]: - print("-", value) + print(f"- {value}") if isinstance(absent, dict): - print(" ", absent[value]) - print("- (plus %i more... truncated.)" % (len(absent) - limit)) + print(f" {absent[value]}") + print(f"- (plus {len(absent) - limit} more... truncated.)") if not fail: return 0 diff --git a/pdcupdater/consumer.py b/pdcupdater/consumer.py index c7d090b..5a2f9ac 100644 --- a/pdcupdater/consumer.py +++ b/pdcupdater/consumer.py @@ -69,7 +69,7 @@ def consume(self, envelope): if 'message-id' in msg['headers']: msg['msg_id'] = msg['headers']['message-id'] - self.log.debug("Received %r, %r" % (msg['msg_id'], topic)) + self.log.debug("Received %r, %r", msg['msg_id'], topic) pdc = pdc_client.PDCClient(**self.pdc_config) pdcupdater.utils.handle_message(pdc, self.handlers, msg) diff --git a/pdcupdater/handlers/__init__.py b/pdcupdater/handlers/__init__.py index aaeab26..455531c 100644 --- a/pdcupdater/handlers/__init__.py +++ b/pdcupdater/handlers/__init__.py @@ -31,7 +31,8 @@ def construct_topics(self, config): for topic in self.topic_suffixes ] - @abc.abstractproperty + @property + @abc.abstractmethod def topic_suffixes(self): pass diff --git a/pdcupdater/handlers/atomic.py b/pdcupdater/handlers/atomic.py index fb5c052..c6f94b6 100644 --- a/pdcupdater/handlers/atomic.py +++ b/pdcupdater/handlers/atomic.py @@ -56,12 +56,12 @@ def atomic_component_groups_from_git(self, pdc): branch = 'f' + release['version'] # Go, get, and parse the data - params = dict(h=branch) - filename = 'fedora-%s.json' % self.group_type + params = {'h': branch} + filename = f'fedora-{self.group_type}.json' url = self.git_url + filename response = 
requests.get(url, params=params) if not bool(response): - log.warn("Failed to get %r: %r" % (response.url, response)) + log.warn("Failed to get %r: %r", response.url, response) continue data = response.json() @@ -77,10 +77,7 @@ def atomic_component_groups_from_git(self, pdc): yield { 'group_type': self.group_type, 'release': release_id, - 'description': 'Deps for %s %s' % ( - self.group_type, - self.git_url, - ), + 'description': f'Deps for {self.group_type} {self.git_url}', 'components': [{ 'release': release_id, 'name': package, @@ -101,16 +98,18 @@ def audit(self, pdc): ] # Invert the lists of dicts into dicts of lists - invert = lambda collection: dict([( - group['release'], - [component['name'] for component in group['components']] - ) for group in collection ]) + invert = lambda collection: { + group['release']: [ + component['name'] for component in group['components'] + ] + for group in collection + } git_groups = invert(git_groups) pdc_groups = invert(pdc_groups) # Associate the two by release and normalize present, absent = {}, {} - for release in set(list(git_groups.keys()) + list(pdc_groups.keys())): + for release in set(git_groups) | set(pdc_groups): # Convert each group to a set left = set(git_groups.get(release, [])) right = set(pdc_groups.get(release, [])) diff --git a/pdcupdater/handlers/compose.py b/pdcupdater/handlers/compose.py index 8d542af..5d2c7a2 100644 --- a/pdcupdater/handlers/compose.py +++ b/pdcupdater/handlers/compose.py @@ -62,8 +62,8 @@ def audit(self, pdc): pdc_composes = pdc.get_paged(pdc['composes']._) # normalize the two lists - old_composes = set([idx for branch, idx, url in old_composes]) - pdc_composes = set([c['compose_id'] for c in pdc_composes]) + old_composes = {idx for branch, idx, url in old_composes} + pdc_composes = {c['compose_id'] for c in pdc_composes} # use set operators to determine the difference present = pdc_composes - old_composes @@ -85,10 +85,10 @@ def initialize(self, pdc): self._import_compose(pdc, 
compose_id, url) except Exception as e: if getattr(e, 'response', None): - log.exception("Failed to import %r - %r %r" % ( - url, e.response.url, e.response.text)) + log.exception("Failed to import %r - %r %r", + url, e.response.url, e.response.text) else: - log.exception("Failed to import %r" % url) + log.exception("Failed to import %r", url) @pdcupdater.utils.with_ridiculous_timeout @@ -98,23 +98,23 @@ def _import_compose(self, pdc, compose_id, compose_url): url = base + '/composeinfo.json' response = session.get(url) if not bool(response): - raise IOError("Failed to get %r: %r" % (url, response)) + raise IOError(f"Failed to get {url!r}: {response!r}") composeinfo = response.json() # Before we waste any more time pulling down 100MB files from koji and # POSTing them back to PDC, let's check to see if we already know about # this compose. compose_id = composeinfo['payload']['compose']['id'] - log.info("Importing compose %r" % compose_id) + log.info("Importing compose %r", compose_id) if pdcupdater.utils.compose_exists(pdc, compose_id): - log.warn("%r already exists in PDC." % compose_id) + log.warn("%r already exists in PDC.", compose_id) return # OK, go ahead and pull down these gigantic files. url = base + '/images.json' response = session.get(url) if not bool(response): - raise IOError("Failed to get %r: %r" % (url, response)) + raise IOError(f"Failed to get {url!r}: {response!r}") images = response.json() url = base + '/rpms.json' @@ -123,11 +123,11 @@ def _import_compose(self, pdc, compose_id, compose_url): if response.status_code == 404: # Not all composes have rpms. In particular, atomic ones. # https://github.com/fedora-infra/pdc-updater/issues/11 - log.warn('Found no rpms.json file at %r' % r) + log.warn('Found no rpms.json file at %r', url) rpms = None elif not bool(response): # Something other than a 404 means real failure, so complain. 
- raise IOError("Failed to get %r: %r" % (url, response)) + raise IOError(f"Failed to get {url!r}: {response!r}") else: rpms = response.json() @@ -146,16 +146,16 @@ def _import_compose(self, pdc, compose_id, compose_url): # https://github.com/product-definition-center/product-definition-center/issues/228 # https://pdc.fedoraproject.org/rest_api/v1/compose-images/ - pdc['compose-images']._(dict( - release_id=release_id, - composeinfo=composeinfo, - image_manifest=images, - )) + pdc['compose-images']._({ + 'release_id': release_id, + 'composeinfo': composeinfo, + 'image_manifest': images, + }) # https://pdc.fedoraproject.org/rest_api/v1/compose-rpms/ if rpms: - pdc['compose-rpms']._(dict( - release_id=release_id, - composeinfo=composeinfo, - rpm_manifest=rpms, - )) + pdc['compose-rpms']._({ + 'release_id': release_id, + 'composeinfo': composeinfo, + 'rpm_manifest': rpms, + }) diff --git a/pdcupdater/handlers/depchain/base.py b/pdcupdater/handlers/depchain/base.py index aa8d59b..af2d995 100644 --- a/pdcupdater/handlers/depchain/base.py +++ b/pdcupdater/handlers/depchain/base.py @@ -48,7 +48,7 @@ def __init__(self, *args, **kwargs): required = ('managed_types', 'parent_type', 'child_type',) for attr in required: if not getattr(self, attr, None): - raise AttributeError("%r is required on %r" % (attr, self)) + raise AttributeError(f"{attr!r} is required on {self!r}") super(BaseKojiDepChainHandler, self).__init__(*args, **kwargs) self.koji_url = self.config['pdcupdater.koji_url'] @@ -86,14 +86,14 @@ def can_handle(self, pdc, msg): # Ignore secondary arches for now instance = msg.get('msg', {}).get('instance', 'primary') if instance != 'primary': - log.debug("From %r. Skipping." % instance) + log.debug("From %r. Skipping.", instance) return False interesting = self.interesting_tags(pdc) tag = self.extract_tag(msg) if tag not in interesting: - log.debug("%r not in %r. Skipping." % (tag, interesting)) + log.debug("%r not in %r. 
Skipping.", tag, interesting) return False return True @@ -114,8 +114,8 @@ def _yield_managed_pdc_relationships_from_release(self, pdc, release_id): # Construct and yield a three-tuple result. keys = ('name', 'release') - parent = dict(list(zip(keys, [entry['from_component'][key] for key in keys]))) - child = dict(list(zip(keys, [entry['to_component'][key] for key in keys]))) + parent = {key: entry['from_component'][key] for key in keys} + child = {key: entry['to_component'][key] for key in keys} yield parent, relationship_type, child def handle(self, pdc, msg): @@ -144,7 +144,7 @@ def handle(self, pdc, msg): # all the relationships from koji, then find all the relationships from # pdc. We'll study the intersection between the two sets and act on # the discrepancies. - log.info("Gathering relationships from koji for %r" % build_id) + log.info("Gathering relationships from koji for %r", build_id) koji_relationships = set(self._yield_koji_relationships_from_build( self.koji_url, build_id)) @@ -154,23 +154,23 @@ def handle(self, pdc, msg): by_parent[parent_name].add((relationship, child_name,)) # Finally, iterate over all those, now grouped by parent_name - for parent_name, koji_relationships in list(by_parent.items()): + for parent_name, koji_relationships in by_parent.items(): # TODO -- pass in global_component_name to this function? 
parent = pdcupdater.utils.ensure_release_component_exists( pdc, release_id, parent_name, type=self.parent_type) - log.info("Gathering from pdc for %s/%s" % (parent_name, release_id)) + log.info("Gathering from pdc for %s/%s", parent_name, release_id) pdc_relationships = set(self._yield_pdc_relationships_from_build( pdc, parent['name'], release_id)) to_be_created = koji_relationships - pdc_relationships to_be_deleted = pdc_relationships - koji_relationships - log.info("Issuing bulk create for %i entries" % len(to_be_created)) + log.info("Issuing bulk create for %i entries", len(to_be_created)) pdcupdater.utils.ensure_bulk_release_component_relationships_exists( pdc, parent, to_be_created, component_type=self.child_type) - log.info("Issuing bulk delete for %i entries" % len(to_be_deleted)) + log.info("Issuing bulk delete for %i entries", len(to_be_deleted)) pdcupdater.utils.delete_bulk_release_component_relationships( pdc, parent, to_be_deleted) @@ -179,7 +179,7 @@ def audit(self, pdc): tags = self.interesting_tags(pdc) for tag in tags: - log.info("Starting audit of tag %r of %r." % (tag, tags)) + log.info("Starting audit of tag %r of %r.", tag, tags) if self.pdc_tag_mapping: release_id, release = pdcupdater.utils.tag2release(tag, pdc=pdc) else: @@ -191,13 +191,13 @@ def audit(self, pdc): # normalize the two lists, and smash items into hashable strings. 
def _format(parent, relationship_type, child): - return "%s/%s %s %s/%s" % ( - parent['name'], parent['release'], - relationship_type, - child['name'], child['release'], + return ( + f"{parent['name']}/{parent['release']} " + f"{relationship_type} " + f"{child['name']}/{child['release']}" ) - koji_relationships = set([_format(*x) for x in koji_relationships]) - pdc_relationships = set([_format(*x) for x in pdc_relationships]) + koji_relationships = {_format(*x) for x in koji_relationships} + pdc_relationships = {_format(*x) for x in pdc_relationships} # use set operators to determine the difference present = present.union(pdc_relationships - koji_relationships) @@ -210,7 +210,7 @@ def initialize(self, pdc): tags.reverse() for tag in tags: - log.info("Starting initialize of tag %r of %r." % (tag, tags)) + log.info("Starting initialize of tag %r of %r.", tag, tags) if self.pdc_tag_mapping: release_id, release = pdcupdater.utils.tag2release(tag, pdc=pdc) else: diff --git a/pdcupdater/handlers/depchain/containers.py b/pdcupdater/handlers/depchain/containers.py index 39a5cc7..30618d3 100644 --- a/pdcupdater/handlers/depchain/containers.py +++ b/pdcupdater/handlers/depchain/containers.py @@ -34,16 +34,16 @@ class ContainerRPMInclusionDepChainHandler(BaseKojiDepChainHandler): child_type = 'rpm' def interesting_tags(self, pdc): - key = "pdcupdater.%s.interesting_tags" % type(self).__name__ + key = f"pdcupdater.{type(self).__name__}.interesting_tags" if not self.config.get(key): - log.debug("config key %s has no value. performing queries." % key) + log.debug("config key %s has no value. 
performing queries.", key) if self.pdc_tag_mapping: return pdcupdater.utils.all_tags_from_pdc(pdc) else: return pdcupdater.utils.interesting_container_tags() - log.debug("using value from config key %s" % key) + log.debug("using value from config key %s", key) return self.config[key] def _yield_koji_relationships_from_tag(self, pdc, tag): @@ -56,16 +56,16 @@ def _yield_koji_relationships_from_tag(self, pdc, tag): pdcupdater.utils.ensure_release_exists(pdc, release_id, release) # This may be None, or 'osbs' or 'containerbuild' in Fedora. - key = "pdcupdater.%s.container_build_user" % type(self).__name__ + key = f"pdcupdater.{type(self).__name__}.container_build_user" owner = self.config.get(key) - log.debug("Found %r for config key %r" % (owner, key)) + log.debug("Found %r for config key %r", owner, key) # Return builds in the tag owned by the user, if configured. builds = pdcupdater.services.koji_builds_in_tag(self.koji_url, tag, owner=owner) for i, build in enumerate(builds): - log.info("Considering container build idx=%r, (%i of %i)" % ( - build['build_id'], i, len(builds))) + log.info("Considering container build idx=%r, (%i of %i)", + build['build_id'], i, len(builds)) relationships = list(self._yield_koji_relationships_from_build( self.koji_url, build['build_id'])) @@ -86,7 +86,7 @@ def _yield_koji_relationships_from_build(self, koji_url, build_id, rpms=None): build = pdcupdater.services.koji_get_build(koji_url, build_id) if not build: - raise ValueError("Unable to find build %r" % build_id) + raise ValueError(f"Unable to find build {build_id!r}") parent = build['name'] artifacts = pdcupdater.services.koji_archives_from_build( @@ -95,7 +95,7 @@ def _yield_koji_relationships_from_build(self, koji_url, build_id, rpms=None): for artifact in artifacts: if artifact['type_name'] in ('ks', 'cfg', 'xml'): continue - log.debug("Looking up installed rpms for %r" % artifact['filename']) + log.debug("Looking up installed rpms for %r", artifact['filename']) rpms = 
pdcupdater.services.koji_rpms_from_archive(self.koji_url, artifact) for entry in rpms: child = entry['name'] diff --git a/pdcupdater/handlers/depchain/rpms.py b/pdcupdater/handlers/depchain/rpms.py index f8d894d..22c87f0 100644 --- a/pdcupdater/handlers/depchain/rpms.py +++ b/pdcupdater/handlers/depchain/rpms.py @@ -55,8 +55,8 @@ def _format_rpm_filename(rpm): return "{name}-{version}-{release}.{arch}.rpm".format(**rpm) working_set = [_format_rpm_filename(rpm) for rpm in working_set] - log.info("Considering build idx=%r, (%i of %i) with %r" % ( - working_build_id, i, len(rpms), working_set)) + log.info("Considering build idx=%r, (%i of %i) with %r", + working_build_id, i, len(rpms), working_set) relationships = list(self._yield_koji_relationships_from_build( self.koji_url, working_build_id, rpms=working_set)) @@ -87,16 +87,16 @@ class NewRPMBuildTimeDepChainHandler(BaseRPMDepChainHandler): child_type = 'rpm' def interesting_tags(self, pdc): - key = "pdcupdater.%s.interesting_tags" % type(self).__name__ + key = f"pdcupdater.{type(self).__name__}.interesting_tags" if not self.config.get(key): - log.debug("config key %s has no value. performing queries." % key) + log.debug("config key %s has no value. 
performing queries.", key) if self.pdc_tag_mapping: return pdcupdater.utils.all_tags_from_pdc(pdc) else: return pdcupdater.utils.interesting_tags() - log.debug("using value from config key %s" % key) + log.debug("using value from config key %s", key) return self.config[key] def _yield_koji_relationships_from_build(self, koji_url, build_id, rpms=None): @@ -110,7 +110,7 @@ def _yield_koji_relationships_from_build(self, koji_url, build_id, rpms=None): results = collections.defaultdict(set) def _get_buildroot(filename): - log.debug("Looking up buildtime deps in koji for %r" % filename) + log.debug("Looking up buildtime deps in koji for %r", filename) return filename, pdcupdater.services.koji_list_buildroot_for( self.koji_url, filename) @@ -147,16 +147,16 @@ class NewRPMRunTimeDepChainHandler(BaseRPMDepChainHandler): child_type = 'rpm' def interesting_tags(self, pdc): - key = "pdcupdater.%s.interesting_tags" % type(self).__name__ + key = f"pdcupdater.{type(self).__name__}.interesting_tags" if not self.config.get(key): - log.debug("config key %s has no value. performing queries." % key) + log.debug("config key %s has no value. 
performing queries.", key) if self.pdc_tag_mapping: return pdcupdater.utils.all_tags_from_pdc(pdc) else: return pdcupdater.utils.interesting_tags() - log.debug("using value from config key %s" % key) + log.debug("using value from config key %s", key) return self.config[key] def _yield_koji_relationships_from_build(self, koji_url, build_id, rpms=None): @@ -169,7 +169,7 @@ def _yield_koji_relationships_from_build(self, koji_url, build_id, rpms=None): results = collections.defaultdict(set) def _get_requirements(filename): - log.debug("Looking up installtime deps in koji for %r" % filename) + log.debug("Looking up installtime deps in koji for %r", filename) return filename, pdcupdater.services.koji_yield_rpm_requires( self.koji_url, filename) diff --git a/pdcupdater/handlers/modules.py b/pdcupdater/handlers/modules.py index b49cbbc..9cd9294 100644 --- a/pdcupdater/handlers/modules.py +++ b/pdcupdater/handlers/modules.py @@ -20,11 +20,11 @@ class ModuleStateChangeHandler(pdcupdater.handlers.BaseHandler): """ When the state of a module changes. """ - processing_states = set(('done', 'ready')) - other_states = set(('wait', 'build')) - irrelevant_states = set(('init',)) + processing_states = {'done', 'ready'} + other_states = {'wait', 'build'} + irrelevant_states = {'init'} relevant_states = processing_states.union(other_states) - error_states = set(('failed',)) + error_states = {'failed'} valid_states = relevant_states.union(error_states).union(irrelevant_states) def __init__(self, *args, **kwargs): @@ -96,14 +96,14 @@ def get_module_rpms(self, pdc, module): rpms = [] # Flatten into a list and augment the koji dict with tag info. 
for rpm in koji_rpms: - data = dict( - name=rpm['name'], - version=rpm['version'], - release=rpm['release'], - epoch=rpm['epoch'] or 0, - arch=rpm['arch'], - srpm_name=rpm['srpm_name'], - ) + data = { + 'name': rpm['name'], + 'version': rpm['version'], + 'release': rpm['release'], + 'epoch': rpm['epoch'] or 0, + 'arch': rpm['arch'], + 'srpm_name': rpm['srpm_name'], + } if 'srpm_nevra' in rpm and rpm['arch'] != 'src': data['srpm_nevra'] = rpm['srpm_nevra'] @@ -111,8 +111,8 @@ def get_module_rpms(self, pdc, module): # For SRPM packages, include the hash and branch from which is # has been built. if (rpm['arch'] == 'src' - and rpm['name'] in list(mmd.get_rpm_components().keys()) - and 'rpms' in list(mmd.get_xmd()['mbs'].keys()) + and rpm['name'] in mmd.get_rpm_components() + and 'rpms' in mmd.get_xmd()['mbs'] and rpm['name'] in mmd.get_xmd()['mbs']['rpms']): mmd_rpm = mmd.get_rpm_components()[rpm['name']] xmd_rpm = mmd.get_xmd()['mbs']['rpms'][rpm['name']] @@ -124,7 +124,7 @@ def get_module_rpms(self, pdc, module): return rpms def handle(self, pdc, msg): - log.debug("handle(pdc, msg=%r)" % msg) + log.debug("handle(pdc, msg=%r)", msg) body = msg['msg'] state = body['state_name'] @@ -149,7 +149,7 @@ def handle(self, pdc, msg): # At this point we can update the Koji tag from MBS pdc[self.pdc_api][uid]._ += {'koji_tag': body['koji_tag']} elif body['state_name'] == 'ready': - log.info("%r ready. Patching with rpms and active=True." % uid) + log.info("%r ready. 
Patching with rpms and active=True.", uid) rpms = self.get_module_rpms(pdc, module) pdc[self.pdc_api][uid]._ += {'active': True, 'rpms': rpms} @@ -163,7 +163,7 @@ def _get_modulemd_by_mbs_id(self, idx): def create_module(self, pdc, body): """Creates a module in PDC.""" - log.debug("create_module(pdc, body=%r)" % body) + log.debug("create_module(pdc, body=%r)", body) modulemd = self._get_modulemd_by_mbs_id(body['id']) mmd = Modulemd.Module.new_from_string(modulemd) @@ -172,11 +172,11 @@ def create_module(self, pdc, body): runtime_deps = [] build_deps = [] for deps in mmd.get_dependencies(): - for dependency, streams in list(deps.get_requires().items()): + for dependency, streams in deps.get_requires().items(): for stream in streams.get(): runtime_deps.append( {'dependency': dependency, 'stream': stream}) - for dependency, streams in list(deps.get_buildrequires().items()): + for dependency, streams in deps.get_buildrequires().items(): for stream in streams.get(): build_deps.append( {'dependency': dependency, 'stream': stream}) @@ -217,10 +217,10 @@ def create_module(self, pdc, body): def get_or_create_module(self, pdc, body): """Attempts to retrieve the corresponding module from PDC, or if it's missing, creates it.""" - log.debug("get_or_create_module(pdc, body=%r)" % body) + log.debug("get_or_create_module(pdc, body=%r)", body) uid = self.get_uid(body) - log.info("Looking up module %r" % uid) + log.info("Looking up module %r", uid) if self.pdc_api == 'modules': query = {'uid': uid} else: @@ -228,7 +228,7 @@ def get_or_create_module(self, pdc, body): modules = pdc[self.pdc_api]._(page_size=-1, **query) if not modules: - log.info("%r not found. Creating." % uid) # a new module! + log.info("%r not found. Creating.", uid) # a new module! 
return self.create_module(pdc, body) else: return modules[0] diff --git a/pdcupdater/handlers/persons.py b/pdcupdater/handlers/persons.py index aa0a9ca..b72d522 100644 --- a/pdcupdater/handlers/persons.py +++ b/pdcupdater/handlers/persons.py @@ -18,8 +18,8 @@ def can_handle(self, pdc, msg): def handle(self, pdc, msg): username = msg['msg']['user'] - email = '%s@fedoraproject.org' % username - pdc['persons']._(dict(username=username, email=email)) + email = f'{username}@fedoraproject.org' + pdc['persons']._({'username': username, 'email': email}) def audit(self, pdc): # Query the data sources @@ -27,8 +27,8 @@ def audit(self, pdc): pdc_persons = pdc.get_paged(pdc['persons']._) # normalize the two lists - fas_persons = set([p['username'] for p in fas_persons]) - pdc_persons = set([p['username'] for p in pdc_persons]) + fas_persons = {p['username'] for p in fas_persons} + pdc_persons = {p['username'] for p in pdc_persons} # use set operators to determine the difference present = pdc_persons - fas_persons @@ -38,12 +38,12 @@ def audit(self, pdc): def initialize(self, pdc): fas_persons = pdcupdater.services.fas_persons(**self.fas_config) - persons = [dict( - username=person['username'], - email='%s@fedoraproject.org' % person['username'], - ) for person in fas_persons] + persons = [{ + 'username': person['username'], + 'email': f"{person['username']}@fedoraproject.org", + } for person in fas_persons] for person in persons: try: pdc['persons']._(person) except beanbag.bbexcept.BeanBagException as e: - log.warn("persons, %r %r" % (component, e.response)) + log.warn("persons, %r %r", person, e.response) diff --git a/pdcupdater/handlers/pkgdb.py b/pdcupdater/handlers/pkgdb.py index d0a2165..8cca18d 100644 --- a/pdcupdater/handlers/pkgdb.py +++ b/pdcupdater/handlers/pkgdb.py @@ -51,19 +51,19 @@ def handle(self, pdc, msg): collection = msg['msg']['package_listing']['collection'] release_id = collection2release_id(pdc, collection) global_component = name - data = dict( - 
name=name, - release=release_id, - global_component=global_component, - dist_git_branch=branch, - #bugzilla_component=name, - brew_package=name, - active=True, - type='rpm', - ) + data = { + 'name': name, + 'release': release_id, + 'global_component': global_component, + 'dist_git_branch': branch, + # 'bugzilla_component': name, + 'brew_package': name, + 'active': True, + 'type': 'rpm', + } pdcupdater.utils.ensure_global_component_exists(pdc, name) # https://pdc.fedoraproject.org/rest_api/v1/release-components/ - log.info("Creating release component %s for %s" % (name, release_id)) + log.info("Creating release component %s for %s", name, release_id) pdc['release-components']._(data) def audit(self, pdc): @@ -71,8 +71,8 @@ def audit(self, pdc): pdc_packages = pdc.get_paged(pdc['global-components']._) # normalize the two lists - pkgdb_packages = set([p['name'] for p in pkgdb_packages]) - pdc_packages = set([p['name'] for p in pdc_packages]) + pkgdb_packages = {p['name'] for p in pkgdb_packages} + pdc_packages = {p['name'] for p in pdc_packages} # use set operators to determine the difference present = pdc_packages - pkgdb_packages @@ -82,14 +82,14 @@ def audit(self, pdc): def initialize(self, pdc): packages = pdcupdater.services.pkgdb_packages(self.pkgdb_url) - components = [dict( - name=package['name'], - ) for package in packages] + components = [{ + 'name': package['name'], + } for package in packages] for component in components: try: pdc['global-components']._(component) except beanbag.bbexcept.BeanBagException as e: - log.warn("global-component, %r %r" % (component, e.response)) + log.warn("global-component, %r %r", component, e.response)
class NewPackageBranchHandler(pdcupdater.handlers.BaseHandler): @@ -112,19 +117,19 @@ def handle(self, pdc, msg): collection = msg['msg']['package_listing']['collection'] release_id = collection2release_id(pdc, collection) global_component = name - data = dict( - name=name, - release=release_id, - global_component=global_component, - dist_git_branch=branch, - #bugzilla_component=name, - brew_package=name, - active=True, - type='rpm', - ) + data = { + 'name': name, + 'release': release_id, + 'global_component': global_component, + 'dist_git_branch': branch, + # 'bugzilla_component': name, + 'brew_package': name, + 'active': True, + 'type': 'rpm', + } # https://pdc.fedoraproject.org/rest_api/v1/release-components/ pdcupdater.utils.ensure_global_component_exists(pdc, name) - log.info("Creating release component %s for %s" % (name, release_id)) + log.info("Creating release component %s for %s", name, release_id) pdc['release-components']._(data) def audit(self, pdc): @@ -133,7 +138,7 @@ def audit(self, pdc): pdc_packages = pdc.get_paged(pdc['release-components']._) # normalize the two lists - pkgdb_packages = set( + pkgdb_packages = { ( package['name'], pdcupdater.utils.pkgdb2release(collection), @@ -141,11 +146,11 @@ def audit(self, pdc): ) for package in pkgdb_packages for collection in package['collections'] - ) - pdc_packages = set( + } + pdc_packages = { (p['name'], p['release']['release_id'], p['dist_git_branch']) for p in pdc_packages - ) + } # use set operators to determine the difference present = pdc_packages - pkgdb_packages @@ -157,22 +162,22 @@ def initialize(self, pdc): packages = pdcupdater.services.pkgdb_packages( self.pkgdb_url, extra=True) components = [ - dict( - name=package['name'], - release=collection2release_id(pdc, collection), - global_component=package['name'], - dist_git_branch=collection['branchname'], - #bugzilla_component=package['name'], - brew_package=package['name'], - active=True, - 
type='rpm', - ) - for package in packages - for collection in package['collections'] + { + 'name': package['name'], + 'release': collection2release_id(pdc, collection), + 'global_component': package['name'], + 'dist_git_branch': collection['branchname'], + # 'bugzilla_component': package['name'], + 'brew_package': package['name'], + 'active': True, + 'type': 'rpm', + } + for package in packages + for collection in package['collections'] ] for component in components: try: pdc['release-components']._(component) except beanbag.bbexcept.BeanBagException as e: - log.warn("release-component, %r %r" % (component, e.response)) + log.warn("release-component, %r %r", component, e.response) diff --git a/pdcupdater/handlers/retirement.py b/pdcupdater/handlers/retirement.py index 3ccda5a..ef041c0 100644 --- a/pdcupdater/handlers/retirement.py +++ b/pdcupdater/handlers/retirement.py @@ -67,7 +67,7 @@ def handle(self, pdc, msg): 'repo': repo, 'branch': branchname, 'file': 'dead.package'} - log.info('Checking for file: %s' % fileurl) + log.info('Checking for file: %s', fileurl) resp = requests.head(fileurl, timeout=15) if resp.status_code != 200: log.info('Seems not to actually be retired, possibly merge') diff --git a/pdcupdater/handlers/rpms.py b/pdcupdater/handlers/rpms.py index 04fdd89..20cac59 100644 --- a/pdcupdater/handlers/rpms.py +++ b/pdcupdater/handlers/rpms.py @@ -30,14 +30,14 @@ def can_handle(self, pdc, msg): # Ignore secondary arches for now if msg['msg']['instance'] != 'primary': - log.debug("From %r. Skipping." % (msg['msg']['instance'])) + log.debug("From %r. Skipping.", msg['msg']['instance']) return False interesting = interesting_tags() tag = msg['msg']['tag'] if tag not in interesting: - log.debug("%r not in %r. Skipping." % (tag, interesting)) + log.debug("%r not in %r. 
Skipping.", tag, interesting) return False return True @@ -61,21 +61,21 @@ def handle(self, pdc, msg): # Start with podofo-0.9.1-17.el7.ppc64.rpm name, version, release = rpm.rsplit('-', 2) release, arch, _ = release.rsplit('.', 2) - data = dict( - name=name, - version=version, - release=release, - arch=arch, - epoch=build['epoch'] or 0, - srpm_name=build['name'], - srpm_nevra=None, # This gets overwritten below - linked_releases=[ + data = { + 'name': name, + 'version': version, + 'release': release, + 'arch': arch, + 'epoch': build['epoch'] or 0, + 'srpm_name': build['name'], + 'srpm_nevra': None, # This gets overwritten below + 'linked_releases': [ release_id, ], - ) + } if arch != 'src': data['srpm_nevra'] = build['nvr'] - log.info("Adding rpm %s to PDC release %s" % (rpm, release_id)) + log.info("Adding rpm %s to PDC release %s", rpm, release_id) pdc['rpms']._(data) def audit(self, pdc): @@ -84,8 +84,8 @@ def audit(self, pdc): pdc_rpms = pdc.get_paged(pdc['rpms']._) # Normalize the lists before comparing them. - koji_rpms = set([json.dumps(r, sort_keys=True) for r in koji_rpms]) - pdc_rpms = set([json.dumps(r, sort_keys=True) for r in pdc_rpms]) + koji_rpms = {json.dumps(r, sort_keys=True) for r in koji_rpms} + pdc_rpms = {json.dumps(r, sort_keys=True) for r in pdc_rpms} # use set operators to determine the difference present = pdc_rpms - koji_rpms @@ -96,7 +96,7 @@ def audit(self, pdc): def initialize(self, pdc): # Get a list of all rpms in koji and send it to PDC for batch in self._gather_koji_rpms(): - log.info("Uploading info about %i rpms to PDC." % len(batch)) + log.info("Uploading info about %i rpms to PDC.", len(batch)) for entry in batch: pdc['rpms']._(entry) @@ -107,19 +107,19 @@ def _gather_koji_rpms(self): } # Flatten into a list and augment the koji dict with tag info. 
- for tag, rpms in list(koji_rpms.items()): + for tag, rpms in koji_rpms.items(): yield [ - dict( - name=rpm['name'], - version=rpm['version'], - release=rpm['release'], - epoch=rpm['epoch'] or 0, - arch=rpm['arch'], - linked_releases=[ + { + 'name': rpm['name'], + 'version': rpm['version'], + 'release': rpm['release'], + 'epoch': rpm['epoch'] or 0, + 'arch': rpm['arch'], + 'linked_releases': [ tag2release(tag)[0], # Just the release_id ], - srpm_name=rpm['srpm_name'], - srpm_nevra=rpm['arch'] != 'src' and rpm.get('srpm_nevra') or None, - ) + 'srpm_name': rpm['srpm_name'], + 'srpm_nevra': rpm['arch'] != 'src' and rpm.get('srpm_nevra') or None, + } for rpm in rpms ] diff --git a/pdcupdater/services.py b/pdcupdater/services.py index 6c0eee3..5dddb58 100644 --- a/pdcupdater/services.py +++ b/pdcupdater/services.py @@ -20,7 +20,7 @@ def _scrape_links(session, url): log.debug('Scraping %s', url) response = session.get(url) if not bool(response): - raise IOError("Couldn't talk to %r, %r" % (url, response)) + raise IOError(f"Couldn't talk to {url!r}, {response!r}") soup = bs4.BeautifulSoup(response.text, 'html.parser') pre = soup.find('pre') for link in pre.findAll('a'): @@ -46,7 +46,7 @@ def old_composes(base_url): for compose, compose_link in compose_links: # Some of these are symlinks to others if compose.startswith('latest'): - log.debug("Skipping %s. Just a symlink." % compose_link) + log.debug("Skipping %s. Just a symlink.", compose_link) continue # Some of these failed mid-way and didn't complete. @@ -65,7 +65,7 @@ def old_composes(base_url): continue # If we got this far, then return it - log.info(" found %s/%s" % (branch, compose)) + log.info(" found %s/%s", branch, compose) yield branch, compose, compose_link # Finally, close the requests session. 
@@ -79,7 +79,7 @@ def fas_persons(base_url, username, password): import fedora.client import fedora.client.fas2 - log.info("Connecting to FAS at %r" % base_url) + log.info("Connecting to FAS at %r", base_url) fasclient = fedora.client.fas2.AccountSystem( base_url=base_url, username=username, password=password) @@ -99,7 +99,7 @@ def koji_list_buildroot_for(url, filename, tries=3): rpminfo = session.getRPM(filename) if type(rpminfo) == list: if not tries: - raise TypeError("Got a list back from koji.getRPM(%r)" % filename) + raise TypeError(f"Got a list back from koji.getRPM({filename!r})") # Try again.. this is weird behavior... return koji_list_buildroot_for(url, filename, tries-1) return session.listRPMs(componentBuildrootID=rpminfo['buildroot_id']) @@ -121,7 +121,7 @@ def koji_yield_rpm_requires(url, nvra): rpm.RPMSENSE_GREATER: '>', rpm.RPMSENSE_EQUAL: '=', } - relevant_flags = reduce(operator.ior, list(header_lookup.keys())) + relevant_flags = reduce(operator.ior, header_lookup) # Query koji and step over all the deps listed in the raw rpm headers. deps = session.getRPMDeps(nvra, koji.DEP_REQUIRE) @@ -149,12 +149,12 @@ def koji_yield_rpm_requires(url, nvra): def koji_builds_in_tag(url, tag, owner=None): """ Return the list of koji builds in a tag. """ import koji - log.info("Listing rpms in koji(%s) tag %s" % (url, tag)) + log.info("Listing rpms in koji(%s) tag %s", url, tag) session = koji.ClientSession(url) try: return session.listTagged(tag, latest=True, owner=owner) except koji.GenericError as e: - log.warn("Failed to get builds in tag %r: %r" % (tag, e)) + log.warn("Failed to get builds in tag %r: %r", tag, e) return [] @@ -162,13 +162,13 @@ def koji_builds_in_tag(url, tag, owner=None): """ Return the list of koji rpms in a tag. 
""" import koji - log.info("Listing rpms in koji(%s) tag %s" % (url, tag)) + log.info("Listing rpms in koji(%s) tag %s", url, tag) session = koji.ClientSession(url) try: rpms, builds = session.listTaggedRPMS(tag, latest=True) except koji.GenericError as e: - log.exception("Failed to list rpms in tag %r" % tag) + log.exception("Failed to list rpms in tag %r", tag) # If the tag doesn't exist.. then there are no rpms in that tag. return [] @@ -188,7 +188,7 @@ def koji_get_build(url, build_id): session = koji.ClientSession(url) build = session.getBuild(build_id) if build: - assert build['id'] == build_id, "%r != %r" % (build['id'], build_id) + assert build['id'] == build_id, f"{build['id']!r} != {build_id!r}" return build @@ -210,7 +210,7 @@ def koji_rpms_from_archive(url, artifact): @pdcupdater.utils.retry() def koji_rpms_from_build(url, build_id): import koji - log.info("Listing rpms in koji(%s) for %r" % (url, build_id)) + log.info("Listing rpms in koji(%s) for %r", url, build_id) session = koji.ClientSession(url) build = koji_get_build(url, build_id) @@ -219,14 +219,14 @@ def koji_rpms_from_build(url, build_id): rpms.add('{0}.{1}.rpm'.format(rpm['nvr'], rpm['arch'])) # Dependable order for testing. - rpms = list(sorted(rpms)) + rpms = sorted(rpms) return build, rpms def pkgdb_packages(base_url, extra=False): """ Return a generator over all the packages in pkgdb. """ import pkgdb2client - log.info("Connecting to pkgdb at %r" % base_url) + log.info("Connecting to pkgdb at %r", base_url) pkgdb = pkgdb2client.PkgDB(url=base_url) result = pkgdb.get_packages(page='all') packages = result['packages'] @@ -247,6 +247,5 @@ def pkgdb_packages(base_url, extra=False): # A little test by hand... 
logging.basicConfig(level=logging.DEBUG) composes = old_composes('https://kojipkgs.fedoraproject.org/compose/') - composes = list(composes) for compose in composes: print(compose) diff --git a/pdcupdater/tests/handler_tests/test_compose.py b/pdcupdater/tests/handler_tests/test_compose.py index bff2fa6..6567b59 100644 --- a/pdcupdater/tests/handler_tests/test_compose.py +++ b/pdcupdater/tests/handler_tests/test_compose.py @@ -38,15 +38,15 @@ def test_cannot_handle_new_compose_start(self): # Read the docs and code about the message producer for more info # https://pagure.io/pungi/blob/master/f/doc/configuration.rst#_566 # https://pagure.io/pungi/blob/master/f/bin/pungi-fedmsg-notification - msg = dict( - topic='org.fedoraproject.prod.pungi.compose.status.change', - msg=dict( - status='STARTED', - compose_id='Fedora-24-20151130.n.2', - location='http://kojipkgs.fedoraproject.org/compose//rawhide/' + msg = { + 'topic': 'org.fedoraproject.prod.pungi.compose.status.change', + 'msg': { + 'status': 'STARTED', + 'compose_id': 'Fedora-24-20151130.n.2', + 'location': 'http://kojipkgs.fedoraproject.org/compose//rawhide/' 'Fedora-24-20151130.n.2/compose', - ), - ) + }, + } result = self.handler.can_handle(None, msg) self.assertEqual(result, False) @@ -54,15 +54,15 @@ def test_cannot_handle_new_compose_doomed(self): # Read the docs and code about the message producer for more info # https://pagure.io/pungi/blob/master/f/doc/configuration.rst#_566 # https://pagure.io/pungi/blob/master/f/bin/pungi-fedmsg-notification - msg = dict( - topic='org.fedoraproject.prod.pungi.compose.status.change', - msg=dict( - status='DOOMED', - compose_id='Fedora-24-20151130.n.2', - location='http://kojipkgs.fedoraproject.org/compose//rawhide/' + msg = { + 'topic': 'org.fedoraproject.prod.pungi.compose.status.change', + 'msg': { + 'status': 'DOOMED', + 'compose_id': 'Fedora-24-20151130.n.2', + 'location': 'http://kojipkgs.fedoraproject.org/compose//rawhide/' 'Fedora-24-20151130.n.2/compose', - ), - ) 
+ }, + } result = self.handler.can_handle(None, msg) self.assertEqual(result, False) @@ -70,15 +70,15 @@ def test_can_handle_new_compose_finish(self): # Read the docs and code about the message producer for more info # https://pagure.io/pungi/blob/master/f/doc/configuration.rst#_566 # https://pagure.io/pungi/blob/master/f/bin/pungi-fedmsg-notification - msg = dict( - topic='org.fedoraproject.prod.pungi.compose.status.change', - msg=dict( - status='FINISHED', - compose_id='Fedora-24-20151130.n.2', - location='http://kojipkgs.fedoraproject.org/compose//rawhide/' + msg = { + 'topic': 'org.fedoraproject.prod.pungi.compose.status.change', + 'msg': { + 'status': 'FINISHED', + 'compose_id': 'Fedora-24-20151130.n.2', + 'location': 'http://kojipkgs.fedoraproject.org/compose//rawhide/' 'Fedora-24-20151130.n.2/compose', - ), - ) + }, + } result = self.handler.can_handle(None, msg) self.assertEqual(result, True) @@ -87,33 +87,33 @@ def test_handle_new_compose(self, pdc): # Read the docs and code about the message producer for more info # https://pagure.io/pungi/blob/master/f/doc/configuration.rst#_566 # https://pagure.io/pungi/blob/master/f/bin/pungi-fedmsg-notification - msg = dict( - topic='org.fedoraproject.prod.pungi.compose.status.change', - msg=dict( - status='FINISHED', - compose_id='Fedora-24-20151130.n.2', - location='http://kojipkgs.fedoraproject.org/compose//rawhide/' + msg = { + 'topic': 'org.fedoraproject.prod.pungi.compose.status.change', + 'msg': { + 'status': 'FINISHED', + 'compose_id': 'Fedora-24-20151130.n.2', + 'location': 'http://kojipkgs.fedoraproject.org/compose//rawhide/' 'Fedora-24-20151130.n.2/compose', - ), - ) + }, + } self.handler.handle(pdc, msg) # Check compose images compose_images = pdc.calls['compose-images'] self.assertEqual(len(compose_images), 1) - self.assertDictEqual(compose_images[0][1], dict( - release_id='fedora-24', - composeinfo=composeinfo_modified, - image_manifest=images, - )) + self.assertDictEqual(compose_images[0][1], { + 
'release_id': 'fedora-24', + 'composeinfo': composeinfo_modified, + 'image_manifest': images, + }) # Check compose rpms compose_rpms = pdc.calls['compose-rpms'] self.assertEqual(len(compose_rpms), 1) - self.assertEqual(compose_rpms[0][1], dict( - release_id='fedora-24', - composeinfo=composeinfo_modified, - rpm_manifest=rpms, - )) + self.assertEqual(compose_rpms[0][1], { + 'release_id': 'fedora-24', + 'composeinfo': composeinfo_modified, + 'rpm_manifest': rpms, + }) @mock_pdc @mock.patch('pdcupdater.services.old_composes') @@ -131,19 +131,19 @@ def test_initialize_from_old_composes(self, pdc, old_composes): # Check compose images compose_images = pdc.calls['compose-images'] self.assertEqual(len(compose_images), 1) - self.assertDictEqual(compose_images[0][1], dict( - release_id='fedora-24', - composeinfo=composeinfo_modified, - image_manifest=images, - )) + self.assertDictEqual(compose_images[0][1], { + 'release_id': 'fedora-24', + 'composeinfo': composeinfo_modified, + 'image_manifest': images, + }) # Check compose rpms compose_rpms = pdc.calls['compose-rpms'] self.assertEqual(len(compose_rpms), 1) - self.assertEqual(compose_rpms[0][1], dict( - release_id='fedora-24', - composeinfo=composeinfo_modified, - rpm_manifest=rpms, - )) + self.assertEqual(compose_rpms[0][1], { + 'release_id': 'fedora-24', + 'composeinfo': composeinfo_modified, + 'rpm_manifest': rpms, + }) @mock_pdc @mock.patch('pdcupdater.services.old_composes') @@ -194,4 +194,4 @@ def test_audit_missing_one(self, pdc, old_composes): # Check the results. 
self.assertSetEqual(present, set()) - self.assertSetEqual(absent, set(['Fedora-24-20151130.n.3'])) + self.assertSetEqual(absent, {'Fedora-24-20151130.n.3'}) diff --git a/pdcupdater/tests/handler_tests/test_depchain_containers.py b/pdcupdater/tests/handler_tests/test_depchain_containers.py index a7288f3..2b0dae7 100644 --- a/pdcupdater/tests/handler_tests/test_depchain_containers.py +++ b/pdcupdater/tests/handler_tests/test_depchain_containers.py @@ -65,7 +65,7 @@ def test_handle_new_build(self, pdc, get_build, archives, rpms, tags, rawhide): 'release-components', 'global-components', ]) - self.assertEqual(sorted(pdc.calls.keys()), expected_keys) + self.assertEqual(sorted(pdc.calls), expected_keys) self.assertEqual(len(pdc.calls['global-components']), 1) self.assertEqual(len(pdc.calls['release-components']), 1) @@ -194,7 +194,7 @@ def test_audit_mismatch(self, pdc, get_build, builds, archives, rpms, tags, rawh }) # Check the results. - self.assertSetEqual(present, set([])) - self.assertSetEqual(absent, set([ + self.assertSetEqual(present, set()) + self.assertSetEqual(absent, { 'cockpit/fedora-24-updates ContainerIncludesRPM guake/fedora-24-updates', - ])) + }) diff --git a/pdcupdater/tests/handler_tests/test_depchain_rpms.py b/pdcupdater/tests/handler_tests/test_depchain_rpms.py index 10d8eca..929d887 100644 --- a/pdcupdater/tests/handler_tests/test_depchain_rpms.py +++ b/pdcupdater/tests/handler_tests/test_depchain_rpms.py @@ -76,7 +76,7 @@ def test_handle_new_build(self, pdc, tags, rawhide, buildroot): 'release-components', 'global-components', ]) - self.assertEqual(sorted(pdc.calls.keys()), expected_keys) + self.assertEqual(sorted(pdc.calls), expected_keys) self.assertEqual(len(pdc.calls['global-components']), 22) self.assertEqual(len(pdc.calls['release-components']), 22) @@ -173,11 +173,11 @@ def test_audit_mismatch(self, pdc, builds, rpms, buildroot, tags, rawhide): }) # Check the results. 
- self.assertSetEqual(present, set([])) - self.assertSetEqual(absent, set([ + self.assertSetEqual(present, set()) + self.assertSetEqual(absent, { 'guake/fedora-24 RPMBuildRequires buildtimelib1/fedora-24', 'guake/fedora-24 RPMBuildRoot buildtimelib2/fedora-24', - ])) + }) class TestRuntimeDepIngestionFedora(BaseHandlerTest): @@ -220,8 +220,8 @@ def test_audit_mismatch(self, pdc, builds, rpms, requires, tags, rawhide): }) # Check the results. - self.assertSetEqual(present, set(['guake/fedora-24 RPMRequires nethack/fedora-24'])) - self.assertSetEqual(absent, set(['guake/fedora-24 RPMRequires runtimelib1/fedora-24'])) + self.assertSetEqual(present, {'guake/fedora-24 RPMRequires nethack/fedora-24'}) + self.assertSetEqual(absent, {'guake/fedora-24 RPMRequires runtimelib1/fedora-24'}) @mock_pdc @mock.patch('pdcupdater.utils.rawhide_tag') @@ -366,7 +366,7 @@ def test_handle_new_brew_build(self, pdc, tags, tag2release, buildroot): 'release-components', 'global-components', ]) - self.assertEqual(sorted(pdc.calls.keys()), expected_keys) + self.assertEqual(sorted(pdc.calls), expected_keys) self.assertEqual(len(pdc.calls['global-components']), 1) self.assertEqual(len(pdc.calls['release-components']), 1) diff --git a/pdcupdater/tests/handler_tests/test_modules.py b/pdcupdater/tests/handler_tests/test_modules.py index 2bb5be8..19655ce 100644 --- a/pdcupdater/tests/handler_tests/test_modules.py +++ b/pdcupdater/tests/handler_tests/test_modules.py @@ -199,7 +199,7 @@ def test_update_unreleased_variant_ready( endpoint = 'unreleasedvariants/testmodule:master:20180123171544' self.assertEqual(pdc.calls[endpoint][0][0], 'PATCH') self.assertEqual( - set(pdc.calls[endpoint][0][1].keys()), set(['active', 'rpms'])) + set(pdc.calls[endpoint][0][1]), {'active', 'rpms'}) @mock.patch(HANDLER_PATH + '.get_pdc_api') @mock.patch(HANDLER_PATH + '.get_module_rpms') @@ -217,7 +217,7 @@ def test_update_unreleased_variant_build( # Make sure the PATCH was sent on the module endpoint = 
'unreleasedvariants/testmodule:master:20180123171544' self.assertEqual(pdc.calls[endpoint][0][0], 'PATCH') - self.assertEqual(list(pdc.calls[endpoint][0][1].keys()), ['koji_tag']) + self.assertEqual(set(pdc.calls[endpoint][0][1]), {'koji_tag'}) @mock.patch('pdcupdater.services.koji_rpms_in_tag') @mock.patch(HANDLER_PATH + '._get_modulemd_by_mbs_id') @@ -300,7 +300,7 @@ def test_update_module_ready(self, pdc, mbs, get_rpms): endpoint = 'modules/testmodule:master:20180123171544:c2c572ec' self.assertEqual(pdc.calls[endpoint][0][0], 'PATCH') self.assertEqual( - set(pdc.calls[endpoint][0][1].keys()), set(['active', 'rpms'])) + set(pdc.calls[endpoint][0][1]), {'active', 'rpms'}) @mock.patch(HANDLER_PATH + '.get_module_rpms') @mock.patch(HANDLER_PATH + '._get_modulemd_by_mbs_id') @@ -317,4 +317,4 @@ def test_update_module_build(self, pdc, mbs, get_rpms): # Make sure the PATCH was sent on the module endpoint = 'modules/testmodule:master:20180123171544:c2c572ec' self.assertEqual(pdc.calls[endpoint][0][0], 'PATCH') - self.assertEqual(list(pdc.calls[endpoint][0][1].keys()), ['koji_tag']) + self.assertEqual(set(pdc.calls[endpoint][0][1]), {'koji_tag'}) diff --git a/pdcupdater/tests/handler_tests/test_persons.py b/pdcupdater/tests/handler_tests/test_persons.py index 7c9a7ec..62d1ff9 100644 --- a/pdcupdater/tests/handler_tests/test_persons.py +++ b/pdcupdater/tests/handler_tests/test_persons.py @@ -29,10 +29,10 @@ def test_handle_new_package(self, pdc): self.handler.handle(pdc, msg) self.assertDictEqual(pdc.calls, { 'persons': [ - ('POST', dict( - username='alvicler', - email='alvicler@fedoraproject.org', - )), + ('POST', { + 'username': 'alvicler', + 'email': 'alvicler@fedoraproject.org', + }), ], }) @@ -51,14 +51,14 @@ def test_initialize_from_fas(self, pdc, fas): # Check the PDC calls.. 
self.assertDictEqual(pdc.calls, { 'persons': [ - ('POST', dict( - username='ralph', - email='ralph@fedoraproject.org', - )), - ('POST', dict( - username='lmacken', - email='lmacken@fedoraproject.org', - )), + ('POST', { + 'username': 'ralph', + 'email': 'ralph@fedoraproject.org', + }), + ('POST', { + 'username': 'lmacken', + 'email': 'lmacken@fedoraproject.org', + }), ], }) @@ -104,7 +104,7 @@ def test_audit_with_an_extra(self, pdc, fas): }) # Check the results. - self.assertSetEqual(present, set(['lmacken'])) + self.assertSetEqual(present, {'lmacken'}) self.assertSetEqual(absent, set()) @mock_pdc @@ -129,7 +129,7 @@ def test_audit_missing_one(self, pdc, fas): # Check the results. self.assertSetEqual(present, set()) - self.assertSetEqual(absent, set(['toshio'])) + self.assertSetEqual(absent, {'toshio'}) @mock_pdc @mock.patch('pdcupdater.services.fas_persons') @@ -150,5 +150,5 @@ def test_audit_flipping_out(self, pdc, fas): }) # Check the results. - self.assertSetEqual(present, set(['lmacken', 'ralph'])) - self.assertSetEqual(absent, set(['toshio'])) + self.assertSetEqual(present, {'lmacken', 'ralph'}) + self.assertSetEqual(absent, {'toshio'}) diff --git a/pdcupdater/tests/handler_tests/test_pkgdb.py b/pdcupdater/tests/handler_tests/test_pkgdb.py index 5aa1ed2..b14e5da 100644 --- a/pdcupdater/tests/handler_tests/test_pkgdb.py +++ b/pdcupdater/tests/handler_tests/test_pkgdb.py @@ -147,7 +147,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'master', 'global_component': 'guake', 'name': 'guake', @@ -157,7 +157,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'el6', 'global_component': 'guake', 'name': 'guake', @@ -167,7 +167,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'f20', 'global_component': 'guake', 
'name': 'guake', @@ -177,7 +177,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'epel7', 'global_component': 'guake', 'name': 'guake', @@ -187,7 +187,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'f21', 'global_component': 'guake', 'name': 'guake', @@ -197,7 +197,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'f22', 'global_component': 'guake', 'name': 'guake', @@ -207,7 +207,7 @@ { 'active': True, 'brew_package': 'guake', - #'bugzilla_component': u'guake', + #'bugzilla_component': 'guake', 'dist_git_branch': 'f23', 'global_component': 'guake', 'name': 'guake', @@ -217,7 +217,7 @@ { 'active': True, 'brew_package': 'geany', - #'bugzilla_component': u'geany', + #'bugzilla_component': 'geany', 'dist_git_branch': 'master', 'global_component': 'geany', 'name': 'geany', @@ -227,7 +227,7 @@ { 'active': True, 'brew_package': 'geany', - #'bugzilla_component': u'geany', + #'bugzilla_component': 'geany', 'dist_git_branch': 'el6', 'global_component': 'geany', 'name': 'geany', @@ -237,7 +237,7 @@ { 'active': True, 'brew_package': 'geany', - #'bugzilla_component': u'geany', + #'bugzilla_component': 'geany', 'dist_git_branch': 'epel7', 'global_component': 'geany', 'name': 'geany', @@ -247,7 +247,7 @@ { 'active': True, 'brew_package': 'geany', - #'bugzilla_component': u'geany', + #'bugzilla_component': 'geany', 'dist_git_branch': 'f23', 'global_component': 'geany', 'name': 'geany', @@ -293,22 +293,22 @@ def test_handle_new_package(self, pdc): self.handler.handle(pdc, msg) self.assertDictEqual(pdc.calls, { 'releases/fedora-23-updates': [ - ('GET', dict()), + ('GET', {}), ], 'global-components': [ - ('GET', dict(name='perl-Lingua-Translit')), + ('GET', {'name': 'perl-Lingua-Translit'}), ], 'release-components': [ - 
('POST', dict( - name='perl-Lingua-Translit', - global_component='perl-Lingua-Translit', - #bugzilla_component=u'perl-Lingua-Translit', - brew_package='perl-Lingua-Translit', - release='fedora-23-updates', - dist_git_branch='f23', - type='rpm', - active=True, - )), + ('POST', { + 'name': 'perl-Lingua-Translit', + 'global_component': 'perl-Lingua-Translit', + # 'bugzilla_component': 'perl-Lingua-Translit', + 'brew_package': 'perl-Lingua-Translit', + 'release': 'fedora-23-updates', + 'dist_git_branch': 'f23', + 'type': 'rpm', + 'active': True, + }), ], }) @@ -350,7 +350,7 @@ def test_audit_with_an_extra(self, pdc, pkgdb): }) # Check the results. - self.assertSetEqual(present, set(['guake'])) + self.assertSetEqual(present, {'guake'}) self.assertSetEqual(absent, set()) @mock_pdc @@ -379,7 +379,7 @@ def test_audit_missing_one(self, pdc, pkgdb): # Check the results. self.assertSetEqual(present, set()) - self.assertSetEqual(absent, set(['gnome-terminal'])) + self.assertSetEqual(absent, {'gnome-terminal'}) @mock_pdc @mock.patch('pdcupdater.services.pkgdb_packages') @@ -407,8 +407,8 @@ def test_audit_flipping_out(self, pdc, pkgdb): }) # Check the results. - self.assertSetEqual(present, set(['guake'])) - self.assertSetEqual(absent, set([('gnome-terminal')])) + self.assertSetEqual(present, {'guake'}) + self.assertSetEqual(absent, {'gnome-terminal'}) @mock_pdc @mock.patch('pdcupdater.services.pkgdb_packages') @@ -422,12 +422,8 @@ def test_initialize_new_package(self, pdc, pkgdb): # Check the PDC calls.. 
self.assertDictEqual(pdc.calls, { 'global-components': [ - ('POST', dict( - name='guake', - )), - ('POST', dict( - name='geany', - )), + ('POST', {'name': 'guake'}), + ('POST', {'name': 'geany'}), ], }) @@ -455,16 +451,16 @@ def test_handle_new_package_branch(self, pdc): self.handler.handle(pdc, msg) self.assertDictEqual(pdc.calls, { 'release-components': [ - ('POST', dict( - name='perl-Lingua-Translit', - global_component='perl-Lingua-Translit', - #bugzilla_component=u'perl-Lingua-Translit', - brew_package='perl-Lingua-Translit', - release='fedora-24', - dist_git_branch='master', - type='rpm', - active=True, - )), + ('POST', { + 'name': 'perl-Lingua-Translit', + 'global_component': 'perl-Lingua-Translit', + # 'bugzilla_component': 'perl-Lingua-Translit', + 'brew_package': 'perl-Lingua-Translit', + 'release': 'fedora-24', + 'dist_git_branch': 'master', + 'type': 'rpm', + 'active': True, + }), ], 'releases/fedora-24': [('GET', {})], 'global-components': [('GET', {'name': 'perl-Lingua-Translit'}) ], @@ -508,7 +504,7 @@ def test_audit_with_an_extra(self, pdc, pkgdb): }) # Check the results. - self.assertSetEqual(present, set([('guake', 'fedora-24', 'master')])) + self.assertSetEqual(present, {('guake', 'fedora-24', 'master')}) self.assertSetEqual(absent, set()) @mock_pdc @@ -539,7 +535,7 @@ def test_audit_missing_one(self, pdc, pkgdb): # Check the results. self.assertSetEqual(present, set()) - self.assertSetEqual(absent, set([('guake', 'fedora-18-updates', 'f18')])) + self.assertSetEqual(absent, {('guake', 'fedora-18-updates', 'f18')}) @mock_pdc @mock.patch('pdcupdater.services.pkgdb_packages') @@ -569,8 +565,8 @@ def test_audit_flipping_out(self, pdc, pkgdb): }) # Check the results. 
- self.assertSetEqual(present, set([('guake', 'fedora-24', 'master')])) - self.assertSetEqual(absent, set([('guake', 'fedora-18-updates', 'f18')])) + self.assertSetEqual(present, {('guake', 'fedora-24', 'master')}) + self.assertSetEqual(absent, {('guake', 'fedora-18-updates', 'f18')}) @mock_pdc @mock.patch('pdcupdater.services.pkgdb_packages') diff --git a/pdcupdater/tests/handler_tests/test_retirement.py b/pdcupdater/tests/handler_tests/test_retirement.py index 110e9b1..f6f235a 100644 --- a/pdcupdater/tests/handler_tests/test_retirement.py +++ b/pdcupdater/tests/handler_tests/test_retirement.py @@ -79,12 +79,12 @@ def test_can_process_retire_msg(self, pdc): idx = '2017-b1adac6d-64e9-406f-a1f4-4d3e57105649' msg = pdcupdater.utils.get_fedmsg(idx) self.handler.handle(pdc, msg) - expected_keys = [ + expected_keys = { 'component-branches', 'component-branch-slas/178020', 'component-branch-slas/178028' - ] - self.assertEqual(list(pdc.calls.keys()), expected_keys) + } + self.assertEqual(set(pdc.calls), expected_keys) @mock_pdc def test_can_process_retire_msg_already_retired(self, pdc): @@ -114,10 +114,10 @@ def test_can_process_retire_msg_already_retired(self, pdc): idx = '2017-3f490f4d-7612-4881-80cb-e1a941d6d700' msg = pdcupdater.utils.get_fedmsg(idx) self.handler.handle(pdc, msg) - expected_keys = [ + expected_keys = { 'component-branches' - ] - self.assertEqual(list(pdc.calls.keys()), expected_keys) + } + self.assertEqual(set(pdc.calls), expected_keys) @mock_pdc def test_audit(self, pdc): diff --git a/pdcupdater/tests/handler_tests/test_rpms.py b/pdcupdater/tests/handler_tests/test_rpms.py index 2da83f1..3172d99 100644 --- a/pdcupdater/tests/handler_tests/test_rpms.py +++ b/pdcupdater/tests/handler_tests/test_rpms.py @@ -342,7 +342,7 @@ def test_audit_missing_one(self, pdc, koji): # Check the results. 
# We removed a build from koji, so it is erroneously "present" in PDC - self.assertSetEqual(present, set([json.dumps({ + self.assertSetEqual(present, {json.dumps({ "arch": "noarch", "epoch": 0, "linked_releases": [ @@ -353,7 +353,7 @@ def test_audit_missing_one(self, pdc, koji): "release": "1.el7", "srpm_name": "rubygem-jmespath", "srpm_nevra": "rubygem-jmespath-1.1.3-1.el7", - }, sort_keys=True)])) + }, sort_keys=True)}) self.assertSetEqual(absent, set()) @mock_pdc @@ -375,7 +375,7 @@ def test_audit_adding_one(self, pdc, koji): # Check the results. self.assertSetEqual(present, set()) # We added an extra koji build, so it is "absent" from PDC. - self.assertSetEqual(absent, set([json.dumps({ + self.assertSetEqual(absent, {json.dumps({ "arch": "noarch", "epoch": 0, "linked_releases": [ @@ -386,4 +386,4 @@ def test_audit_adding_one(self, pdc, koji): "release": "1.fc24", "srpm_name": "rubygem-jmespath", "srpm_nevra": "rubygem-jmespath-1.1.3-1.fc24", - }, sort_keys=True)])) + }, sort_keys=True)}) diff --git a/pdcupdater/utils.py b/pdcupdater/utils.py index 1c7737d..9e12b70 100644 --- a/pdcupdater/utils.py +++ b/pdcupdater/utils.py @@ -41,7 +41,7 @@ def get_group_pk(pdc, target_group): return group['id'] # If we can't find it, then complain. - raise ValueError("Could not find matching group for %r" % target_group) + raise ValueError(f"Could not find matching group for {target_group!r}") def ensure_component_group_exists(pdc, component_group): @@ -75,7 +75,7 @@ def ensure_component_group_type_exists(pdc, component_group_type): """ Create a component_group-type in PDC if it doesn't already exist. 
""" try: # Try to create it - pdc['component-group-types']._(dict(name=component_group_type)) + pdc['component-group-types']._({'name': component_group_type}) except beanbag.bbexcept.BeanBagException as e: if e.response.status_code != 400: raise @@ -93,13 +93,11 @@ def ensure_release_exists(pdc, release_id, release): except beanbag.bbexcept.BeanBagException as e: if e.response.status_code != 404: raise - log.warn("No release %r exists. Creating." % release_id) + log.warn("No release %r exists. Creating.", release_id) release_payload = copy.copy(release) - release_payload.update(dict( - active=True, - )) - log.info("Creating release %r" % release_payload) + release_payload.update({'active': True}) + log.info("Creating release %r", release_payload) pdc['releases']._(release_payload) @@ -107,8 +105,8 @@ def ensure_release_exists(pdc, release_id, release): def ensure_global_component_exists(pdc, name): response = pdc['global-components']._(name=name) if not response['results']: - log.warn("No global-component %r exists. Creating." % name) - pdc['global-components']._(dict(name=name)) + log.warn("No global-component %r exists. 
Creating.", name) + pdc['global-components']._({'name': name}) def ensure_release_component_exists(pdc, release_id, name, type='rpm'): @@ -151,14 +149,17 @@ def ensure_release_component_exists(pdc, release_id, name, type='rpm'): # But if it was just that the component already existed, then go back and # query for what we tried to submit (return the primary key) - query = dict(name=name, release=release_id, type=type) + query = {'name': name, 'release': release_id, 'type': type} response = pdc['release-components']._(**query) if not response['count']: - raise IndexError("No results found for %r after submitting %r" % ( - query, data)) + raise IndexError( + f"No results found for {query!r} after submitting {data!r}" + ) if response['count'] > 1: - raise IndexError("%i results found for %r after submitting %r" % ( - response['count'], query, data)) + raise IndexError( + f"{response['count']} results found for {query!r} after submitting" + f" {data!r}" + ) return response['results'][0] @@ -170,8 +171,8 @@ def ensure_release_component_relationship_exists(pdc, parent, child, type): try: # Try to create it data = { - 'from_component': dict(id=parent['id']), - 'to_component': dict(id=child['id']), + 'from_component': {'id': parent['id']}, + 'to_component': {'id': child['id']}, # This may not exist, and we have no API to create it. It must be # entered by an admin in the admin panel beforehand. 
'type': type, @@ -198,21 +199,21 @@ def delete_bulk_release_component_relationships(pdc, parent, relationships): # Split things up by relationship type into a lookup keyed by type relationships = list(relationships) - relationship_types = set([relation for relation, child in relationships]) - relationship_lookup = dict([ - (key, [child for relation, child in relationships if relation == key]) + relationship_types = {relation for relation, child in relationships} + relationship_lookup = { + key: [child for relation, child in relationships if relation == key] for key in relationship_types - ]) + } endpoint = pdc['release-component-relationships']._ - for relationship_type, children in list(relationship_lookup.items()): + for relationship_type, children in relationship_lookup.items(): # Check to see if all the relations are all already there, first. - query_kwargs = dict( - from_component_name=parent['name'], - from_component_release=release, - type=relationship_type, - ) + query_kwargs = { + 'from_component_name': parent['name'], + 'from_component_release': release, + 'type': relationship_type, + } response = _chunked_query( pdc, endpoint, query_kwargs, key='to_component_name', @@ -224,15 +225,16 @@ def delete_bulk_release_component_relationships(pdc, parent, relationships): # Nobody can ask us to delete things that aren't there. # That's unreasonable. Sanity check. - message = "%r != %r" % (len(response), len(children)) - assert len(response) == len(children), message + assert len(response) == len(children), ( + f"{len(response)} != {len(children)}" + ) # Find the primary keys for all of these... query = pdc.get_paged(endpoint, **query_kwargs) identifiers = [relation['id'] for relation in query] # Issue the DELETE request for those found primary keys. - log.info("Pruning %i old relationships." 
% len(identifiers)) + log.info("Pruning %i old relationships.", len(identifiers)) endpoint("DELETE", identifiers) @@ -282,30 +284,30 @@ def ensure_bulk_release_component_relationships_exists(pdc, parent, # Split things up by relationship type into a lookup keyed by type relationships = list(relationships) - relationship_types = set([relation for relation, child in relationships]) - relationship_lookup = dict([ - (key, set([child for relation, child in relationships if relation == key])) + relationship_types = {relation for relation, child in relationships} + relationship_lookup = { + key: {child for relation, child in relationships if relation == key} for key in relationship_types - ]) + } - for relationship_type, children in list(relationship_lookup.items()): + for relationship_type, children in relationship_lookup.items(): # Check to see if all the relations are all already there, first. endpoint = pdc['release-component-relationships']._ - query_kwargs = dict( - from_component_name=parent['name'], - from_component_release=release, - type=relationship_type, - ) + query_kwargs = { + 'from_component_name': parent['name'], + 'from_component_release': release, + 'type': relationship_type, + } count = _chunked_query( pdc, endpoint, query_kwargs, key='to_component_name', iterable=children, count=True) log.info("Of %i needed %s relationships for %s in koji, found %i in PDC." - " (%i are missing)" % ( - len(children), relationship_type, - parent['name'], count, - len(children) - count)) + " (%i are missing)", + len(children), relationship_type, + parent['name'], count, + len(children) - count) if count != len(children): # If they weren't all there already, figure out which ones are missing. 
@@ -322,12 +324,16 @@ def ensure_bulk_release_component_relationships_exists(pdc, parent, pdc, release, absent_names, component_type=component_type)) #if len(absent) != len(absent_names): - # raise ValueError("Error1 creating components: %i != %i" % ( - # len(absent), len(absent_names))) + # raise ValueError( + # f"Error1 creating components: {len(absent)} !=" + # f" {len(absent_names)}" + # ) #if len(absent) != len(children) - count: - # raise ValueError("Error2 creating components: %i != %i" % ( - # len(absent), len(children) - count)) + # raise ValueError( + # f"Error2 creating components: {len(absent)} !=" + # f" {len(children) - count}" + # ) # Make sure this guy exists and has a primary key id. if 'id' not in parent: @@ -335,11 +341,11 @@ def ensure_bulk_release_component_relationships_exists(pdc, parent, pdc, release, parent['name'], component_type) # Now issue a bulk create the missing ones. - pdc['release-component-relationships']._([dict( - from_component=dict(id=parent['id']), - to_component=dict(id=child['id']), - type=relationship_type, - ) for child in absent]) + pdc['release-component-relationships']._([{ + 'from_component': {'id': parent['id']}, + 'to_component': {'id': child['id']}, + 'type': relationship_type, + } for child in absent]) def ensure_bulk_release_components_exist(pdc, release, components, @@ -347,7 +353,7 @@ def ensure_bulk_release_components_exist(pdc, release, components, ensure_bulk_global_components_exist(pdc, components) - query_kwargs = dict(release=release, type=component_type) + query_kwargs = {'release': release, 'type': component_type} endpoint = pdc['release-components']._ count = _chunked_query( pdc, endpoint, query_kwargs, @@ -364,18 +370,20 @@ def ensure_bulk_release_components_exist(pdc, release, components, ## Validate that. 
#if len(absent) != len(components) - count: - # raise ValueError("Error creating components: %i != (%i - %i)" % ( - # len(absent), len(components), count)) + # raise ValueError( + # f"Error creating components: {len(absent)} !=" + # f" ({len(components)} - {count})" + # ) # Now issue a bulk create the missing ones. - log.info("Of %i needed, %i release-components missing." % ( - len(components), len(absent))) - pdc['release-components']._([dict( - name=name, - global_component=name, - release=release, - type=component_type - ) for name in absent]) + log.info("Of %i needed, %i release-components missing.", + len(components), len(absent)) + pdc['release-components']._([{ + 'name': name, + 'global_component': name, + 'release': release, + 'type': component_type + } for name in absent]) # Finally, return all of the present components (with all of their primary # key IDs which were assigned server side. that's why we have to query a @@ -406,9 +414,9 @@ def ensure_bulk_global_components_exist(pdc, components): absent = [name for name in components if name not in present] # Now issue a bulk create the missing ones. - log.info("Of %i needed, %i global-components missing." % ( - len(components), len(absent))) - pdc['global-components']._([dict(name=name) for name in absent]) + log.info("Of %i needed, %i global-components missing.", + len(components), len(absent)) + pdc['global-components']._([{'name': name} for name in absent]) def delete_release_component_relationship(pdc, parent, child, type): @@ -424,9 +432,10 @@ def delete_release_component_relationship(pdc, parent, child, type): to_component_release=child['release'], )) if len(entries) != 1: - raise ValueError("No unique relationship found for " - "%r -> %r -> %r. Found %i." % ( - parent, type, child, len(entries))) + raise ValueError( + f"No unique relationship found for {parent!r} -> {type!r} ->" + f" {child!r}. Found {len(entries)}." + ) # But also, we needed the primary key in order to delete it. 
primary_key = entries[0]['id'] @@ -448,9 +457,9 @@ def compose_exists(pdc, compose_id): def get_fedmsg(idx): url = 'https://apps.fedoraproject.org/datagrepper/id' - response = session.get(url, params=dict(id=idx)) + response = session.get(url, params={'id': idx}) if not bool(response): - raise IOError("Failed to talk to %r %r" % (response.url, response)) + raise IOError(f"Failed to talk to {response.url!r} {response!r}") return response.json() @@ -469,9 +478,9 @@ def handle_message(pdc, handlers, msg, verbose=False): for handler in handlers: name = type(handler).__name__ if not handler.can_handle(pdc, msg): - debug("%s could not handle %s" % (name, idx)) + debug("%s could not handle %s", name, idx) continue - log.info("%s handling %s %s" % (name, idx, topic)) + log.info("%s handling %s %s", name, idx, topic) with annotated(pdc, msg['msg_id']) as client: try: handler.handle(client, msg) @@ -484,9 +493,9 @@ def bodhi_releases(): # TODO -- get these releases from PDC, instead of from Bodhi url = 'https://bodhi.fedoraproject.org/releases' - response = session.get(url, params=dict(rows_per_page=100)) + response = session.get(url, params={'rows_per_page': 100}) if not bool(response): - raise IOError('Failed to talk to %r: %r' % (url, response)) + raise IOError(f"Failed to talk to {url!r}: {response!r}") return response.json()['releases'] @@ -494,9 +503,9 @@ def rawhide_tag(): # TODO - get this tag from PDC, instead of guessing from pkgdb url = 'https://admin.fedoraproject.org/pkgdb/api/collections/' - response = session.get(url, params=dict(clt_status="Under Development")) + response = session.get(url, params={'clt_status': "Under Development"}) if not bool(response): - raise IOError('Failed to talk to %r: %r' % (url, response)) + raise IOError(f"Failed to talk to {url!r}: {response!r}") collections = response.json()['collections'] rawhide = [c for c in collections if c['koji_name'] == 
'rawhide'][0] return 'f' + rawhide['dist_tag'].strip('.fc') @@ -528,8 +537,8 @@ def interesting_container_tags(): tags = [tag for tag in tags if '-' not in tag] - return ['%s-docker' % tag for tag in tags] + \ - ['%s-container' % tag for tag in tags] + return ([f'{tag}-docker' for tag in tags] + + [f'{tag}-container' for tag in tags]) @cache.cache_on_arguments() @@ -574,7 +583,7 @@ def subpackage2parent(package, pdc_release): url = url.format(repo=repo, package=package) response = session.get(url) if not bool(response): - log.debug("Could not talk to mdapi %r %r" % (response.url, response)) + log.debug("Could not talk to mdapi %r %r", response.url, response) return package data = response.json() return data['basename'] @@ -604,11 +613,11 @@ def _tag2release_with_pdc(pdc, tag): )) if not releases: - raise ValueError("Could not find matching release for tag %r" % tag) + raise ValueError(f"Could not find matching release for tag {tag!r}") if len(releases) != 1: - log.error("%i different releases match tag %r, %r" % ( - len(releases), tag, releases)) + log.error("%i different releases match tag %r, %r", len(releases), tag, + releases) release = releases[0] return release['release_id'], release @@ -692,8 +701,8 @@ def inner(*args, **kwargs): try: return function(*args, **kwargs) except wait_on as e: - log.warn("Exception %r raised from %r. Retry in %rs" % ( - e, function, interval)) + log.warn("Exception %r raised from %r. Retry in %rs", e, function, interval) time.sleep(interval) return inner return wrapper