diff --git a/conf/config.py b/conf/config.py
index 6980c66c..eec13a8b 100644
--- a/conf/config.py
+++ b/conf/config.py
@@ -4,37 +4,35 @@ from os import path
 # declared properly somewhere/somehow
 confdir = path.abspath(path.dirname(__file__))
 # use parent dir as dbdir else fallback to current dir
-dbdir = path.abspath(path.join(confdir, '..')) if confdir.endswith('conf') \
-    else confdir
+dbdir = path.abspath(path.join(confdir, "..")) if confdir.endswith("conf") else confdir
 
 
 class BaseConfiguration(object):
     DEBUG = False
     # Make this random (used to generate session keys)
-    SECRET_KEY = '74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0'
-    SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(path.join(
-        dbdir, 'module_build_service.db'))
+    SECRET_KEY = "74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0"
+    SQLALCHEMY_DATABASE_URI = "sqlite:///{0}".format(path.join(dbdir, "module_build_service.db"))
     SQLALCHEMY_TRACK_MODIFICATIONS = True
     # Where we should run when running "manage.py run" directly.
-    HOST = '0.0.0.0'
+    HOST = "0.0.0.0"
     PORT = 5000
 
     # Global network-related values, in seconds
     NET_TIMEOUT = 120
     NET_RETRY_INTERVAL = 30
 
-    SYSTEM = 'koji'
-    MESSAGING = 'fedmsg'  # or amq
-    MESSAGING_TOPIC_PREFIX = ['org.fedoraproject.prod']
-    KOJI_CONFIG = '/etc/module-build-service/koji.conf'
-    KOJI_PROFILE = 'koji'
-    ARCHES = ['i686', 'armv7hl', 'x86_64']
+    SYSTEM = "koji"
+    MESSAGING = "fedmsg"  # or amq
+    MESSAGING_TOPIC_PREFIX = ["org.fedoraproject.prod"]
+    KOJI_CONFIG = "/etc/module-build-service/koji.conf"
+    KOJI_PROFILE = "koji"
+    ARCHES = ["i686", "armv7hl", "x86_64"]
     ALLOW_ARCH_OVERRIDE = False
-    KOJI_REPOSITORY_URL = 'https://kojipkgs.fedoraproject.org/repos'
-    KOJI_TAG_PREFIXES = ['module', 'scrmod']
+    KOJI_REPOSITORY_URL = "https://kojipkgs.fedoraproject.org/repos"
+    KOJI_TAG_PREFIXES = ["module", "scrmod"]
     KOJI_ENABLE_CONTENT_GENERATOR = True
     CHECK_FOR_EOL = False
-    PDC_URL = 'https://pdc.fedoraproject.org/rest_api/v1'
+    PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"
     PDC_INSECURE = False
     PDC_DEVELOP = True
     SCMURLS = ["https://src.fedoraproject.org/modules/"]
@@ -50,30 +48,27 @@ class BaseConfiguration(object):
 
     ALLOW_CUSTOM_SCMURLS = False
-    RPMS_DEFAULT_REPOSITORY = 'https://src.fedoraproject.org/rpms/'
+    RPMS_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/rpms/"
     RPMS_ALLOW_REPOSITORY = False
-    RPMS_DEFAULT_CACHE = 'http://pkgs.fedoraproject.org/repo/pkgs/'
+    RPMS_DEFAULT_CACHE = "http://pkgs.fedoraproject.org/repo/pkgs/"
     RPMS_ALLOW_CACHE = False
-    MODULES_DEFAULT_REPOSITORY = 'https://src.fedoraproject.org/modules/'
+    MODULES_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/modules/"
     MODULES_ALLOW_REPOSITORY = False
     MODULES_ALLOW_SCRATCH = False
-    ALLOWED_GROUPS = set([
-        'packager',
-        # 'modularity-wg',
-    ])
+    ALLOWED_GROUPS = set(["packager"])
 
     ALLOWED_GROUPS_TO_IMPORT_MODULE = set()
 
     # Available backends are: console and file
-    LOG_BACKEND = 'console'
+    LOG_BACKEND = "console"
 
     # Path to log file when LOG_BACKEND is set to "file".
-    LOG_FILE = 'module_build_service.log'
+    LOG_FILE = "module_build_service.log"
 
     # Available log levels are: debug, info, warn, error.
-    LOG_LEVEL = 'info'
+    LOG_LEVEL = "info"
 
     # Settings for Kerberos
     KRB_KEYTAB = None
@@ -81,31 +76,32 @@ class BaseConfiguration(object):
     # AMQ prefixed variables are required only while using 'amq' as messaging backend
     # Addresses to listen to
-    AMQ_RECV_ADDRESSES = ['amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.koji',
-                          ('amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.'
-                           'module_build_service')]
+    AMQ_RECV_ADDRESSES = [
+        "amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.koji",
+        "amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.module_build_service",
+    ]
     # Address for sending messages
-    AMQ_DEST_ADDRESS = ('amqps://messaging.mydomain.com/Consumer.m8y.'
-                        'VirtualTopic.eng.module_build_service')
-    AMQ_CERT_FILE = '/etc/module_build_service/msg-m8y-client.crt'
-    AMQ_PRIVATE_KEY_FILE = '/etc/module_build_service/msg-m8y-client.key'
-    AMQ_TRUSTED_CERT_FILE = '/etc/module_build_service/Root-CA.crt'
+    AMQ_DEST_ADDRESS = \
+        "amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.module_build_service"
+    AMQ_CERT_FILE = "/etc/module_build_service/msg-m8y-client.crt"
+    AMQ_PRIVATE_KEY_FILE = "/etc/module_build_service/msg-m8y-client.key"
+    AMQ_TRUSTED_CERT_FILE = "/etc/module_build_service/Root-CA.crt"
 
     # Disable Client Authorization
     NO_AUTH = False
 
-    CACHE_DIR = '~/modulebuild/cache'
+    CACHE_DIR = "~/modulebuild/cache"
 
 
 class TestConfiguration(BaseConfiguration):
-    BUILD_LOGS_DIR = '/tmp'
-    BUILD_LOGS_NAME_FORMAT = 'build-{id}.log'
-    LOG_BACKEND = 'console'
-    LOG_LEVEL = 'debug'
-    SQLALCHEMY_DATABASE_URI = 'sqlite://'
+    BUILD_LOGS_DIR = "/tmp"
+    BUILD_LOGS_NAME_FORMAT = "build-{id}.log"
+    LOG_BACKEND = "console"
+    LOG_LEVEL = "debug"
+    SQLALCHEMY_DATABASE_URI = "sqlite://"
     DEBUG = True
-    MESSAGING = 'in_memory'
-    PDC_URL = 'https://pdc.fedoraproject.org/rest_api/v1'
+    MESSAGING = "in_memory"
+    PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"
 
     # Global network-related values, in seconds
     NET_TIMEOUT = 3
@@ -114,19 +110,19 @@ class TestConfiguration(BaseConfiguration):
     SCM_NET_TIMEOUT = 0.1
     SCM_NET_RETRY_INTERVAL = 0.1
 
-    KOJI_CONFIG = './conf/koji.conf'
-    KOJI_PROFILE = 'staging'
-    SERVER_NAME = 'localhost'
+    KOJI_CONFIG = "./conf/koji.conf"
+    KOJI_PROFILE = "staging"
+    SERVER_NAME = "localhost"
 
-    KOJI_REPOSITORY_URL = 'https://kojipkgs.stg.fedoraproject.org/repos'
+    KOJI_REPOSITORY_URL = "https://kojipkgs.stg.fedoraproject.org/repos"
     SCMURLS = ["https://src.stg.fedoraproject.org/modules/"]
 
-    AUTH_METHOD = 'oidc'
-    RESOLVER = 'db'
+    AUTH_METHOD = "oidc"
+    RESOLVER = "db"
 
-    ALLOWED_GROUPS_TO_IMPORT_MODULE = set(['mbs-import-module'])
-    GREENWAVE_DECISION_CONTEXT = 'osci_compose_gate_modules'
+    ALLOWED_GROUPS_TO_IMPORT_MODULE = set(["mbs-import-module"])
+    GREENWAVE_DECISION_CONTEXT = "osci_compose_gate_modules"
 
-    STREAM_SUFFIXES = {r'^el\d+\.\d+\.\d+\.z$': 0.1}
+    STREAM_SUFFIXES = {r"^el\d+\.\d+\.\d+\.z$": 0.1}
 
 
 class ProdConfiguration(BaseConfiguration):
@@ -134,22 +130,22 @@ class ProdConfiguration(BaseConfiguration):
 
 
 class LocalBuildConfiguration(BaseConfiguration):
-    LOG_LEVEL = 'debug'
-    MESSAGING = 'in_memory'
+    LOG_LEVEL = "debug"
+    MESSAGING = "in_memory"
 
     ARCH_AUTODETECT = True
-    ARCH_FALLBACK = 'x86_64'
+    ARCH_FALLBACK = "x86_64"
 
     ALLOW_CUSTOM_SCMURLS = True
-    RESOLVER = 'mbs'
+    RESOLVER = "mbs"
     RPMS_ALLOW_REPOSITORY = True
     MODULES_ALLOW_REPOSITORY = True
 
 
 class OfflineLocalBuildConfiguration(LocalBuildConfiguration):
-    RESOLVER = 'local'
+    RESOLVER = "local"
 
 
 class DevConfiguration(LocalBuildConfiguration):
     DEBUG = True
-    LOG_BACKEND = 'console'
+    LOG_BACKEND = "console"
diff --git a/fedmsg.d/mbs-logging.py b/fedmsg.d/mbs-logging.py
index 6ee3fadf..e478b9ff 100644
--- a/fedmsg.d/mbs-logging.py
+++ b/fedmsg.d/mbs-logging.py
@@ -2,16 +2,8 @@ config = dict(
     logging=dict(
         loggers=dict(
             # Quiet this guy down...
-            requests={
-                "level": "WARNING",
-                "propagate": True,
-                "handlers": ["console"],
-            },
-            module_build_service={
-                "level": "INFO",
-                "propagate": True,
-                "handlers": ["console"],
-            },
-        ),
-    ),
+            requests={"level": "WARNING", "propagate": True, "handlers": ["console"]},
+            module_build_service={"level": "INFO", "propagate": True, "handlers": ["console"]},
+        )
+    )
 )
diff --git a/fedmsg.d/mbs-scheduler.py b/fedmsg.d/mbs-scheduler.py
index 8a8422f6..bc42ec5c 100644
--- a/fedmsg.d/mbs-scheduler.py
+++ b/fedmsg.d/mbs-scheduler.py
@@ -1,4 +1 @@
-config = {
-    'mbsconsumer': True,
-    'mbspoller': True,
-}
+config = {"mbsconsumer": True, "mbspoller": True}
diff --git a/fedmsg.d/module_build_service.py b/fedmsg.d/module_build_service.py
index 74c2e1b0..e2574ed9 100644
--- a/fedmsg.d/module_build_service.py
+++ b/fedmsg.d/module_build_service.py
@@ -3,10 +3,8 @@ import os
 config = {
     # Just for dev.
     "validate_signatures": False,
-
     # Talk to the relay, so things also make it to composer.stg in our dev env
     "active": True,
-
     # Since we're in active mode, we don't need to declare any of our own
     # passive endpoints. This placeholder value needs to be here for the tests
     # to pass in Jenkins, though. \o/
@@ -14,10 +12,9 @@ config = {
         "fedora-infrastructure": [
             # Just listen to staging for now, not to production (spam!)
             # "tcp://hub.fedoraproject.org:9940",
-            "tcp://stg.fedoraproject.org:9940",
-        ],
+            "tcp://stg.fedoraproject.org:9940"
+        ]
     },
-
     # Start of code signing configuration
     # 'sign_messages': True,
     # 'validate_signatures': True,
@@ -37,12 +34,11 @@ config = {
 }
 
 # developer's instance
-if 'MODULE_BUILD_SERVICE_DEVELOPER_ENV' in os.environ and \
-        os.environ['MODULE_BUILD_SERVICE_DEVELOPER_ENV'].lower() in (
-        '1', 'on', 'true', 'y', 'yes'):
-    config['endpoints']['relay_outbound'] = ["tcp://fedmsg-relay:2001"]
-    config['relay_inbound'] = ["tcp://fedmsg-relay:2003"]
+true_options = ("1", "on", "true", "y", "yes")
+if os.environ.get("MODULE_BUILD_SERVICE_DEVELOPER_ENV", "").lower() in true_options:
+    config["endpoints"]["relay_outbound"] = ["tcp://fedmsg-relay:2001"]
+    config["relay_inbound"] = ["tcp://fedmsg-relay:2003"]
 else:
     # These configuration values are reasonable for most other configurations.
-    config['endpoints']['relay_outbound'] = ["tcp://127.0.0.1:4001"]
-    config['relay_inbound'] = ["tcp://127.0.0.1:2003"]
+    config["endpoints"]["relay_outbound"] = ["tcp://127.0.0.1:4001"]
+    config["relay_inbound"] = ["tcp://127.0.0.1:2003"]
diff --git a/module_build_service/__init__.py b/module_build_service/__init__.py
index 7e7efd25..e1123188 100644
--- a/module_build_service/__init__.py
+++ b/module_build_service/__init__.py
@@ -46,11 +46,10 @@ from flask_sqlalchemy import SQLAlchemy
 from sqlalchemy.pool import StaticPool
 from logging import getLogger
 import gi  # noqa
-gi.require_version('Modulemd', '1.0')  # noqa
+gi.require_version("Modulemd", "1.0")  # noqa
 from gi.repository import Modulemd  # noqa
 
-from module_build_service.logger import (
-    init_logging, ModuleBuildLogs, level_flags, MBSLogger)
+from module_build_service.logger import init_logging, ModuleBuildLogs, level_flags, MBSLogger
 from module_build_service.errors import (
     ValidationError, Unauthorized, UnprocessableEntity, Conflict, NotFound,
@@ -59,9 +58,9 @@ from module_build_service.config import init_config
 from module_build_service.proxy import ReverseProxy
 
 try:
-    version = pkg_resources.get_distribution('module-build-service').version
+    version = pkg_resources.get_distribution("module-build-service").version
 except pkg_resources.DistributionNotFound:
-    version = 'unknown'
+    version = "unknown"
 api_version = 2
 
 app = Flask(__name__)
@@ -77,12 +76,13 @@ class MBSSQLAlchemy(SQLAlchemy):
 
     This is used *only* during tests to make them faster.
     """
+
     def apply_driver_hacks(self, app, info, options):
-        if info.drivername == 'sqlite' and info.database in (None, '', ':memory:'):
-            options['poolclass'] = StaticPool
-            options['connect_args'] = {'check_same_thread': False}
+        if info.drivername == "sqlite" and info.database in (None, "", ":memory:"):
+            options["poolclass"] = StaticPool
+            options["connect_args"] = {"check_same_thread": False}
             try:
-                del options['pool_size']
+                del options["pool_size"]
             except KeyError:
                 pass
@@ -107,59 +107,56 @@ def create_app(debug=False, verbose=False, quiet=False):
 
 def load_views():
     from module_build_service import views
+
     assert views
 
 
 @app.errorhandler(ValidationError)
 def validationerror_error(e):
     """Flask error handler for ValidationError exceptions"""
-    return json_error(400, 'Bad Request', str(e))
+    return json_error(400, "Bad Request", str(e))
 
 
 @app.errorhandler(Unauthorized)
 def unauthorized_error(e):
     """Flask error handler for NotAuthorized exceptions"""
-    return json_error(401, 'Unauthorized', str(e))
+    return json_error(401, "Unauthorized", str(e))
 
 
 @app.errorhandler(Forbidden)
 def forbidden_error(e):
     """Flask error handler for Forbidden exceptions"""
-    return json_error(403, 'Forbidden', str(e))
+    return json_error(403, "Forbidden", str(e))
 
 
 @app.errorhandler(RuntimeError)
 def runtimeerror_error(e):
     """Flask error handler for RuntimeError exceptions"""
     log.exception("RuntimeError exception raised")
-    return json_error(500, 'Internal Server Error', str(e))
+    return json_error(500, "Internal Server Error", str(e))
 
 
 @app.errorhandler(UnprocessableEntity)
 def unprocessableentity_error(e):
     """Flask error handler for UnprocessableEntity exceptions"""
-    return json_error(422, 'Unprocessable Entity', str(e))
+    return json_error(422, "Unprocessable Entity", str(e))
 
 
 @app.errorhandler(Conflict)
 def conflict_error(e):
     """Flask error handler for Conflict exceptions"""
-    return json_error(409, 'Conflict', str(e))
+    return json_error(409, "Conflict", str(e))
 
 
 @app.errorhandler(NotFound)
 def notfound_error(e):
     """Flask error handler for Conflict exceptions"""
-    return json_error(404, 'Not Found', str(e))
+    return json_error(404, "Not Found", str(e))
 
 
 init_logging(conf)
 log = MBSLogger()
-build_logs = ModuleBuildLogs(
-    conf.build_logs_dir,
-    conf.build_logs_name_format,
-    conf.log_level,
-)
+build_logs = ModuleBuildLogs(conf.build_logs_dir, conf.build_logs_name_format, conf.log_level)
 
 
 def get_url_for(*args, **kwargs):
@@ -171,11 +168,13 @@ def get_url_for(*args, **kwargs):
 
         # Localhost is right URL only when the scheduler runs on the same
         # system as the web views.
-        app.config['SERVER_NAME'] = 'localhost'
+        app.config["SERVER_NAME"] = "localhost"
         with app.app_context():
-            log.debug("WARNING: get_url_for() has been called without the Flask "
-                      "app_context. That can lead to SQLAlchemy errors caused by "
-                      "multiple session being used in the same time.")
+            log.debug(
+                "WARNING: get_url_for() has been called without the Flask "
+                "app_context. That can lead to SQLAlchemy errors caused by "
+                "multiple session being used in the same time."
+            )
             return url_for(*args, **kwargs)
diff --git a/module_build_service/auth.py b/module_build_service/auth.py
index ab88edb2..8d726aa5 100644
--- a/module_build_service/auth.py
+++ b/module_build_service/auth.py
@@ -31,6 +31,7 @@ import ssl
 import requests
 import kerberos
 from flask import Response, g
+
 # Starting with Flask 0.9, the _app_ctx_stack is the correct one,
 # before that we need to use the _request_ctx_stack.
 try:
@@ -50,12 +51,12 @@ except ImportError:
     client_secrets = None
 
 
-region = make_region().configure('dogpile.cache.memory')
+region = make_region().configure("dogpile.cache.memory")
 
 
 def _json_loads(content):
     if not isinstance(content, str):
-        content = content.decode('utf-8')
+        content = content.decode("utf-8")
     return json.loads(content)
 
 
@@ -67,8 +68,7 @@ def _load_secrets():
     if "OIDC_CLIENT_SECRETS" not in app.config:
         raise Forbidden("OIDC_CLIENT_SECRETS must be set in server config.")
 
-    secrets = _json_loads(open(app.config['OIDC_CLIENT_SECRETS'],
-                               'r').read())
+    secrets = _json_loads(open(app.config["OIDC_CLIENT_SECRETS"], "r").read())
     client_secrets = list(secrets.values())[0]
 
 
@@ -79,13 +79,15 @@ def _get_token_info(token):
     if not client_secrets:
         return None
 
-    request = {'token': token,
-               'token_type_hint': 'Bearer',
-               'client_id': client_secrets['client_id'],
-               'client_secret': client_secrets['client_secret']}
-    headers = {'Content-type': 'application/x-www-form-urlencoded'}
+    request = {
+        "token": token,
+        "token_type_hint": "Bearer",
+        "client_id": client_secrets["client_id"],
+        "client_secret": client_secrets["client_secret"],
+    }
+    headers = {"Content-type": "application/x-www-form-urlencoded"}
 
-    resp = requests.post(client_secrets['token_introspection_uri'], data=request, headers=headers)
+    resp = requests.post(client_secrets["token_introspection_uri"], data=request, headers=headers)
     return resp.json()
 
 
@@ -96,8 +98,8 @@ def _get_user_info(token):
     if not client_secrets:
         return None
 
-    headers = {'authorization': 'Bearer ' + token}
-    resp = requests.get(client_secrets['userinfo_uri'], headers=headers)
+    headers = {"authorization": "Bearer " + token}
+    resp = requests.get(client_secrets["userinfo_uri"], headers=headers)
     return resp.json()
 
 
@@ -110,8 +112,8 @@ def get_user_oidc(request):
     if "authorization" not in request.headers:
         raise Unauthorized("No 'authorization' header found.")
 
-    header = request.headers['authorization'].strip()
-    prefix = 'Bearer '
+    header = request.headers["authorization"].strip()
+    prefix = "Bearer "
     if not header.startswith(prefix):
         raise Unauthorized("Authorization headers must start with %r" % prefix)
 
@@ -129,16 +131,15 @@ def get_user_oidc(request):
     if "OIDC_REQUIRED_SCOPE" not in app.config:
         raise Forbidden("OIDC_REQUIRED_SCOPE must be set in server config.")
 
-    presented_scopes = data['scope'].split(' ')
+    presented_scopes = data["scope"].split(" ")
     required_scopes = [
-        'openid',
-        'https://id.fedoraproject.org/scope/groups',
+        "openid",
+        "https://id.fedoraproject.org/scope/groups",
         app.config["OIDC_REQUIRED_SCOPE"],
     ]
     for scope in required_scopes:
         if scope not in presented_scopes:
-            raise Unauthorized("Required OIDC scope %r not present: %r" % (
-                scope, presented_scopes))
+            raise Unauthorized("Required OIDC scope %r not present: %r" % (scope, presented_scopes))
 
     try:
         extended_data = _get_user_info(token)
@@ -153,7 +154,7 @@ def get_user_oidc(request):
         groups = set()
     else:
         try:
-            groups = set(extended_data['groups'])
+            groups = set(extended_data["groups"])
         except Exception as e:
             error = "Could not find groups in UserInfo from OIDC %s" % str(e)
             log.exception(extended_data)
@@ -175,19 +176,20 @@ class KerberosAuthenticate(object):
         # If the config specifies a keytab to use, then override the KRB5_KTNAME
         # environment variable
         if conf.kerberos_keytab:
-            os.environ['KRB5_KTNAME'] = conf.kerberos_keytab
+            os.environ["KRB5_KTNAME"] = conf.kerberos_keytab
 
-        if 'KRB5_KTNAME' in os.environ:
+        if "KRB5_KTNAME" in os.environ:
             try:
-                principal = kerberos.getServerPrincipalDetails('HTTP', hostname)
+                principal = kerberos.getServerPrincipalDetails("HTTP", hostname)
             except kerberos.KrbError as error:
-                raise Unauthorized(
-                    'Kerberos: authentication failed with "{0}"'.format(str(error)))
+                raise Unauthorized('Kerberos: authentication failed with "{0}"'.format(str(error)))
 
             log.debug('Kerberos: server is identifying as "{0}"'.format(principal))
         else:
-            raise Unauthorized('Kerberos: set the config value of "KERBEROS_KEYTAB" or the '
-                               'environment variable "KRB5_KTNAME" to your keytab file')
+            raise Unauthorized(
+                'Kerberos: set the config value of "KERBEROS_KEYTAB" or the '
+                'environment variable "KRB5_KTNAME" to your keytab file'
+            )
 
     def _gssapi_authenticate(self, token):
         """
@@ -201,23 +203,23 @@ class KerberosAuthenticate(object):
         try:
             rc, state = kerberos.authGSSServerInit(self.service_name)
             if rc != kerberos.AUTH_GSS_COMPLETE:
-                log.error('Kerberos: unable to initialize server context')
+                log.error("Kerberos: unable to initialize server context")
                 return None
             rc = kerberos.authGSSServerStep(state, token)
             if rc == kerberos.AUTH_GSS_COMPLETE:
-                log.debug('Kerberos: completed GSSAPI negotiation')
+                log.debug("Kerberos: completed GSSAPI negotiation")
                 ctx.kerberos_token = kerberos.authGSSServerResponse(state)
                 ctx.kerberos_user = kerberos.authGSSServerUserName(state)
                 return rc
             elif rc == kerberos.AUTH_GSS_CONTINUE:
-                log.debug('Kerberos: continuing GSSAPI negotiation')
+                log.debug("Kerberos: continuing GSSAPI negotiation")
                 return kerberos.AUTH_GSS_CONTINUE
             else:
-                log.debug('Kerberos: unable to step server context')
+                log.debug("Kerberos: unable to step server context")
                 return None
         except kerberos.GSSError as error:
-            log.error('Kerberos: unable to authenticate: {0}'.format(str(error)))
+            log.error("Kerberos: unable to authenticate: {0}".format(str(error)))
             return None
         finally:
             if state:
@@ -235,25 +237,25 @@ class KerberosAuthenticate(object):
             kerberos_user = ctx.kerberos_user
             kerberos_token = ctx.kerberos_token
         elif rc != kerberos.AUTH_GSS_CONTINUE:
-            raise Forbidden('Invalid Kerberos ticket')
+            raise Forbidden("Invalid Kerberos ticket")
 
         return kerberos_user, kerberos_token
 
 
 def get_user_kerberos(request):
     user = None
-    if 'Authorization' not in request.headers:
-        response = Response('Unauthorized', 401, {'WWW-Authenticate': 'Negotiate'})
+    if "Authorization" not in request.headers:
+        response = Response("Unauthorized", 401, {"WWW-Authenticate": "Negotiate"})
         exc = FlaskUnauthorized()
         # For some reason, certain versions of werkzeug raise an exception when passing `response`
         # in the constructor. This is a work-around.
         exc.response = response
         raise exc
 
-    header = request.headers.get('Authorization')
-    token = ''.join(header.strip().split()[1:])
+    header = request.headers.get("Authorization")
+    token = "".join(header.strip().split()[1:])
     user, kerberos_token = KerberosAuthenticate().process_request(token)
     # Remove the realm
-    user = user.split('@')[0]
+    user = user.split("@")[0]
     # If the user is part of the whitelist, then the group membership check is skipped
     if user in conf.allowed_users:
         groups = []
@@ -275,20 +277,21 @@ def get_ldap_group_membership(uid):
 
 class Ldap(object):
     """ A class that handles LDAP connections and queries """
+
     connection = None
     base_dn = None
 
     def __init__(self):
         if not conf.ldap_uri:
-            raise Forbidden('LDAP_URI must be set in server config.')
+            raise Forbidden("LDAP_URI must be set in server config.")
         if conf.ldap_groups_dn:
             self.base_dn = conf.ldap_groups_dn
         else:
-            raise Forbidden('LDAP_GROUPS_DN must be set in server config.')
+            raise Forbidden("LDAP_GROUPS_DN must be set in server config.")
 
-        if conf.ldap_uri.startswith('ldaps://'):
-            tls = ldap3.Tls(ca_certs_file='/etc/pki/tls/certs/ca-bundle.crt',
-                            validate=ssl.CERT_REQUIRED)
+        if conf.ldap_uri.startswith("ldaps://"):
+            tls = ldap3.Tls(
+                ca_certs_file="/etc/pki/tls/certs/ca-bundle.crt", validate=ssl.CERT_REQUIRED)
             server = ldap3.Server(conf.ldap_uri, use_ssl=True, tls=tls)
         else:
             server = ldap3.Server(conf.ldap_uri)
@@ -296,26 +299,28 @@ class Ldap(object):
         try:
             self.connection.open()
         except ldap3.core.exceptions.LDAPSocketOpenError as error:
-            log.error('The connection to "{0}" failed. The following error was raised: {1}'
-                      .format(conf.ldap_uri, str(error)))
-            raise Forbidden('The connection to the LDAP server failed. Group membership '
-                            'couldn\'t be obtained.')
+            log.error(
+                'The connection to "{0}" failed. The following error was raised: {1}'.format(
+                    conf.ldap_uri, str(error)))
+            raise Forbidden(
+                "The connection to the LDAP server failed. Group membership couldn't be obtained.")
 
     def get_user_membership(self, uid):
         """ Gets the group membership of a user
         :param uid: a string of the uid of the user
         :return: a list of common names of the posixGroups the user is a member of
         """
-        ldap_filter = '(memberUid={0})'.format(uid)
+        ldap_filter = "(memberUid={0})".format(uid)
         # Only get the groups in the base container/OU
-        self.connection.search(self.base_dn, ldap_filter, search_scope=ldap3.LEVEL,
-                               attributes=['cn'])
+        self.connection.search(
+            self.base_dn, ldap_filter, search_scope=ldap3.LEVEL, attributes=["cn"])
         groups = self.connection.response
         try:
-            return [group['attributes']['cn'][0] for group in groups]
+            return [group["attributes"]["cn"][0] for group in groups]
         except KeyError:
-            log.exception('The LDAP groups could not be determined based on the search results '
-                          'of "{0}"'.format(str(groups)))
+            log.exception(
+                "The LDAP groups could not be determined based on the search results "
+                'of "{0}"'.format(str(groups)))
             return []
 
 
@@ -326,11 +331,11 @@ def get_user(request):
         membership such as ('mprahl', {'factory2', 'devel'})
     """
     if conf.no_auth is True:
-        log.debug('Authorization is disabled.')
-        return 'anonymous', {'packager'}
+        log.debug("Authorization is disabled.")
+        return "anonymous", {"packager"}
 
     if "user" not in g and "groups" not in g:
-        get_user_func_name = 'get_user_{0}'.format(conf.auth_method)
+        get_user_func_name = "get_user_{0}".format(conf.auth_method)
         get_user_func = globals().get(get_user_func_name)
         if not get_user_func:
             raise RuntimeError('The function "{0}" is not implemented'.format(get_user_func_name))
diff --git a/module_build_service/backports.py b/module_build_service/backports.py
index 1321a673..06683204 100644
--- a/module_build_service/backports.py
+++ b/module_build_service/backports.py
@@ -33,11 +33,11 @@ def jsonify(*args, **kwargs):
     # input only since 0.11, but RHEL7 contains 0.10.1.
    # https://github.com/pallets/flask/commit/daceb3e3a028b4b408c4bbdbdef0047f1de3a7c9
     indent = None
-    separators = (',', ':')
+    separators = (",", ":")
 
-    if module_build_service.app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not request.is_xhr:
+    if module_build_service.app.config["JSONIFY_PRETTYPRINT_REGULAR"] and not request.is_xhr:
         indent = 2
-        separators = (', ', ': ')
+        separators = (", ", ": ")
 
     if args and kwargs:
         raise TypeError("jsonify() behavior undefined when passed both args and kwargs")
@@ -51,6 +51,5 @@ def jsonify(*args, **kwargs):
     # Note that we add '\n' to end of response
     # (see https://github.com/mitsuhiko/flask/pull/1262)
     rv = module_build_service.app.response_class(
-        (dumps(data, indent=indent, separators=separators), '\n'),
-        mimetype='application/json')
+        (dumps(data, indent=indent, separators=separators), "\n"), mimetype="application/json")
     return rv
diff --git a/module_build_service/builder/KojiContentGenerator.py b/module_build_service/builder/KojiContentGenerator.py
index 872302e9..eefc0092 100644
--- a/module_build_service/builder/KojiContentGenerator.py
+++ b/module_build_service/builder/KojiContentGenerator.py
@@ -52,6 +52,7 @@ logging.basicConfig(level=logging.DEBUG)
 
 def get_session(config, login=True):
     from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
+
     return KojiModuleBuilder.get_session(config, login=login)
 
 
@@ -69,7 +70,7 @@ def strip_suffixes(s, suffixes):
     """
     for suffix in suffixes:
         if s.endswith(suffix):
-            s = s[:-len(suffix)]
+            s = s[: -len(suffix)]
             break
     return s
 
 
@@ -79,8 +80,9 @@ def koji_retrying_multicall_map(*args, **kwargs):
     Wrapper around KojiModuleBuilder.koji_retrying_multicall_map, because
     we cannot import that method normally because of import loop.
     """
-    from module_build_service.builder.KojiModuleBuilder import \
-        koji_retrying_multicall_map as multicall
+    from module_build_service.builder.KojiModuleBuilder import (
+        koji_retrying_multicall_map as multicall,)
+
     return multicall(*args, **kwargs)
 
 
@@ -109,7 +111,7 @@ class KojiContentGenerator(object):
         return "<KojiContentGenerator module: %s>" % (self.module_name)
 
     @staticmethod
-    def parse_rpm_output(output, tags, separator=';'):
+    def parse_rpm_output(output, tags, separator=";"):
         """
         Copied from:
         https://github.com/projectatomic/atomic-reactor/blob/master/atomic_reactor/plugins/exit_koji_promote.py
@@ -130,42 +132,42 @@ class KojiContentGenerator(object):
             except ValueError:
                 return None
 
-            if value == '(none)':
+            if value == "(none)":
                 return None
 
             return value
 
         components = []
-        sigmarker = 'Key ID '
+        sigmarker = "Key ID "
         for rpm in output:
-            fields = rpm.rstrip('\n').split(separator)
+            fields = rpm.rstrip("\n").split(separator)
             if len(fields) < len(tags):
                 continue
 
-            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
+            signature = field("SIGPGP:pgpsig") or field("SIGGPG:pgpsig")
             if signature:
                 parts = signature.split(sigmarker, 1)
                 if len(parts) > 1:
                     signature = parts[1]
 
             component_rpm = {
-                u'type': u'rpm',
-                u'name': field('NAME'),
-                u'version': field('VERSION'),
-                u'release': field('RELEASE'),
-                u'arch': field('ARCH'),
-                u'sigmd5': field('SIGMD5'),
-                u'signature': signature,
+                u"type": u"rpm",
+                u"name": field("NAME"),
+                u"version": field("VERSION"),
+                u"release": field("RELEASE"),
+                u"arch": field("ARCH"),
+                u"sigmd5": field("SIGMD5"),
+                u"signature": signature,
             }
 
             # Special handling for epoch as it must be an integer or None
-            epoch = field('EPOCH')
+            epoch = field("EPOCH")
             if epoch is not None:
                 epoch = int(epoch)
 
-            component_rpm[u'epoch'] = epoch
+            component_rpm[u"epoch"] = epoch
 
-            if component_rpm['name'] != 'gpg-pubkey':
+            if component_rpm["name"] != "gpg-pubkey":
                 components.append(component_rpm)
 
         return components
 
@@ -177,28 +179,25 @@ class KojiContentGenerator(object):
 
         Build a list of installed RPMs in the format required for the
         metadata.
-        """ # noqa
+        """  # noqa
         tags = [
-            'NAME',
-            'VERSION',
-            'RELEASE',
-            'ARCH',
-            'EPOCH',
-            'SIGMD5',
-            'SIGPGP:pgpsig',
-            'SIGGPG:pgpsig',
+            "NAME",
+            "VERSION",
+            "RELEASE",
+            "ARCH",
+            "EPOCH",
+            "SIGMD5",
+            "SIGPGP:pgpsig",
+            "SIGGPG:pgpsig",
         ]
 
-        sep = ';'
+        sep = ";"
         fmt = sep.join(["%%{%s}" % tag for tag in tags])
         cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
-        with open('/dev/null', 'r+') as devnull:
-            p = subprocess.Popen(cmd,
-                                 shell=True,
-                                 stdin=devnull,
-                                 stdout=subprocess.PIPE,
-                                 stderr=devnull)
+        with open("/dev/null", "r+") as devnull:
+            p = subprocess.Popen(
+                cmd, shell=True, stdin=devnull, stdout=subprocess.PIPE, stderr=devnull)
 
         (stdout, stderr) = p.communicate()
         status = p.wait()
@@ -216,16 +215,12 @@ class KojiContentGenerator(object):
         # TODO: In libmodulemd v1.5, there'll be a property we can check instead
         # of using RPM
         try:
-            libmodulemd_version = subprocess.check_output(
-                ['rpm', '--queryformat', '%{VERSION}', '-q', 'libmodulemd'],
-                universal_newlines=True).strip()
+            cmd = ["rpm", "--queryformat", "%{VERSION}", "-q", "libmodulemd"]
+            libmodulemd_version = subprocess.check_output(cmd, universal_newlines=True).strip()
         except subprocess.CalledProcessError:
-            libmodulemd_version = 'unknown'
+            libmodulemd_version = "unknown"
 
-        return [{
-            'name': 'libmodulemd',
-            'version': libmodulemd_version
-        }]
+        return [{"name": "libmodulemd", "version": libmodulemd_version}]
 
     def _koji_rpms_in_tag(self, tag):
         """ Return the list of koji rpms in a tag. """
@@ -257,17 +252,20 @@ class KojiContentGenerator(object):
         # Prepare the arguments for Koji multicall.
         # We will call session.getRPMHeaders(...) for each SRC RPM to get exclusivearch,
         # excludearch and license headers.
-        multicall_kwargs = [{"rpmID": rpm_id,
-                             "headers": ["exclusivearch", "excludearch", "license"]}
-                            for rpm_id in src_rpms.keys()]
+        multicall_kwargs = [
+            {"rpmID": rpm_id, "headers": ["exclusivearch", "excludearch", "license"]}
+            for rpm_id in src_rpms.keys()
+        ]
         # For each binary RPM, we only care about the "license" header.
-        multicall_kwargs += [{"rpmID": rpm_id, "headers": ["license"]}
-                             for rpm_id in binary_rpms.keys()]
+        multicall_kwargs += [
+            {"rpmID": rpm_id, "headers": ["license"]} for rpm_id in binary_rpms.keys()
+        ]
         rpms_headers = koji_retrying_multicall_map(
-            session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs)
+            session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs
+        )
 
         # Temporary dict with build_id as a key to find builds easily.
-        builds = {build['build_id']: build for build in builds}
+        builds = {build["build_id"]: build for build in builds}
 
         # Create a mapping of build IDs to SRPM NEVRAs so that the for loop below can directly
         # access these values when adding the `srpm_nevra` key to the returned RPMs
@@ -280,8 +278,7 @@ class KojiContentGenerator(object):
         # also other useful data from the Build associated with the RPM.
         for rpm, headers in zip(chain(src_rpms.values(), binary_rpms.values()), rpms_headers):
             if not headers:
-                raise RuntimeError(
-                    "No RPM headers received from Koji for RPM %s" % rpm["name"])
+                raise RuntimeError("No RPM headers received from Koji for RPM %s" % rpm["name"])
             if "license" not in headers:
                 raise RuntimeError(
                     "No RPM 'license' header received from Koji for RPM %s" % rpm["name"])
@@ -291,44 +288,42 @@ class KojiContentGenerator(object):
             build["excludearch"] = headers["excludearch"]
 
             rpm["license"] = headers["license"]
-            rpm['srpm_name'] = build['name']
-            rpm['srpm_nevra'] = build_id_to_srpm_nevra[rpm["build_id"]]
-            rpm['exclusivearch'] = build['exclusivearch']
-            rpm['excludearch'] = build['excludearch']
+            rpm["srpm_name"] = build["name"]
+            rpm["srpm_nevra"] = build_id_to_srpm_nevra[rpm["build_id"]]
+            rpm["exclusivearch"] = build["exclusivearch"]
+            rpm["excludearch"] = build["excludearch"]
 
         return rpms
 
     def _get_build(self):
         ret = {}
-        ret[u'name'] = self.module.name
+        ret[u"name"] = self.module.name
         if self.devel:
-            ret['name'] += "-devel"
-        ret[u'version'] = self.module.stream.replace("-", "_")
+            ret["name"] += "-devel"
+        ret[u"version"] = self.module.stream.replace("-", "_")
         # Append the context to the version to make NVRs of modules unique in the event of
         # module stream expansion
-        ret[u'release'] = '{0}.{1}'.format(self.module.version, self.module.context)
-        ret[u'source'] = self.module.scmurl
-        ret[u'start_time'] = calendar.timegm(
-            self.module.time_submitted.utctimetuple())
-        ret[u'end_time'] = calendar.timegm(
-            self.module.time_completed.utctimetuple())
-        ret[u'extra'] = {
+        ret[u"release"] = "{0}.{1}".format(self.module.version, self.module.context)
+        ret[u"source"] = self.module.scmurl
+        ret[u"start_time"] = calendar.timegm(self.module.time_submitted.utctimetuple())
+        ret[u"end_time"] = calendar.timegm(self.module.time_completed.utctimetuple())
+        ret[u"extra"] = {
             u"typeinfo": {
                 u"module": {
                     u"module_build_service_id": self.module.id,
                     u"content_koji_tag": self.module.koji_tag,
                     u"modulemd_str": self.module.modulemd,
-                    u"name": ret['name'],
+                    u"name": ret["name"],
                     u"stream": self.module.stream,
                     u"version": self.module.version,
-                    u"context": self.module.context
+                    u"context": self.module.context,
                 }
             }
         }
         session = get_session(self.config, login=False)
         # Only add the CG build owner if the user exists in Koji
         if session.getUser(self.owner):
-            ret[u'owner'] = self.owner
+            ret[u"owner"] = self.owner
         return ret
 
     def _get_buildroot(self):
@@ -338,18 +333,15 @@ class KojiContentGenerator(object):
             u"id": 1,
             u"host": {
                 u"arch": text_type(platform.machine()),
-                u'os': u"%s %s" % (distro[0], distro[1])
+                u"os": u"%s %s" % (distro[0], distro[1]),
             },
             u"content_generator": {
                 u"name": u"module-build-service",
-                u"version": text_type(version)
-            },
-            u"container": {
-                u"arch": text_type(platform.machine()),
-                u"type": u"none"
+                u"version": text_type(version),
             },
+            u"container": {u"arch": text_type(platform.machine()), u"type": u"none"},
             u"components": self.__get_rpms(),
-            u"tools": self.__get_tools()
+            u"tools": self.__get_tools(),
         }
         return ret
 
@@ -368,7 +360,7 @@ class KojiContentGenerator(object):
             u"arch": rpm["arch"],
             u"epoch": rpm["epoch"],
             u"sigmd5": rpm["payloadhash"],
-            u"type": u"rpm"
+            u"type": u"rpm",
         }
 
     def _get_arch_mmd_output(self, output_path, arch):
@@ -385,15 +377,11 @@ class KojiContentGenerator(object):
        :return: Dictionary with record in "output" list.
         """
         ret = {
-            'buildroot_id': 1,
-            'arch': arch,
-            'type': 'file',
-            'extra': {
-                'typeinfo': {
-                    'module': {}
-                }
-            },
-            'checksum_type': 'md5',
+            "buildroot_id": 1,
+            "arch": arch,
+            "type": "file",
+            "extra": {"typeinfo": {"module": {}}},
+            "checksum_type": "md5",
         }
 
         # Noarch architecture represents "generic" modulemd.txt.
@@ -406,13 +394,13 @@ class KojiContentGenerator(object):
         # parse it to get the Modulemd instance.
         mmd_path = os.path.join(output_path, mmd_filename)
         try:
-            with open(mmd_path, 'rb') as mmd_f:
+            with open(mmd_path, "rb") as mmd_f:
                 raw_data = mmd_f.read()
                 data = to_text_type(raw_data)
                 mmd = load_mmd(data)
-                ret['filename'] = mmd_filename
-                ret['filesize'] = len(raw_data)
-                ret['checksum'] = hashlib.md5(raw_data).hexdigest()
+                ret["filename"] = mmd_filename
+                ret["filesize"] = len(raw_data)
+                ret["checksum"] = hashlib.md5(raw_data).hexdigest()
         except IOError:
             if arch == "src":
                 # This might happen in case the Module is submitted directly
@@ -428,8 +416,7 @@ class KojiContentGenerator(object):
         if arch in ["noarch", "src"]:
             # For generic noarch/src modulemd, include all the RPMs.
             for rpm in self.rpms:
-                components.append(
-                    self._koji_rpm_to_component_record(rpm))
+                components.append(self._koji_rpm_to_component_record(rpm))
         else:
             # Check the RPM artifacts built for this architecture in modulemd file,
             # find the matching RPM in the `rpms_dict` coming from Koji and use it
@@ -438,11 +425,10 @@ class KojiContentGenerator(object):
             # RPM sigmd5 signature is not stored in MMD.
             for rpm in mmd.get_rpm_artifacts().get():
                 if rpm not in self.rpms_dict:
-                    raise RuntimeError("RPM %s found in the final modulemd but not "
-                                       "in Koji tag." % rpm)
+                    raise RuntimeError(
+                        "RPM %s found in the final modulemd but not in Koji tag." % rpm)
                 tag_rpm = self.rpms_dict[rpm]
-                components.append(
-                    self._koji_rpm_to_component_record(tag_rpm))
+                components.append(self._koji_rpm_to_component_record(tag_rpm))
 
         ret["components"] = components
         return ret
@@ -455,18 +441,18 @@ class KojiContentGenerator(object):
 
         try:
             log_path = os.path.join(output_path, "build.log")
-            with open(log_path, 'rb') as build_log:
+            with open(log_path, "rb") as build_log:
                 checksum = hashlib.md5(build_log.read()).hexdigest()
             stat = os.stat(log_path)
             ret.append(
                 {
-                    u'buildroot_id': 1,
-                    u'arch': u'noarch',
-                    u'type': u'log',
-                    u'filename': u'build.log',
-                    u'filesize': stat.st_size,
-                    u'checksum_type': u'md5',
-                    u'checksum': checksum
+                    u"buildroot_id": 1,
+                    u"arch": u"noarch",
+                    u"type": u"log",
+                    u"filename": u"build.log",
+                    u"filesize": stat.st_size,
+                    u"checksum_type": u"md5",
+                    u"checksum": checksum,
                 }
             )
         except IOError:
@@ -480,7 +466,7 @@ class KojiContentGenerator(object):
             u"metadata_version": 0,
             u"buildroots": [self._get_buildroot()],
             u"build": self._get_build(),
-            u"output": self._get_output(output_path)
+            u"output": self._get_output(output_path),
         }
         return ret
 
@@ -567,12 +553,10 @@ class KojiContentGenerator(object):
         # For example:
         #  "x86_64" -> ['athlon', 'i386', 'i586', 'i486', 'i686']
         #  "i686" -> []
-        multilib_arches = set(compatible_arches) - set(
-            pungi.arch.get_compatible_arches(arch))
+        multilib_arches = set(compatible_arches) - set(pungi.arch.get_compatible_arches(arch))
         # List of architectures that should be in ExclusiveArch tag or missing
         # from ExcludeArch tag. Multilib should not be enabled here.
-        exclusive_arches = pungi.arch.get_valid_arches(
-            arch, multilib=False, add_noarch=False)
+        exclusive_arches = pungi.arch.get_valid_arches(arch, multilib=False, add_noarch=False)
 
         # Modulemd.SimpleSet into which we will add the RPMs.
         rpm_artifacts = Modulemd.SimpleSet()
@@ -605,8 +589,7 @@ class KojiContentGenerator(object):
             # - the architecture of an RPM is not multilib architecture for `arch`.
             # - the architecture of an RPM is not the final mmd architecture.
             # - the architecture of an RPM is not "noarch" or "src".
-            if (rpm["arch"] not in multilib_arches and
-                    rpm["arch"] not in [arch, "noarch", "src"]):
+            if rpm["arch"] not in multilib_arches and rpm["arch"] not in [arch, "noarch", "src"]:
                 continue
 
             # Skip the RPM if it is excluded on this arch or exclusive
@@ -728,8 +711,7 @@ class KojiContentGenerator(object):
         commit = xmd.get("mbs", {}).get("commit")
         scmurl = xmd.get("mbs", {}).get("scmurl")
         if not commit or not scmurl:
-            log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.",
-                        self.module)
+            log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.", self.module)
             return
 
         td = None
@@ -747,9 +729,7 @@ class KojiContentGenerator(object):
             if td is not None:
                 shutil.rmtree(td)
         except Exception as e:
-            log.warning(
-                "Failed to remove temporary directory {!r}: {}".format(
-                    td, str(e)))
+            log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))
 
     def _prepare_file_directory(self):
         """ Creates a temporary directory that will contain all the files
@@ -787,10 +767,10 @@ class KojiContentGenerator(object):
         Uploads output files to Koji hub.
         """
         to_upload = []
-        for info in metadata['output']:
-            if info.get('metadata_only', False):
+        for info in metadata["output"]:
+            if info.get("metadata_only", False):
                 continue
-            localpath = os.path.join(file_dir, info['filename'])
+            localpath = os.path.join(file_dir, info["filename"])
             if not os.path.exists(localpath):
                 err = "Cannot upload %s to Koji. No such file." % localpath
                 log.error(err)
@@ -799,7 +779,7 @@ class KojiContentGenerator(object):
             to_upload.append([localpath, info])
 
         # Create unique server directory.
-        serverdir = 'mbs/%r.%d' % (time.time(), self.module.id)
+        serverdir = "mbs/%r.%d" % (time.time(), self.module.id)
 
         for localpath, info in to_upload:
             log.info("Uploading %s to Koji" % localpath)
@@ -816,8 +796,8 @@ class KojiContentGenerator(object):
         tag_name = self.module.cg_build_koji_tag
         if not tag_name:
-            log.info("%r: Not tagging Content Generator build, no "
-                     "cg_build_koji_tag set", self.module)
+            log.info(
+                "%r: Not tagging Content Generator build, no cg_build_koji_tag set", self.module)
             return
 
         tag_names_to_try = [tag_name, self.config.koji_cg_default_build_tag]
@@ -827,20 +807,19 @@ class KojiContentGenerator(object):
             if tag_info:
                 break
 
-            log.info("%r: Tag %s not found in Koji, trying next one.",
-                     self.module, tag)
+            log.info("%r: Tag %s not found in Koji, trying next one.", self.module, tag)
 
         if not tag_info:
             log.warning(
-                "%r:, Not tagging Content Generator build, no available tag"
-                " found, tried %r", self.module, tag_names_to_try)
+                "%r:, Not tagging Content Generator build, no available tag found, tried %r",
+                self.module, tag_names_to_try,
+            )
             return
 
         build = self._get_build()
         nvr = "%s-%s-%s" % (build["name"], build["version"], build["release"])
 
-        log.info("Content generator build %s will be tagged as %s in "
-                 "Koji", nvr, tag)
+        log.info("Content generator build %s will be tagged as %s in Koji", nvr, tag)
         session.tagBuild(tag_info["id"], nvr)
 
     def _load_koji_tag(self, koji_session):
@@ -879,7 +858,7 @@ class KojiContentGenerator(object):
         except koji.GenericError as e:
             if "Build already exists" not in str(e):
                 raise
-            log.warning('Failed to import content generator')
+            log.warning("Failed to import content generator")
             build_info = None
         if conf.koji_cg_tag_build:
             self._tag_cg_build()
diff --git a/module_build_service/builder/KojiModuleBuilder.py b/module_build_service/builder/KojiModuleBuilder.py
index c53c9e67..69015d9b 100644
--- a/module_build_service/builder/KojiModuleBuilder.py
+++ b/module_build_service/builder/KojiModuleBuilder.py
@@ -76,8 +76,10 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
     if list_of_args is None and list_of_kwargs is None:
         raise ProgrammingError("One of list_of_args or list_of_kwargs must be set.")
 
-    if (type(list_of_args) not in [type(None), list] or
-            type(list_of_kwargs) not in [type(None), list]):
+    if (
+        type(list_of_args) not in [type(None), list]
+        or type(list_of_kwargs) not in [type(None), list]
+    ):
         raise ProgrammingError("list_of_args and list_of_kwargs must be list or None.")
 
     if list_of_kwargs is None:
@@ -99,16 +101,19 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
     try:
         responses = koji_session.multiCall(strict=True)
     except Exception:
-        log.exception("Exception raised for multicall of method %r with args %r, %r:",
-                      koji_session_fnc, args, kwargs)
+        log.exception(
+            "Exception raised for multicall of method %r with args %r, %r:",
+            koji_session_fnc, args, kwargs,
+        )
         return None
 
     if not responses:
         log.error("Koji did not return response for multicall of %r", koji_session_fnc)
         return None
 
     if type(responses) != list:
-        log.error("Fault element was returned for multicall of method %r: %r",
-                  koji_session_fnc, responses)
+        log.error(
+            "Fault element was returned for multicall of method %r: %r", koji_session_fnc, responses
+        )
         return None
 
     results = []
@@ -122,13 +127,17 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
     for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
         if type(response) == list:
             if not response:
-                log.error("Empty list returned for multicall of method %r with args %r, %r",
-                          koji_session_fnc, args, kwargs)
+                log.error(
+                    "Empty list returned for multicall of method %r with args %r, %r",
+                    koji_session_fnc, args, kwargs
+                )
                 return None
             results.append(response[0])
         else:
-            log.error("Unexpected data returned for multicall of method %r with args %r, %r: %r",
-                      koji_session_fnc, args, kwargs, response)
+            log.error(
+                "Unexpected data returned for multicall of method %r with args %r, %r: %r",
+                koji_session_fnc, args, kwargs, response
+            )
             return None
 
     return results
@@ -150,9 +159,9 @@ class KojiModuleBuilder(GenericBuilder):
 
     backend = "koji"
     _build_lock = threading.Lock()
-    region = dogpile.cache.make_region().configure('dogpile.cache.memory')
+    region = dogpile.cache.make_region().configure("dogpile.cache.memory")
 
-    @module_build_service.utils.validate_koji_tag('tag_name')
+    @module_build_service.utils.validate_koji_tag("tag_name")
     def __init__(self, owner, module, config, tag_name, components):
         """
         :param owner: a string representing who kicked off the builds
@@ -186,12 +195,11 @@ class KojiModuleBuilder(GenericBuilder):
         self.components = components
 
     def __repr__(self):
-        return "<KojiModuleBuilder module: %s, tag: %s>" % (
-            self.module_str, self.tag_name)
+        return "<KojiModuleBuilder module: %s, tag: %s>" % (self.module_str, self.tag_name)
 
     @region.cache_on_arguments()
     def getPerms(self):
-        return dict([(p['name'], p['id']) for p in self.koji_session.getAllPerms()])
+        return dict([(p["name"], p["id"]) for p in self.koji_session.getAllPerms()])
 
     @module_build_service.utils.retry(wait_on=(IOError, koji.GenericError))
     def buildroot_ready(self, artifacts=None):
@@ -201,24 +209,22 @@ class KojiModuleBuilder(GenericBuilder):
         """
         assert self.module_target, "Invalid build target"
 
-        tag_id = self.module_target['build_tag']
+        tag_id = self.module_target["build_tag"]
         repo = self.koji_session.getRepo(tag_id)
         builds = [self.koji_session.getBuild(a, strict=True) for a in artifacts or []]
-        log.info("%r checking buildroot readiness for "
-                 "repo: %r, tag_id: %r, artifacts: %r, builds: %r" % (
-                     self, repo, tag_id, artifacts, builds))
+        log.info(
+            "%r checking buildroot readiness for repo: %r, tag_id: %r, artifacts: %r, builds: %r"
+            % (self, repo, tag_id, artifacts, builds)
+        )
 
         if not repo:
             log.info("Repo is not generated yet, buildroot is not ready yet.")
             return False
 
-        ready = bool(koji.util.checkForBuilds(
-            self.koji_session,
-            tag_id,
-            builds,
-            repo['create_event'],
-            latest=True,
-        ))
+        ready = bool(
+            koji.util.checkForBuilds(
+                self.koji_session, tag_id, builds, repo["create_event"], latest=True)
+        )
         if ready:
             log.info("%r buildroot is ready" % self)
         else:
@@ -239,19 +245,22 @@ class KojiModuleBuilder(GenericBuilder):
         # Get all the RPMs and builds of the reusable module in Koji
         rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)
         # Convert the list to a dict where each key is the build_id
-        builds = {build['build_id']: build for build in builds}
+        builds = {build["build_id"]: build for build in builds}
         # Create a mapping of package (SRPM) to the RPMs in NVR format
         package_to_rpms = {}
         for rpm in rpms:
-            package = builds[rpm['build_id']]['name']
+            package = builds[rpm["build_id"]]["name"]
             if package not in package_to_rpms:
                 package_to_rpms[package] = []
             package_to_rpms[package].append(kobo.rpmlib.make_nvr(rpm))
 
         components_in_module = [c.package for c in module_build.component_builds]
         reusable_components = get_reusable_components(
-            db_session, module_build, components_in_module,
-            previous_module_build=reusable_module)
+            db_session,
+            module_build,
+            components_in_module,
+            previous_module_build=reusable_module,
+        )
         # Loop through all the reusable components to find if any of their RPMs are
         # being filtered
         for reusable_component in reusable_components:
@@ -261,7 +270,7 @@ class KojiModuleBuilder(GenericBuilder):
             # We must get the component name from the NVR and not from
             # reusable_component.package because macros such as those used
             # by SCLs can change the name of the underlying build
-            component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)['name']
+            component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]
 
             if component_name not in package_to_rpms:
                 continue
@@ -270,13 +279,13 @@ class KojiModuleBuilder(GenericBuilder):
             for nvr in package_to_rpms[component_name]:
                 parsed_nvr = kobo.rpmlib.parse_nvr(nvr)
                 # Don't compare with the epoch
-                parsed_nvr['epoch'] = None
+                parsed_nvr["epoch"] = None
                 # Loop through all the filtered RPMs to find a match with the reusable
                 # component's RPMs.
                 for nvr2 in list(filtered_rpms):
                     parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)
                     # Don't compare with the epoch
-                    parsed_nvr2['epoch'] = None
+                    parsed_nvr2["epoch"] = None
                     # Only remove the filter if we are going to reuse a component with
                     # the same exact NVR
                     if parsed_nvr == parsed_nvr2:
@@ -299,10 +308,10 @@ class KojiModuleBuilder(GenericBuilder):
 
         # Taken from Karsten's create-distmacro-pkg.sh
         # - however removed any provides to system-release/redhat-release
-        name = 'module-build-macros'
+        name = "module-build-macros"
         version = "0.1"
         release = "1"
-        today = datetime.date.today().strftime('%a %b %d %Y')
+        today = datetime.date.today().strftime("%a %b %d %Y")
         mmd = module_build.mmd()
 
         # Generate "Conflicts: name = version-release". This is workaround for
@@ -320,19 +329,20 @@ class KojiModuleBuilder(GenericBuilder):
                     module_build, req_data["filtered_rpms"])
             else:
                 filtered_rpms = req_data["filtered_rpms"]
-            filter_conflicts.extend(map(
-                KojiModuleBuilder.format_conflicts_line, filtered_rpms))
+            filter_conflicts.extend(map(KojiModuleBuilder.format_conflicts_line, filtered_rpms))
 
-            if req_name in conf.base_module_names and 'ursine_rpms' in req_data:
+            if req_name in conf.base_module_names and "ursine_rpms" in req_data:
                 comments = (
-                    '# Filter out RPMs from stream collision modules found from ursine content'
-                    ' for base module {}:'.format(req_name),
-                    '# ' + ', '.join(req_data['stream_collision_modules']),
+                    ("# Filter out RPMs from stream collision modules found from ursine content"
+                     " for base module {}:".format(req_name)),
+                    "# " + ", ".join(req_data["stream_collision_modules"]),
                 )
-                filter_conflicts.extend(chain(
-                    comments,
-                    map(KojiModuleBuilder.format_conflicts_line, req_data['ursine_rpms'])
-                ))
+                filter_conflicts.extend(
+                    chain(
+                        comments,
+                        map(KojiModuleBuilder.format_conflicts_line, req_data["ursine_rpms"]),
+                    )
+                )
 
         spec_content = textwrap.dedent("""
             %global dist {disttag}
@@ -433,11 +443,20 @@ class KojiModuleBuilder(GenericBuilder):
         log.debug("Building %s.spec" % name)
 
         # We are not interested in the rpmbuild stdout...
-        null_fd = open(os.devnull, 'w')
-        execute_cmd(['rpmbuild', '-bs', '%s.spec' % name,
-                     '--define', '_topdir %s' % td,
-                     '--define', '_sourcedir %s' % sources_dir],
-                    cwd=td, stdout=null_fd)
+        null_fd = open(os.devnull, "w")
+        execute_cmd(
+            [
+                "rpmbuild",
+                "-bs",
+                "%s.spec" % name,
+                "--define",
+                "_topdir %s" % td,
+                "--define",
+                "_sourcedir %s" % sources_dir,
+            ],
+            cwd=td,
+            stdout=null_fd,
+        )
         null_fd.close()
         sdir = os.path.join(td, "SRPMS")
         srpm_paths = glob.glob("%s/*.src.rpm" % sdir)
@@ -458,10 +477,8 @@ class KojiModuleBuilder(GenericBuilder):
        :return: the Koji session object.
        :rtype: :class:`koji.ClientSession`
         """
-        koji_config = munch.Munch(koji.read_config(
-            profile_name=config.koji_profile,
-            user_config=config.koji_config,
-        ))
+        koji_config = munch.Munch(
+            koji.read_config(profile_name=config.koji_profile, user_config=config.koji_config))
 
         # Timeout after 10 minutes. The default is 12 hours.
         koji_config["timeout"] = 60 * 10
@@ -494,9 +511,7 @@ class KojiModuleBuilder(GenericBuilder):
             koji_session.krb_login(principal=principal, keytab=keytab, ctx=ctx, ccache=ccache)
         elif authtype == "ssl":
             koji_session.ssl_login(
-                os.path.expanduser(koji_config.cert),
-                None,
-                os.path.expanduser(koji_config.serverca)
+                os.path.expanduser(koji_config.cert), None, os.path.expanduser(koji_config.serverca)
             )
         else:
             raise ValueError("Unrecognized koji authtype %r" % authtype)
@@ -512,8 +527,7 @@ class KojiModuleBuilder(GenericBuilder):
 
         # Create or update individual tags
         # the main tag needs arches so pungi can dump it
-        self.module_tag = self._koji_create_tag(
-            self.tag_name, self.arches, perm="admin")
+        self.module_tag = self._koji_create_tag(self.tag_name, self.arches, perm="admin")
         self.module_build_tag = self._koji_create_tag(
             self.tag_name + "-build", self.arches, perm="admin")
 
@@ -530,19 +544,23 @@ class KojiModuleBuilder(GenericBuilder):
 
         @module_build_service.utils.retry(wait_on=SysCallError, interval=5)
         def add_groups():
-            return self._koji_add_groups_to_tag(
-                dest_tag=self.module_build_tag,
-                groups=groups,
-            )
+            return self._koji_add_groups_to_tag(dest_tag=self.module_build_tag, groups=groups)
+
         add_groups()
 
         # Koji targets can only be 50 characters long, but the generate_koji_tag function
         # checks the length with '-build' at the end, but we know we will never append '-build',
         # so we can safely have the name check be more characters
-        target_length = 50 + len('-build')
+        target_length = 50 + len("-build")
         target = module_build_service.utils.generate_koji_tag(
-            self.module.name, self.module.stream, self.module.version, self.module.context,
-            target_length, scratch=self.module.scratch, scratch_id=self.module.id)
+            self.module.name,
+            self.module.stream,
+            self.module.version,
+            self.module.context,
+            target_length,
+            scratch=self.module.scratch,
+            scratch_id=self.module.id,
+        )
         # Add main build target.
         self.module_target = self._koji_add_target(target, self.module_build_tag, self.module_tag)
 
@@ -570,17 +588,19 @@ class KojiModuleBuilder(GenericBuilder):
         This method is safe to call multiple times.
         """
         log.info("%r adding artifacts %r" % (self, artifacts))
-        build_tag = self._get_tag(self.module_build_tag)['id']
+        build_tag = self._get_tag(self.module_build_tag)["id"]
 
         xmd = self.mmd.get_xmd()
         if "mbs_options" in xmd.keys() and "blocked_packages" in xmd["mbs_options"].keys():
             packages = [kobo.rpmlib.parse_nvr(nvr)["name"] for nvr in artifacts]
-            packages = [package for package in packages
-                        if package in xmd["mbs_options"]["blocked_packages"]]
+            packages = [
+                package for package in packages
+                if package in xmd["mbs_options"]["blocked_packages"]
+            ]
             if packages:
                 self._koji_unblock_packages(packages)
 
-        tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag['name'])
+        tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag["name"])
 
         self.koji_session.multicall = True
         for nvr in artifacts:
@@ -593,8 +613,8 @@ class KojiModuleBuilder(GenericBuilder):
             if not install:
                 continue
 
-            for group in ('srpm-build', 'build'):
-                name = kobo.rpmlib.parse_nvr(nvr)['name']
+            for group in ("srpm-build", "build"):
+                name = kobo.rpmlib.parse_nvr(nvr)["name"]
                 log.info("%r adding %s to group %s" % (self, name, group))
                 self.koji_session.groupPackageListAdd(build_tag, group, name)
         self.koji_session.multiCall(strict=True)
@@ -606,11 +626,11 @@ class KojiModuleBuilder(GenericBuilder):
         :return: None
         """
         if dest_tag:
-            tag = self._get_tag(self.module_tag)['id']
-            tagged_nvrs = self._get_tagged_nvrs(self.module_tag['name'])
+            tag = self._get_tag(self.module_tag)["id"]
+            tagged_nvrs = self._get_tagged_nvrs(self.module_tag["name"])
         else:
-            tag = self._get_tag(self.module_build_tag)['id']
-            tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag['name'])
+            tag = self._get_tag(self.module_build_tag)["id"]
+            tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag["name"])
 
         self.koji_session.multicall = True
         for nvr in artifacts:
@@ -626,18 +646,18 @@ class KojiModuleBuilder(GenericBuilder):
         :param artifacts: a list of NVRs to untag
         :return: None
         """
-        build_tag_name = self.tag_name + '-build'
+        build_tag_name = self.tag_name + "-build"
         dest_tag = self._get_tag(self.tag_name, strict=False)
         build_tag = self._get_tag(build_tag_name, strict=False)
         # Get the NVRs in the tags to make sure the builds exist and they're tagged before
         # untagging them
         if dest_tag:
-            dest_tagged_nvrs = self._get_tagged_nvrs(dest_tag['name'])
+            dest_tagged_nvrs = self._get_tagged_nvrs(dest_tag["name"])
         else:
             log.info('The tag "{0}" doesn\'t exist'.format(self.tag_name))
             dest_tagged_nvrs = []
         if build_tag:
-            build_tagged_nvrs = self._get_tagged_nvrs(build_tag['name'])
+            build_tagged_nvrs = self._get_tagged_nvrs(build_tag["name"])
         else:
             log.info('The tag "{0}" doesn\'t exist'.format(build_tag_name))
             build_tagged_nvrs = []
@@ -649,11 +669,11 @@ class KojiModuleBuilder(GenericBuilder):
         self.koji_session.multicall = True
         for nvr in artifacts:
             if nvr in dest_tagged_nvrs:
-                log.info("%r untagging %r from %r" % (self, nvr, dest_tag['id']))
-                self.koji_session.untagBuild(dest_tag['id'], nvr)
+                log.info("%r untagging %r from %r" % (self, nvr, dest_tag["id"]))
+                self.koji_session.untagBuild(dest_tag["id"], nvr)
             if nvr in build_tagged_nvrs:
-                log.info("%r untagging %r from %r" % (self, nvr, build_tag['id']))
-                self.koji_session.untagBuild(build_tag['id'], nvr)
+                log.info("%r untagging %r from %r" % (self, nvr, build_tag["id"]))
+                self.koji_session.untagBuild(build_tag["id"], nvr)
         self.koji_session.multiCall(strict=True)
 
     def wait_task(self, task_id):
@@ -683,12 +703,12 @@ class KojiModuleBuilder(GenericBuilder):
         :param component_build: a ComponentBuild object
         :return: a list of msgs that MBS needs to process
         """
-        opts = {'latest': True, 'package': component_build.package, 'inherit': False}
-        build_tagged = self.koji_session.listTagged(self.module_build_tag['name'], **opts)
+        opts = {"latest": True, "package": component_build.package, "inherit": False}
+        build_tagged = self.koji_session.listTagged(self.module_build_tag["name"], **opts)
         dest_tagged = None
         # Only check the destination tag if the component is not a build_time_only component
         if not component_build.build_time_only:
-            dest_tagged = self.koji_session.listTagged(self.module_tag['name'], **opts)
+            dest_tagged = self.koji_session.listTagged(self.module_tag["name"], **opts)
         for rv in [build_tagged, dest_tagged]:
             if rv and len(rv) != 1:
                 raise ValueError("Expected exactly one item in list. Got %s" % rv)
@@ -716,33 +736,48 @@ class KojiModuleBuilder(GenericBuilder):
             return further_work
 
         # Start setting up MBS' database to use the existing build
-        log.info('Skipping build of "{0}" since it already exists.'.format(build['nvr']))
+        log.info('Skipping build of "{0}" since it already exists.'.format(build["nvr"]))
         # Set it to COMPLETE so it doesn't count towards the concurrent component threshold
-        component_build.state = koji.BUILD_STATES['COMPLETE']
-        component_build.nvr = build['nvr']
-        component_build.task_id = build['task_id']
-        component_build.state_reason = 'Found existing build'
+        component_build.state = koji.BUILD_STATES["COMPLETE"]
+        component_build.nvr = build["nvr"]
+        component_build.task_id = build["task_id"]
+        component_build.state_reason = "Found existing build"
         nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr)
         # Trigger a completed build message
-        further_work.append(module_build_service.messaging.KojiBuildChange(
-            'recover_orphaned_artifact: fake message', build['build_id'],
-            build['task_id'], koji.BUILD_STATES['COMPLETE'], component_build.package,
-            nvr_dict['version'], nvr_dict['release'], component_build.module_build.id))
+        further_work.append(
+            module_build_service.messaging.KojiBuildChange(
+                "recover_orphaned_artifact: fake message",
+                build["build_id"],
+                build["task_id"],
+                koji.BUILD_STATES["COMPLETE"],
+                component_build.package,
+                nvr_dict["version"],
+                nvr_dict["release"],
+                component_build.module_build.id,
+            )
+        )
 
         component_tagged_in = []
         if build_tagged:
-            component_tagged_in.append(self.module_build_tag['name'])
+            component_tagged_in.append(self.module_build_tag["name"])
         else:
             # Tag it in the build tag if it's not there
             self.tag_artifacts([component_build.nvr], dest_tag=False)
         if dest_tagged:
-            component_tagged_in.append(self.module_tag['name'])
+            component_tagged_in.append(self.module_tag["name"])
         for tag in component_tagged_in:
-            log.info('The build being skipped isn\'t tagged in the "{0}" tag. Will send a '
-                     'message to the tag handler'.format(tag))
-            further_work.append(module_build_service.messaging.KojiTagChange(
-                'recover_orphaned_artifact: fake message', tag, component_build.package,
-                component_build.nvr))
+            log.info(
+                'The build being skipped isn\'t tagged in the "{0}" tag. Will send a message to '
+                "the tag handler".format(tag)
+            )
+            further_work.append(
+                module_build_service.messaging.KojiTagChange(
+                    "recover_orphaned_artifact: fake message",
+                    tag,
+                    component_build.package,
+                    component_build.nvr,
+                )
+            )
         return further_work
 
     def build(self, artifact_name, source):
@@ -768,21 +803,23 @@ class KojiModuleBuilder(GenericBuilder):
             # For some reason repr(time.time()) includes 4 or 5
             # more digits of precision than str(time.time())
             # Unnamed Engineer: Guido v. R., I am disappoint
-            return '%s/%r.%s' % (prefix, time.time(),
-                                 ''.join([random.choice(string.ascii_letters)
-                                          for i in range(8)]))
+            return "%s/%r.%s" % (
+                prefix,
+                time.time(),
+                "".join([random.choice(string.ascii_letters) for i in range(8)]),
+            )
 
         if not self.__prep:
             raise RuntimeError("Buildroot is not prep-ed")
 
         self._koji_whitelist_packages([artifact_name])
 
-        if source.startswith('cli-build/'):
+        if source.startswith("cli-build/"):
             # treat source as a custom srpm that has already been uploaded to koji
             pass
-        elif '://' not in source:
+        elif "://" not in source:
             # treat source as an srpm and upload it
-            serverdir = _unique_path('cli-build')
+            serverdir = _unique_path("cli-build")
             callback = None
             self.koji_session.uploadWrapper(source, serverdir, callback=callback)
             source = "%s/%s" % (serverdir, os.path.basename(source))
@@ -792,32 +829,30 @@ class KojiModuleBuilder(GenericBuilder):
         # The reason is that it is faster to build this RPM in
         # already existing shared target, because Koji does not need to do
         # repo-regen.
-        if (artifact_name == "module-build-macros" and
-                self.config.koji_build_macros_target):
+        if artifact_name == "module-build-macros" and self.config.koji_build_macros_target:
             module_target = self.config.koji_build_macros_target
         else:
-            module_target = self.module_target['name']
+            module_target = self.module_target["name"]
 
         build_opts = {
             "skip_tag": True,
             "mbs_artifact_name": artifact_name,
-            "mbs_module_target": module_target
+            "mbs_module_target": module_target,
         }
 
         # disabled by default, wouldn't work until Koji issue #1158 is done
         if conf.allow_arch_override:
-            build_opts['arch_override'] = \
-                self.mmd.get_rpm_components()[artifact_name].get_arches().get()
+            build_opts["arch_override"] = (
+                self.mmd.get_rpm_components()[artifact_name].get_arches().get())
 
-        task_id = self.koji_session.build(source, module_target, build_opts,
-                                          priority=self.build_priority)
-        log.info("submitted build of %s (task_id=%s), via %s" % (
-            source, task_id, self))
+        task_id = self.koji_session.build(
+            source, module_target, build_opts, priority=self.build_priority)
+        log.info("submitted build of %s (task_id=%s), via %s" % (source, task_id, self))
         if task_id:
-            state = koji.BUILD_STATES['BUILDING']
+            state = koji.BUILD_STATES["BUILDING"]
             reason = "Submitted %s to Koji" % (artifact_name)
         else:
-            state = koji.BUILD_STATES['FAILED']
+            state = koji.BUILD_STATES["FAILED"]
             reason = "Failed to submit artifact %s to Koji" % (artifact_name)
         return task_id, state, reason, None
 
@@ -825,8 +860,10 @@ class KojiModuleBuilder(GenericBuilder):
         try:
             self.koji_session.cancelTask(task_id)
         except Exception as error:
-            log.error('Failed to cancel task ID {0} in Koji. The error '
-                      'message was: {1}'.format(task_id, str(error)))
+            log.error(
+                "Failed to cancel task ID {0} in Koji. The error "
+                "message was: {1}".format(task_id, str(error))
+            )
 
     @classmethod
     def repo_from_tag(cls, config, tag_name, arch):
@@ -840,52 +877,52 @@ class KojiModuleBuilder(GenericBuilder):
         """
         return "%s/%s/latest/%s" % (config.koji_repository_url, tag_name, arch)
 
-    @module_build_service.utils.validate_koji_tag('tag', post='')
+    @module_build_service.utils.validate_koji_tag("tag", post="")
     def _get_tag(self, tag, strict=True):
         if isinstance(tag, dict):
-            tag = tag['name']
+            tag = tag["name"]
         taginfo = self.koji_session.getTag(tag)
         if not taginfo:
             if strict:
                 raise SystemError("Unknown tag: %s" % tag)
         return taginfo
 
-    @module_build_service.utils.validate_koji_tag(['tag_name'], post='')
+    @module_build_service.utils.validate_koji_tag(["tag_name"], post="")
     def _koji_add_many_tag_inheritance(self, tag_name, parent_tags):
         tag = self._get_tag(tag_name)
         # highest priority num is at the end
-        inheritance_data = sorted(self.koji_session.getInheritanceData(tag['name']) or
-                                  [], key=lambda k: k['priority'])
+        inheritance_data = sorted(
+            self.koji_session.getInheritanceData(tag["name"]) or [], key=lambda k: k["priority"])
 
         # Set initial priority to last record in inheritance data or 0
         priority = 0
         if inheritance_data:
-            priority = inheritance_data[-1]['priority'] + 10
+            priority = inheritance_data[-1]["priority"] + 10
 
         def record_exists(parent_id, data):
             for item in data:
-                if parent_id == item['parent_id']:
+                if parent_id == item["parent_id"]:
                     return True
             return False
 
         for parent in parent_tags:  # We expect that they're sorted
             parent = self._get_tag(parent)
-            if record_exists(parent['id'], inheritance_data):
+            if record_exists(parent["id"], inheritance_data):
                 continue
 
             parent_data = {}
-            parent_data['parent_id'] = parent['id']
-            parent_data['priority'] = priority
-            parent_data['maxdepth'] = None
-            parent_data['intransitive'] = False
-            parent_data['noconfig'] = False
-            parent_data['pkg_filter'] = ''
+            parent_data["parent_id"] = parent["id"]
+            parent_data["priority"] = priority
+            parent_data["maxdepth"] = None
+            parent_data["intransitive"] = False
+            parent_data["noconfig"] = False
+            parent_data["pkg_filter"] = ""
             inheritance_data.append(parent_data)
             priority += 10
 
         if inheritance_data:
-            self.koji_session.setInheritanceData(tag['id'], inheritance_data)
+            self.koji_session.setInheritanceData(tag["id"], inheritance_data)
 
-    @module_build_service.utils.validate_koji_tag('dest_tag')
+    @module_build_service.utils.validate_koji_tag("dest_tag")
     def _koji_add_groups_to_tag(self, dest_tag, groups):
         """Add groups to a tag as well as packages listed by group
 
@@ -899,17 +936,17 @@ class KojiModuleBuilder(GenericBuilder):
         log.debug("Adding groups=%s to tag=%s" % (list(groups), dest_tag))
         if groups and not isinstance(groups, dict):
             raise ValueError("Expected dict {'group' : [str(package1), ...]")
-        dest_tag = self._get_tag(dest_tag)['name']
-        existing_groups = dict([(p['name'], p['group_id'])
-                                for p
-                                in self.koji_session.getTagGroups(dest_tag, inherit=False)
-                                ])
+        dest_tag = self._get_tag(dest_tag)["name"]
+        existing_groups = dict([
+            (p["name"], p["group_id"])
+            for p in self.koji_session.getTagGroups(dest_tag, inherit=False)
+        ])
 
         for group, packages in groups.items():
             group_id = existing_groups.get(group, None)
             if group_id is not None:
-                log.debug("Group %s already exists for tag %s. Skipping creation."
-                          % (group, dest_tag))
+                log.debug(
+                    "Group %s already exists for tag %s. Skipping creation."
% (group, dest_tag)) continue self.koji_session.groupListAdd(dest_tag, group) @@ -919,7 +956,7 @@ class KojiModuleBuilder(GenericBuilder): for pkg in packages: self.koji_session.groupPackageListAdd(dest_tag, group, pkg) - @module_build_service.utils.validate_koji_tag('tag_name') + @module_build_service.utils.validate_koji_tag("tag_name") def _koji_create_tag(self, tag_name, arches=None, perm=None): """Create a tag in Koji @@ -945,16 +982,16 @@ class KojiModuleBuilder(GenericBuilder): raise ValueError("Expected list or None on input got %s" % type(arches)) current_arches = [] - if taginfo['arches']: # None if none - current_arches = taginfo['arches'].split() # string separated by empty spaces + if taginfo["arches"]: # None if none + current_arches = taginfo["arches"].split() # string separated by empty spaces if set(arches) != set(current_arches): - opts['arches'] = " ".join(arches) + opts["arches"] = " ".join(arches) if perm: - if taginfo['locked']: - raise SystemError("Tag %s: master lock already set. Can't edit tag" - % taginfo['name']) + if taginfo["locked"]: + raise SystemError( + "Tag %s: master lock already set. Can't edit tag" % taginfo["name"]) perm_ids = self.getPerms() @@ -962,15 +999,15 @@ class KojiModuleBuilder(GenericBuilder): raise ValueError("Unknown permissions %s" % perm) perm_id = perm_ids[perm] - if taginfo['perm'] not in (perm_id, perm): # check either id or the string - opts['perm'] = perm_id + if taginfo["perm"] not in (perm_id, perm): # check either id or the string + opts["perm"] = perm_id # Create deepcopy of conf dict, because we are going to change it later. - opts['extra'] = copy.deepcopy(conf.koji_tag_extra_opts) + opts["extra"] = copy.deepcopy(conf.koji_tag_extra_opts) xmd = self.mmd.get_xmd() if "mbs_options" in xmd.keys() and "repo_include_all" in xmd["mbs_options"].keys(): - opts['extra']['repo_include_all'] = xmd["mbs_options"]["repo_include_all"] + opts["extra"]["repo_include_all"] = xmd["mbs_options"]["repo_include_all"] # edit tag with opts self.koji_session.editTag2(tag_name, **opts) @@ -983,18 +1020,20 @@ class KojiModuleBuilder(GenericBuilder): # This will help with potential resubmitting of failed builds pkglists = {} for tag in tags: - pkglists[tag['id']] = dict([(p['package_name'], p['package_id']) - for p in self.koji_session.listPackages(tagID=tag['id'])]) + pkglists[tag["id"]] = dict([ + (p["package_name"], p["package_id"]) + for p in self.koji_session.listPackages(tagID=tag["id"]) + ]) self.koji_session.multicall = True for tag in tags: - pkglist = pkglists[tag['id']] + pkglist = pkglists[tag["id"]] for package in packages: if pkglist.get(package, None): log.debug("%s Package %s is already whitelisted." 
% (self, package)) continue - self.koji_session.packageListAdd(tag['name'], package, self.owner) + self.koji_session.packageListAdd(tag["name"], package, self.owner) self.koji_session.multiCall(strict=True) def _koji_block_packages(self, packages): @@ -1013,7 +1052,7 @@ class KojiModuleBuilder(GenericBuilder): args = [[self.module_build_tag["name"], package] for package in packages] koji_multicall_map(self.koji_session, self.koji_session.packageListUnblock, args) - @module_build_service.utils.validate_koji_tag(['build_tag', 'dest_tag']) + @module_build_service.utils.validate_koji_tag(["build_tag", "dest_tag"]) def _koji_add_target(self, name, build_tag, dest_tag): """Add build target if it doesn't exist or validate the existing one @@ -1036,25 +1075,29 @@ class KojiModuleBuilder(GenericBuilder): target_info = self.koji_session.getBuildTarget(name) barches = build_tag.get("arches", None) - assert barches, "Build tag %s has no arches defined." % build_tag['name'] + assert barches, "Build tag %s has no arches defined." % build_tag["name"] if not target_info: - target_info = self.koji_session.createBuildTarget(name, build_tag['name'], - dest_tag['name']) + target_info = self.koji_session.createBuildTarget( + name, build_tag["name"], dest_tag["name"]) else: # verify whether build and destination tag matches - if build_tag['name'] != target_info['build_tag_name']: - raise SystemError(("Target references unexpected build_tag_name. " - "Got '%s', expected '%s'. Please contact administrator.") - % (target_info['build_tag_name'], build_tag['name'])) - if dest_tag['name'] != target_info['dest_tag_name']: - raise SystemError(("Target references unexpected dest_tag_name. " - "Got '%s', expected '%s'. Please contact administrator.") - % (target_info['dest_tag_name'], dest_tag['name'])) + if build_tag["name"] != target_info["build_tag_name"]: + raise SystemError( + "Target references unexpected build_tag_name. " + "Got '%s', expected '%s'. Please contact administrator." + % (target_info["build_tag_name"], build_tag["name"]) + ) + if dest_tag["name"] != target_info["dest_tag_name"]: + raise SystemError( + "Target references unexpected dest_tag_name. " + "Got '%s', expected '%s'. Please contact administrator." + % (target_info["dest_tag_name"], dest_tag["name"]) + ) return self.koji_session.getBuildTarget(name) - def list_tasks_for_components(self, component_builds=None, state='active'): + def list_tasks_for_components(self, component_builds=None, state="active"): """ :param component_builds: list of component builds which we want to check :param state: limit the check only for Koji tasks in the given state @@ -1064,33 +1107,36 @@ class KojiModuleBuilder(GenericBuilder): """ component_builds = component_builds or [] - if state == 'active': - states = [koji.TASK_STATES['FREE'], - koji.TASK_STATES['OPEN'], - koji.TASK_STATES['ASSIGNED']] + if state == "active": + states = [ + koji.TASK_STATES["FREE"], + koji.TASK_STATES["OPEN"], + koji.TASK_STATES["ASSIGNED"], + ] elif state.upper() in koji.TASK_STATES: states = [koji.TASK_STATES[state.upper()]] else: - raise ValueError("State {} is not valid within Koji task states." 
- .format(state)) + raise ValueError("State {} is not valid within Koji task states.".format(state)) tasks = [] - for task in self.koji_session.listTasks(opts={'state': states, - 'decode': True, - 'method': 'build'}): - task_opts = task['request'][-1] + for task in self.koji_session.listTasks( + opts={"state": states, "decode": True, "method": "build"} + ): + task_opts = task["request"][-1] assert isinstance(task_opts, dict), "Task options shall be a dict." - if 'scratch' in task_opts and task_opts['scratch']: + if "scratch" in task_opts and task_opts["scratch"]: continue - if 'mbs_artifact_name' not in task_opts: - task_opts['mbs_artifact_name'] = None - if 'mbs_module_target' not in task_opts: - task_opts['mbs_module_target'] = None + if "mbs_artifact_name" not in task_opts: + task_opts["mbs_artifact_name"] = None + if "mbs_module_target" not in task_opts: + task_opts["mbs_module_target"] = None for c in component_builds: # TODO: https://pagure.io/fm-orchestrator/issue/397 # Subj: Do not mix target/tag when looking for component builds - if (c.package == task_opts['mbs_artifact_name'] and - c.module_build.koji_tag == task_opts['mbs_module_target']): + if ( + c.package == task_opts["mbs_artifact_name"] + and c.module_build.koji_tag == task_opts["mbs_module_target"] + ): tasks.append(task) return tasks @@ -1143,7 +1189,8 @@ class KojiModuleBuilder(GenericBuilder): "packageID": component_id, "userID": mbs_user_id, "state": koji.BUILD_STATES["COMPLETE"], - "queryOpts": {"order": "-build_id", "limit": 1}}) + "queryOpts": {"order": "-build_id", "limit": 1}, + }) # Get the latest Koji build created by MBS for every component in single Koji call. builds_per_component = koji_retrying_multicall_map( @@ -1209,8 +1256,7 @@ class KojiModuleBuilder(GenericBuilder): """ with models.make_session(conf) as db_session: build = models.ModuleBuild.get_build_from_nsvc( - db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), - mmd.get_context()) + db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context()) koji_session = KojiModuleBuilder.get_session(conf, login=False) rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0] nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms) @@ -1218,9 +1264,11 @@ class KojiModuleBuilder(GenericBuilder): def finalize(self, succeeded=True): # Only import to koji CG if the module is "build" and not scratch. 
- if (not self.module.scratch and - self.config.koji_enable_content_generator and - self.module.state == models.BUILD_STATES['build']): + if ( + not self.module.scratch + and self.config.koji_enable_content_generator + and self.module.state == models.BUILD_STATES["build"] + ): cg = KojiContentGenerator(self.module, self.config) cg.koji_import() if conf.koji_cg_devel_module: @@ -1244,8 +1292,10 @@ class KojiModuleBuilder(GenericBuilder): tags = [] koji_tags = session.listTags(rpm_md["build_id"]) for t in koji_tags: - if (not t["name"].endswith("-build") and - t["name"].startswith(tuple(conf.koji_tag_prefixes))): + if ( + not t["name"].endswith("-build") + and t["name"].startswith(tuple(conf.koji_tag_prefixes)) + ): tags.append(t["name"]) return tags diff --git a/module_build_service/builder/MockModuleBuilder.py b/module_build_service/builder/MockModuleBuilder.py index e87d6cf6..91d56171 100644 --- a/module_build_service/builder/MockModuleBuilder.py +++ b/module_build_service/builder/MockModuleBuilder.py @@ -43,7 +43,7 @@ from module_build_service.builder.utils import ( create_local_repo_from_koji_tag, execute_cmd, find_srpm, - get_koji_config + get_koji_config, ) from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder @@ -68,8 +68,7 @@ class MockModuleBuilder(GenericBuilder): except IOError: pass else: - raise IOError("None of {} mock config files found." - .format(conf.mock_config_file)) + raise IOError("None of {} mock config files found.".format(conf.mock_config_file)) # Load yum config file template for cf in conf.yum_config_file: @@ -80,10 +79,9 @@ class MockModuleBuilder(GenericBuilder): except IOError: pass else: - raise IOError("None of {} yum config files found." - .format(conf.yum_config_file)) + raise IOError("None of {} yum config files found.".format(conf.yum_config_file)) - @module_build_service.utils.validate_koji_tag('tag_name') + @module_build_service.utils.validate_koji_tag("tag_name") def __init__(self, owner, module, config, tag_name, components): self.module_str = module.name self.module = module @@ -101,8 +99,7 @@ class MockModuleBuilder(GenericBuilder): if arch_detected: self.arch = arch_detected else: - log.warning("Couldn't determine machine arch. Falling back " - "to configured arch.") + log.warning("Couldn't determine machine arch. 
Falling back to configured arch.") self.arch = conf.arch_fallback else: self.arch = conf.arch_fallback @@ -144,8 +141,8 @@ class MockModuleBuilder(GenericBuilder): for name in os.listdir(self.configdir): os.remove(os.path.join(self.configdir, name)) - log.info("MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" % - (tag_name, self.tag_dir)) + log.info( + "MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" % (tag_name, self.tag_dir)) @property def module_build_tag(self): @@ -175,18 +172,21 @@ class MockModuleBuilder(GenericBuilder): m1_mmd = self.module.mmd() artifacts = Modulemd.SimpleSet() - rpm_files = [f - for f in os.listdir(self.resultsdir) - if f.endswith(".rpm")] + rpm_files = [f for f in os.listdir(self.resultsdir) if f.endswith(".rpm")] if rpm_files: - output = subprocess.check_output(['rpm', - '--queryformat', - '%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{ARCH}\n', - '-qp'] + rpm_files, - cwd=self.resultsdir, - universal_newlines=True) - nevras = output.strip().split('\n') + output = subprocess.check_output( + [ + "rpm", + "--queryformat", + "%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{ARCH}\n", + "-qp", + ] + + rpm_files, + cwd=self.resultsdir, + universal_newlines=True, + ) + nevras = output.strip().split("\n") if len(nevras) != len(rpm_files): raise RuntimeError("rpm -qp returned an unexpected number of lines") @@ -198,20 +198,20 @@ class MockModuleBuilder(GenericBuilder): if name in m1_mmd.get_rpm_filter().get(): continue - pkglist_f.write(rpm_file + '\n') - artifacts.add('{}-{}:{}-{}.{}'.format(name, epoch, version, release, arch)) + pkglist_f.write(rpm_file + "\n") + artifacts.add("{}-{}:{}-{}.{}".format(name, epoch, version, release, arch)) pkglist_f.close() m1_mmd.set_rpm_artifacts(artifacts) # Generate repo. - execute_cmd(['/usr/bin/createrepo_c', '--pkglist', pkglist, path]) + execute_cmd(["/usr/bin/createrepo_c", "--pkglist", pkglist, path]) # ...and inject modules.yaml there if asked. if include_module_yaml: mmd_path = os.path.join(path, "modules.yaml") m1_mmd.dump(mmd_path) - execute_cmd(['/usr/bin/modifyrepo_c', '--mdtype=modules', mmd_path, repodata_path]) + execute_cmd(["/usr/bin/modifyrepo_c", "--mdtype=modules", mmd_path, repodata_path]) def _add_repo(self, name, baseurl, extra=""): """ @@ -247,18 +247,18 @@ class MockModuleBuilder(GenericBuilder): with MockModuleBuilder._config_lock: infile = os.path.join(self.configdir, "mock.cfg") - with open(infile, 'r') as f: + with open(infile, "r") as f: # This looks scary, but it is the way how mock itself loads the # config file ... 
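                # A minimal standalone sketch of the same pattern (file name and
                # key are illustrative): mock config files are plain Python that
                # assigns into a config_opts dict, so exec'ing them against a
                # fresh dict recovers the options:
                #
                #     config_opts = {}
                #     with open("mock.cfg") as f:
                #         exec(compile(f.read(), "mock.cfg", "exec"))
                #     chroot_cmd = config_opts.get("chroot_setup_cmd")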
config_opts = {} - code = compile(f.read(), infile, 'exec') + code = compile(f.read(), infile, "exec") # pylint: disable=exec-used exec(code) self.groups = config_opts["chroot_setup_cmd"].split(" ")[1:] - self.yum_conf = config_opts['yum.conf'] - self.enabled_modules = config_opts['module_enable'] - self.releasever = config_opts['releasever'] + self.yum_conf = config_opts["yum.conf"] + self.enabled_modules = config_opts["module_enable"] + self.releasever = config_opts["releasever"] def _write_mock_config(self): """ @@ -267,8 +267,8 @@ class MockModuleBuilder(GenericBuilder): with MockModuleBuilder._config_lock: config = str(MockModuleBuilder.mock_config_template) - config = config.replace("$root", "%s-%s" % (self.tag_name, - str(threading.current_thread().name))) + config = config.replace( + "$root", "%s-%s" % (self.tag_name, str(threading.current_thread().name))) config = config.replace("$arch", self.arch) config = config.replace("$group", " ".join(self.groups)) config = config.replace("$yum_conf", self.yum_conf) @@ -278,13 +278,13 @@ class MockModuleBuilder(GenericBuilder): # We write the most recent config to "mock.cfg", so thread-related # configs can be later (re-)generated from it using _load_mock_config. outfile = os.path.join(self.configdir, "mock.cfg") - with open(outfile, 'w') as f: + with open(outfile, "w") as f: f.write(config) # Write the config to thread-related configuration file. - outfile = os.path.join(self.configdir, "mock-%s.cfg" % - str(threading.current_thread().name)) - with open(outfile, 'w') as f: + outfile = os.path.join( + self.configdir, "mock-%s.cfg" % str(threading.current_thread().name)) + with open(outfile, "w") as f: f.write(config) def buildroot_connect(self, groups): @@ -319,6 +319,7 @@ class MockModuleBuilder(GenericBuilder): self._write_mock_config() from module_build_service.scheduler.consumer import fake_repo_done_message + fake_repo_done_message(self.tag_name) def tag_artifacts(self, artifacts): @@ -361,11 +362,11 @@ class MockModuleBuilder(GenericBuilder): repo = koji_session.getRepo(repo_name) if repo: baseurl = koji.PathInfo(topdir=koji_config.topurl).repo(repo["id"], repo_name) - baseurl = '{0}/{1}/'.format(baseurl, self.arch) + baseurl = "{0}/{1}/".format(baseurl, self.arch) else: repo_dir = os.path.join(self.config.cache_dir, "koji_tags", tag) - create_local_repo_from_koji_tag(self.config, tag, repo_dir, - [self.arch, "noarch"]) + create_local_repo_from_koji_tag( + self.config, tag, repo_dir, [self.arch, "noarch"]) baseurl = "file://" + repo_dir # Check to see if there are any external repos tied to the tag for ext_repo in koji_session.getTagExternalRepos(repo_name): @@ -382,13 +383,13 @@ class MockModuleBuilder(GenericBuilder): # build_id=1 and task_id=1 are OK here, because we are building just # one RPM at the time. 
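        # (In other words: the Mock backend runs without a real message bus, so
        # it fabricates the same KojiBuildChange event a Koji/fedmsg deployment
        # would emit and feeds it straight into the scheduler's work queue below.)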
msg = module_build_service.messaging.KojiBuildChange( - msg_id='a faked internal message', + msg_id="a faked internal message", build_id=build_id, task_id=build_id, build_name=nvr["name"], build_new_state=state, build_release=nvr["release"], - build_version=nvr["version"] + build_version=nvr["version"], ) module_build_service.scheduler.consumer.work_queue_put(msg) @@ -411,7 +412,7 @@ class MockModuleBuilder(GenericBuilder): os.remove(log_path) # Remove other files containing useless information - elif logf.endswith('-srpm-stdout.log'): + elif logf.endswith("-srpm-stdout.log"): with open(log_path) as f: data = f.read(4096) if re.match("Downloading [^\n]*\n\n\nWrote: [^\n]", data): @@ -421,24 +422,27 @@ class MockModuleBuilder(GenericBuilder): """ Builds the artifact from the SRPM. """ - state = koji.BUILD_STATES['BUILDING'] + state = koji.BUILD_STATES["BUILDING"] # Use the mock config associated with this thread. - mock_config = os.path.join(self.configdir, - "mock-%s.cfg" % str(threading.current_thread().name)) + mock_config = os.path.join( + self.configdir, "mock-%s.cfg" % str(threading.current_thread().name)) # Open the logs to which we will forward mock stdout/stderr. - mock_stdout_log = open(os.path.join(self.resultsdir, - artifact_name + "-mock-stdout.log"), "w") - mock_stderr_log = open(os.path.join(self.resultsdir, - artifact_name + "-mock-stderr.log"), "w") + mock_stdout_log = open( + os.path.join(self.resultsdir, artifact_name + "-mock-stdout.log"), "w") + mock_stderr_log = open( + os.path.join(self.resultsdir, artifact_name + "-mock-stderr.log"), "w") srpm = artifact_name resultsdir = builder.resultsdir try: # Initialize mock. - execute_cmd(["mock", "-v", "-r", mock_config, "--init"], - stdout=mock_stdout_log, stderr=mock_stderr_log) + execute_cmd( + ["mock", "-v", "-r", mock_config, "--init"], + stdout=mock_stdout_log, + stderr=mock_stderr_log, + ) # Start the build and store results to resultsdir builder.build(mock_stdout_log, mock_stderr_log) @@ -448,23 +452,21 @@ class MockModuleBuilder(GenericBuilder): # are put in the scheduler's work queue and are handled # by MBS after the build_srpm() method returns and scope gets # back to scheduler.main.main() method. - state = koji.BUILD_STATES['COMPLETE'] + state = koji.BUILD_STATES["COMPLETE"] self._send_build_change(state, srpm, build_id) - with open(os.path.join(resultsdir, "status.log"), 'w') as f: + with open(os.path.join(resultsdir, "status.log"), "w") as f: f.write("complete\n") except Exception as e: - log.error("Error while building artifact %s: %s" % (artifact_name, - str(e))) + log.error("Error while building artifact %s: %s" % (artifact_name, str(e))) # Emit messages simulating complete build. These messages # are put in the scheduler's work queue and are handled # by MBS after the build_srpm() method returns and scope gets # back to scheduler.main.main() method. - state = koji.BUILD_STATES['FAILED'] - self._send_build_change(state, srpm, - build_id) - with open(os.path.join(resultsdir, "status.log"), 'w') as f: + state = koji.BUILD_STATES["FAILED"] + self._send_build_change(state, srpm, build_id) + with open(os.path.join(resultsdir, "status.log"), "w") as f: f.write("failed\n") mock_stdout_log.close() @@ -493,7 +495,7 @@ class MockModuleBuilder(GenericBuilder): # already in repository ready to be used. This is not a case for Mock # backend in the time we return here. 
reason = "Building %s in Mock" % (artifact_name) - return build_id, koji.BUILD_STATES['BUILDING'], reason, None + return build_id, koji.BUILD_STATES["BUILDING"], reason, None def build(self, artifact_name, source): log.info("Starting building artifact %s: %s" % (artifact_name, source)) @@ -502,8 +504,8 @@ class MockModuleBuilder(GenericBuilder): # generate the thread-specific mock config by writing it to fs again. self._load_mock_config() self._write_mock_config() - mock_config = os.path.join(self.configdir, "mock-%s.cfg" - % str(threading.current_thread().name)) + mock_config = os.path.join( + self.configdir, "mock-%s.cfg" % str(threading.current_thread().name)) # Get the build-id in thread-safe manner. build_id = None @@ -513,15 +515,14 @@ class MockModuleBuilder(GenericBuilder): # Clear resultsdir associated with this thread or in case it does not # exist, create it. - resultsdir = os.path.join(self.resultsdir, - str(threading.current_thread().name)) + resultsdir = os.path.join(self.resultsdir, str(threading.current_thread().name)) if os.path.exists(resultsdir): for name in os.listdir(resultsdir): os.remove(os.path.join(resultsdir, name)) else: os.makedirs(resultsdir) - if source.endswith('.src.rpm'): + if source.endswith(".src.rpm"): builder = SRPMBuilder(mock_config, resultsdir, source) else: # Otherwise, assume we're building from some scm repo @@ -536,7 +537,7 @@ class MockModuleBuilder(GenericBuilder): def cancel_build(self, task_id): pass - def list_tasks_for_components(self, component_builds=None, state='active'): + def list_tasks_for_components(self, component_builds=None, state="active"): pass def repo_from_tag(cls, config, tag_name, arch): @@ -557,8 +558,7 @@ class MockModuleBuilder(GenericBuilder): """ with models.make_session(conf) as db_session: build = models.ModuleBuild.get_build_from_nsvc( - db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), - mmd.get_context()) + db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context()) if build.koji_tag.startswith("repofile://"): # Modules from local repository have already the RPMs filled in mmd. return list(mmd.get_rpm_artifacts().get()) @@ -573,9 +573,7 @@ class BaseBuilder(object): def __init__(self, config, resultsdir): self.config = config self.resultsdir = resultsdir - self.cmd = ["mock", "-v", "-r", config, - "--no-clean", - "--resultdir=%s" % resultsdir] + self.cmd = ["mock", "-v", "-r", config, "--no-clean", "--resultdir=%s" % resultsdir] def build(self, stdout, stderr): execute_cmd(self.cmd, stdout=stdout, stderr=stderr) @@ -602,24 +600,20 @@ class SCMBuilder(BaseBuilder): # See https://bugzilla.redhat.com/show_bug.cgi?id=1459437 for # more info. Once mock-scm supports this feature, we can remove # this code. - distgit_get_branch = \ - "sh -c {}'; git -C {} checkout {}'".format(pipes.quote(distgit_get), - artifact_name, - branch) + distgit_get_branch = "sh -c {}'; git -C {} checkout {}'".format( + pipes.quote(distgit_get), artifact_name, branch) f.writelines([ "config_opts['scm'] = True\n", "config_opts['scm_opts']['method'] = 'distgit'\n", - "config_opts['scm_opts']['package'] = '{}'\n".format( - artifact_name), - "config_opts['scm_opts']['distgit_get'] = {!r}\n".format( - distgit_get_branch), + "config_opts['scm_opts']['package'] = '{}'\n".format(artifact_name), + "config_opts['scm_opts']['distgit_get'] = {!r}\n".format(distgit_get_branch), ]) # Set distgit_src_get only if it's defined. 
if distgit_cmds[1]: - f.write("config_opts['scm_opts']['distgit_src_get'] = '{}'\n".format( - distgit_cmds[1])) + f.write( + "config_opts['scm_opts']['distgit_src_get'] = '{}'\n".format(distgit_cmds[1])) # The local git repositories cloned by `fedpkg clone` typically do not have # the tarballs with sources committed in a git repo. They normally live in lookaside @@ -633,7 +627,7 @@ class SCMBuilder(BaseBuilder): def _make_executable(self, path): mode = os.stat(path).st_mode - mode |= (mode & 0o444) >> 2 # copy R bits to X + mode |= (mode & 0o444) >> 2 # copy R bits to X os.chmod(path, mode) def _get_distgit_commands(self, source): @@ -658,6 +652,6 @@ class SCMBuilder(BaseBuilder): # let's return 0.0 so the type is consistent return self.koji_session.getAverageBuildDuration(component.package) or 0.0 except Exception: - log.debug('The Koji call to getAverageBuildDuration failed. Is Koji properly ' - 'configured?') + log.debug( + "The Koji call to getAverageBuildDuration failed. Is Koji properly configured?") return 0.0 diff --git a/module_build_service/builder/__init__.py b/module_build_service/builder/__init__.py index 90ac5778..59a6d018 100644 --- a/module_build_service/builder/__init__.py +++ b/module_build_service/builder/__init__.py @@ -2,9 +2,7 @@ import pkg_resources from module_build_service.builder.base import GenericBuilder -__all__ = [ - GenericBuilder -] +__all__ = [GenericBuilder] -for entrypoint in pkg_resources.iter_entry_points('mbs.builder_backends'): +for entrypoint in pkg_resources.iter_entry_points("mbs.builder_backends"): GenericBuilder.register_backend_class(entrypoint.load()) diff --git a/module_build_service/builder/base.py b/module_build_service/builder/base.py index ccb36332..0e3b7950 100644 --- a/module_build_service/builder/base.py +++ b/module_build_service/builder/base.py @@ -91,9 +91,10 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): # We are skipping the caching based on the first two arguments of # default_buildroot_groups, because they are "self" and db.session # instance which are different each call we call that method. - default_buildroot_groups_cache = dogpile.cache.make_region( - function_key_generator=create_dogpile_key_generator_func(2)).configure( - 'dogpile.cache.memory') + default_buildroot_groups_cache = ( + dogpile.cache.make_region(function_key_generator=create_dogpile_key_generator_func(2)) + .configure("dogpile.cache.memory") + ) @classmethod def register_backend_class(cls, backend_class): @@ -113,13 +114,14 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): # check if the backend is within allowed backends for the used resolver resolver = module_build_service.resolver.system_resolver if not resolver.is_builder_compatible(backend): - raise ValueError("Builder backend '{}' is not compatible with " - "resolver backend '{}'. Check your configuration." - .format(backend, resolver.backend)) + raise ValueError( + "Builder backend '{}' is not compatible with resolver backend '{}'. 
Check your " + "configuration.".format(backend, resolver.backend) + ) if backend in GenericBuilder.backends: - return GenericBuilder.backends[backend](owner=owner, module=module, - config=config, **extra) + return GenericBuilder.backends[backend]( + owner=owner, module=module, config=config, **extra) else: raise ValueError("Builder backend='%s' not recognized" % backend) @@ -137,8 +139,13 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): """ components = [c.package for c in module.component_builds] builder = GenericBuilder.create( - module.owner, module, config.system, config, tag_name=module.koji_tag, - components=components) + module.owner, + module, + config.system, + config, + tag_name=module.koji_tag, + components=components, + ) if buildroot_connect is True: groups = GenericBuilder.default_buildroot_groups(session, module) builder.buildroot_connect(groups) @@ -156,8 +163,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): the tag with particular name and architecture. """ if backend in GenericBuilder.backends: - return GenericBuilder.backends[backend].repo_from_tag( - config, tag_name, arch) + return GenericBuilder.backends[backend].repo_from_tag(config, tag_name, arch) else: raise ValueError("Builder backend='%s' not recognized" % backend) @@ -310,23 +316,18 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): # Resolve default buildroot groups using the MBS, but only for # non-local modules. - groups = resolver.resolve_profiles( - mmd, ('buildroot', 'srpm-buildroot')) - - groups = { - 'build': groups['buildroot'], - 'srpm-build': groups['srpm-buildroot'], - } + groups = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot")) + groups = {"build": groups["buildroot"], "srpm-build": groups["srpm-buildroot"]} except ValueError: reason = "Failed to gather buildroot groups from SCM." log.exception(reason) - module.transition(conf, state="failed", state_reason=reason, failure_type='user') + module.transition(conf, state="failed", state_reason=reason, failure_type="user") session.commit() raise return groups @abstractmethod - def list_tasks_for_components(self, component_builds=None, state='active'): + def list_tasks_for_components(self, component_builds=None, state="active"): """ :param component_builds: list of component builds which we want to check :param state: limit the check only for tasks in the given state @@ -416,13 +417,15 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): continue if average_time_to_build < 0: - log.warning("Negative average build duration for component %s: %s", - component, str(average_time_to_build)) + log.warning( + "Negative average build duration for component %s: %s", + component, str(average_time_to_build), + ) weights[component] = weight continue # Increase the task weight by 0.75 for every hour of build duration. - adj = (average_time_to_build / ((60 * 60) / 0.75)) + adj = average_time_to_build / ((60 * 60) / 0.75) # cap the adjustment at +4.5 weight += min(4.5, adj) diff --git a/module_build_service/builder/utils.py b/module_build_service/builder/utils.py index 42a6cfca..89fb3e91 100644 --- a/module_build_service/builder/utils.py +++ b/module_build_service/builder/utils.py @@ -58,10 +58,8 @@ def get_koji_config(mbs_config): # Placed here to avoid py2/py3 conflicts... 
import koji - koji_config = munch.Munch(koji.read_config( - profile_name=mbs_config.koji_profile, - user_config=mbs_config.koji_config, - )) + koji_config = munch.Munch( + koji.read_config(profile_name=mbs_config.koji_profile, user_config=mbs_config.koji_config)) # Timeout after 10 minutes. The default is 12 hours. koji_config["timeout"] = 60 * 10 return koji_config @@ -93,7 +91,7 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None): log.exception("Failed to list rpms in tag %r" % tag) # Reformat builds so they are dict with build_id as a key. - builds = {build['build_id']: build for build in builds} + builds = {build["build_id"]: build for build in builds} # Prepare pathinfo we will use to generate the URL. pathinfo = koji.PathInfo(topdir=session.opts["topurl"]) @@ -104,26 +102,25 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None): # Prepare the list of URLs to download download_args = [] for rpm in rpms: - build_info = builds[rpm['build_id']] + build_info = builds[rpm["build_id"]] # We do not download debuginfo packages or packages built for archs # we are not interested in. - if koji.is_debuginfo(rpm['name']) or not rpm['arch'] in archs: + if koji.is_debuginfo(rpm["name"]) or not rpm["arch"] in archs: continue fname = pathinfo.rpm(rpm) relpath = os.path.basename(fname) local_fn = os.path.join(repo_dir, relpath) # Download only when the RPM is not downloaded or the size does not match. - if not os.path.exists(local_fn) or os.path.getsize(local_fn) != rpm['size']: + if not os.path.exists(local_fn) or os.path.getsize(local_fn) != rpm["size"]: if os.path.exists(local_fn): os.remove(local_fn) repo_changed = True - url = pathinfo.build(build_info) + '/' + fname + url = pathinfo.build(build_info) + "/" + fname download_args.append((url, local_fn)) - log.info( - "Downloading %d packages from Koji tag %s to %s" % (len(download_args), tag, repo_dir)) + log.info("Downloading %d packages from Koji tag %s to %s" % (len(download_args), tag, repo_dir)) # Create the output directory try: @@ -162,4 +159,4 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None): shutil.rmtree(repodata_path) log.info("Creating local repository in %s" % repo_dir) - execute_cmd(['/usr/bin/createrepo_c', repo_dir]) + execute_cmd(["/usr/bin/createrepo_c", repo_dir]) diff --git a/module_build_service/config.py b/module_build_service/config.py index d3a75987..d216334e 100644 --- a/module_build_service/config.py +++ b/module_build_service/config.py @@ -35,12 +35,12 @@ from module_build_service import logger # TODO: It'd be nice to reuse this from models.ModuleBuild.rebuild_strategies but models.py # currently relies on this file, so we can't import it -SUPPORTED_STRATEGIES = ['changed-and-after', 'only-changed', 'all'] +SUPPORTED_STRATEGIES = ["changed-and-after", "only-changed", "all"] SUPPORTED_RESOLVERS = { - 'mbs': {'builders': ['mock']}, - 'db': {'builders': ['koji', 'mock', 'copr']}, - 'local': {'builders': ['mock']}, + "mbs": {"builders": ["mock"]}, + "db": {"builders": ["koji", "mock", "copr"]}, + "local": {"builders": ["mock"]}, } @@ -48,21 +48,20 @@ def init_config(app): """ Configure MBS and the Flask app """ config_module = None - config_file = '/etc/module-build-service/config.py' - config_section = 'DevConfiguration' + config_file = "/etc/module-build-service/config.py" + config_section = "DevConfiguration" # automagically detect production environment: # - existing and readable config_file presets ProdConfiguration try: with open(config_file): - 
config_section = 'ProdConfiguration' + config_section = "ProdConfiguration" except Exception: pass # - Flask app within mod_wsgi presets ProdConfiguration - flask_app_env = hasattr(app, 'request') and hasattr(app.request, 'environ') - if flask_app_env and any([var.startswith('mod_wsgi.') - for var in app.request.environ]): - config_section = 'ProdConfiguration' + flask_app_env = hasattr(app, "request") and hasattr(app.request, "environ") + if flask_app_env and any([var.startswith("mod_wsgi.") for var in app.request.environ]): + config_section = "ProdConfiguration" # Load LocalBuildConfiguration section in case we are building modules # locally. @@ -73,50 +72,49 @@ def init_config(app): config_section = "LocalBuildConfiguration" # try getting config_file from os.environ - if 'MBS_CONFIG_FILE' in os.environ: - config_file = os.environ['MBS_CONFIG_FILE'] + if "MBS_CONFIG_FILE" in os.environ: + config_file = os.environ["MBS_CONFIG_FILE"] # try getting config_section from os.environ - if 'MBS_CONFIG_SECTION' in os.environ: - config_section = os.environ['MBS_CONFIG_SECTION'] + if "MBS_CONFIG_SECTION" in os.environ: + config_section = os.environ["MBS_CONFIG_SECTION"] # preferably get these values from Flask app if flask_app_env: # try getting config_file from Flask app - if 'MBS_CONFIG_FILE' in app.request.environ: - config_file = app.request.environ['MBS_CONFIG_FILE'] + if "MBS_CONFIG_FILE" in app.request.environ: + config_file = app.request.environ["MBS_CONFIG_FILE"] # try getting config_section from Flask app - if 'MBS_CONFIG_SECTION' in app.request.environ: - config_section = app.request.environ['MBS_CONFIG_SECTION'] + if "MBS_CONFIG_SECTION" in app.request.environ: + config_section = app.request.environ["MBS_CONFIG_SECTION"] + + true_options = ("1", "on", "true", "y", "yes") # TestConfiguration shall only be used for running tests, otherwise... - if any(['py.test' in arg or 'pytest' in arg for arg in sys.argv]): - config_section = 'TestConfiguration' + if any(["py.test" in arg or "pytest" in arg for arg in sys.argv]): + config_section = "TestConfiguration" from conf import config + config_module = config # ...MODULE_BUILD_SERVICE_DEVELOPER_ENV has always the last word # and overrides anything previously set before! # Again, check Flask app (preferably) or fallback to os.environ. # In any of the following cases, use configuration directly from MBS package # -> /conf/config.py. 
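    # For illustration (the environment value is an example), the
    # normalization via true_options accepts any common truthy spelling:
    #
    #     true_options = ("1", "on", "true", "y", "yes")
    #     os.environ["MODULE_BUILD_SERVICE_DEVELOPER_ENV"] = "Yes"
    #     os.environ.get("MODULE_BUILD_SERVICE_DEVELOPER_ENV", "").lower() in true_options
    #     # -> True, so config_section is set to "DevConfiguration"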
- elif (flask_app_env and - 'MODULE_BUILD_SERVICE_DEVELOPER_ENV' in app.request.environ): - if app.request.environ['MODULE_BUILD_SERVICE_DEVELOPER_ENV'].lower() in ( - '1', 'on', 'true', 'y', 'yes'): - config_section = 'DevConfiguration' + elif flask_app_env and "MODULE_BUILD_SERVICE_DEVELOPER_ENV" in app.request.environ: + if app.request.environ["MODULE_BUILD_SERVICE_DEVELOPER_ENV"].lower() in true_options: + config_section = "DevConfiguration" from conf import config + config_module = config - elif ('MODULE_BUILD_SERVICE_DEVELOPER_ENV' in os.environ and - os.environ['MODULE_BUILD_SERVICE_DEVELOPER_ENV'].lower() in ( - '1', 'on', 'true', 'y', 'yes')): - config_section = 'DevConfiguration' + elif os.environ.get("MODULE_BUILD_SERVICE_DEVELOPER_ENV", "").lower() in true_options: + config_section = "DevConfiguration" from conf import config + config_module = config # try loading configuration from file if not config_module: try: - config_module = imp.load_source('mbs_runtime_config', - config_file) + config_module = imp.load_source("mbs_runtime_config", config_file) except Exception: - raise SystemError("Configuration file {} was not found." - .format(config_file)) + raise SystemError("Configuration file {} was not found.".format(config_file)) # finally configure MBS and the Flask app config_section_obj = getattr(config_module, config_section) @@ -129,431 +127,482 @@ class Path: """ Config type for paths. Expands the users home directory. """ + pass class Config(object): """Class representing the orchestrator configuration.""" + _defaults = { - 'debug': { - 'type': bool, - 'default': False, - 'desc': 'Debug mode'}, - 'system': { - 'type': str, - 'default': 'koji', - 'desc': 'The buildsystem to use.'}, - 'db': { - 'type': str, - 'default': '', - 'desc': 'RDB URL.'}, - 'default_dist_tag_prefix': { - 'type': str, - 'default': 'module+', - 'desc': 'Default dist-tag prefix for built modules.'}, - 'polling_interval': { - 'type': int, - 'default': 0, - 'desc': 'Polling interval, in seconds.'}, - 'cache_dir': { - 'type': Path, - 'default': '~/modulebuild/cache', - 'desc': 'Cache directory'}, - 'mbs_url': { - 'type': str, - 'default': 'https://mbs.fedoraproject.org/module-build-service/1/module-builds/', - 'desc': 'MBS instance url for MBSResolver'}, - 'check_for_eol': { - 'type': bool, - 'default': False, - 'desc': 'Flag to determine whether or not MBS should block EOL modules from building.'}, - 'pdc_url': { - 'type': str, - 'default': 'https://pdc.fedoraproject.org/rest_api/v1', - 'desc': 'PDC URL, used for checking stream EOL.'}, - 'koji_config': { - 'type': str, - 'default': None, - 'desc': 'Koji config file.'}, - 'koji_profile': { - 'type': str, - 'default': None, - 'desc': 'Koji config profile.'}, - 'arches': { - 'type': list, - 'default': [], - 'desc': 'Koji architectures.'}, - 'allow_arch_override': { - 'type': bool, - 'default': False, - 'desc': 'Allow to support a custom architecture set'}, - 'koji_build_priority': { - 'type': int, - 'default': 10, - 'desc': ''}, - 'koji_repository_url': { - 'type': str, - 'default': None, - 'desc': 'Koji repository URL.'}, - 'koji_build_macros_target': { - 'type': str, - 'default': '', - 'desc': 'Target to build "module-build-macros" RPM in.'}, - 'koji_tag_prefixes': { - 'type': list, - 'default': ['module', 'scrmod'], - 'desc': 'List of allowed koji tag prefixes.'}, - 'koji_tag_extra_opts': { - 'type': dict, - 'default': { - 'mock.package_manager': 'dnf', + "debug": {"type": bool, "default": False, "desc": "Debug mode"}, + "system": {"type": str, 
"default": "koji", "desc": "The buildsystem to use."}, + "db": {"type": str, "default": "", "desc": "RDB URL."}, + "default_dist_tag_prefix": { + "type": str, + "default": "module+", + "desc": "Default dist-tag prefix for built modules.", + }, + "polling_interval": {"type": int, "default": 0, "desc": "Polling interval, in seconds."}, + "cache_dir": {"type": Path, "default": "~/modulebuild/cache", "desc": "Cache directory"}, + "mbs_url": { + "type": str, + "default": "https://mbs.fedoraproject.org/module-build-service/1/module-builds/", + "desc": "MBS instance url for MBSResolver", + }, + "check_for_eol": { + "type": bool, + "default": False, + "desc": "Flag to determine whether or not MBS should block EOL modules from building.", + }, + "pdc_url": { + "type": str, + "default": "https://pdc.fedoraproject.org/rest_api/v1", + "desc": "PDC URL, used for checking stream EOL.", + }, + "koji_config": {"type": str, "default": None, "desc": "Koji config file."}, + "koji_profile": {"type": str, "default": None, "desc": "Koji config profile."}, + "arches": {"type": list, "default": [], "desc": "Koji architectures."}, + "allow_arch_override": { + "type": bool, + "default": False, + "desc": "Allow to support a custom architecture set", + }, + "koji_build_priority": {"type": int, "default": 10, "desc": ""}, + "koji_repository_url": {"type": str, "default": None, "desc": "Koji repository URL."}, + "koji_build_macros_target": { + "type": str, + "default": "", + "desc": 'Target to build "module-build-macros" RPM in.', + }, + "koji_tag_prefixes": { + "type": list, + "default": ["module", "scrmod"], + "desc": "List of allowed koji tag prefixes.", + }, + "koji_tag_extra_opts": { + "type": dict, + "default": { + "mock.package_manager": "dnf", # This is needed to include all the Koji builds (and therefore # all the packages) from all inherited tags into this tag. # See https://pagure.io/koji/issue/588 and # https://pagure.io/fm-orchestrator/issue/660 for background. - 'repo_include_all': True, + "repo_include_all": True, # Has been requested by Fedora infra in # https://pagure.io/fedora-infrastructure/issue/7620. # Disables systemd-nspawn for chroot. 
- 'mock.new_chroot': 0, + "mock.new_chroot": 0, }, - 'desc': 'Extra options set for newly created Koji tags.'}, - 'koji_target_delete_time': { - 'type': int, - 'default': 24 * 3600, - 'desc': 'Time in seconds after which the Koji target of ' - 'built module is deleted'}, - 'koji_enable_content_generator': { - 'type': bool, - 'default': True, - 'desc': 'Enable or disable imports to koji using content ' - 'generator api'}, - 'allow_name_override_from_scm': { - 'type': bool, - 'default': False, - 'desc': 'Allow modulemd files to override the module name ' - 'if different from the scm repo name.'}, - 'allow_stream_override_from_scm': { - 'type': bool, - 'default': False, - 'desc': 'Allow modulemd files to override the module stream ' - 'if different from the scm repo branch.'}, - 'allow_custom_scmurls': { - 'type': bool, - 'default': False, - 'desc': 'Allow custom scmurls.'}, - 'rpms_default_repository': { - 'type': str, - 'default': 'https://src.fedoraproject.org/rpms/', - 'desc': 'RPMs default repository URL.'}, - 'rpms_allow_repository': { - 'type': bool, - 'default': False, - 'desc': 'Allow custom RPMs repositories.'}, - 'rpms_default_cache': { - 'type': str, - 'default': 'http://pkgs.fedoraproject.org/repo/pkgs/', - 'desc': 'RPMs default cache URL.'}, - 'rpms_allow_cache': { - 'type': bool, - 'default': False, - 'desc': 'Allow custom RPMs cache.'}, - 'modules_default_repository': { - 'type': str, - 'default': 'https://src.fedoraproject.org/modules/', - 'desc': 'Included modules default repository URL.'}, - 'modules_allow_repository': { - 'type': bool, - 'default': False, - 'desc': 'Allow custom included modules repositories.'}, - 'allowed_groups': { - 'type': set, - 'default': set(['packager']), - 'desc': 'The set of groups allowed to submit builds.'}, - 'allowed_groups_to_import_module': { - 'type': set, - 'default': set(), - 'desc': 'The set of groups allowed to import module builds.'}, - 'log_backend': { - 'type': str, - 'default': None, - 'desc': 'Log backend'}, - 'log_file': { - 'type': str, - 'default': '', - 'desc': 'Path to log file'}, - 'log_level': { - 'type': str, - 'default': 0, - 'desc': 'Log level'}, - 'build_logs_dir': { - 'type': Path, - 'default': "", - 'desc': 'Directory to store module build logs to.'}, - 'build_logs_name_format': { - 'type': str, - 'default': "build-{id}.log", - 'desc': ('Format of a module build log\'s name. 
Use `Build` attributes as formatting ' - 'kwargs')}, - 'krb_keytab': { - 'type': None, - 'default': None, - 'desc': ''}, - 'krb_principal': { - 'type': None, - 'default': None, - 'desc': ''}, - 'messaging': { - 'type': str, - 'default': 'fedmsg', - 'desc': 'The messaging system to use.'}, - 'messaging_topic_prefix': { - 'type': list, - 'default': ['org.fedoraproject.prod'], - 'desc': 'The messaging system topic prefixes which we are interested in.'}, - 'amq_recv_addresses': { - 'type': list, - 'default': [], - 'desc': 'Apache MQ broker url to receive messages.'}, - 'amq_dest_address': { - 'type': str, - 'default': '', - 'desc': 'Apache MQ broker address to send messages'}, - 'amq_cert_file': { - 'type': str, - 'default': '', - 'desc': 'Certificate for Apache MQ broker auth.'}, - 'amq_private_key_file': { - 'type': str, - 'default': '', - 'desc': 'Private key for Apache MQ broker auth.'}, - 'amq_trusted_cert_file': { - 'type': str, - 'default': '', - 'desc': 'Trusted certificate for ssl connection.'}, - 'distgits': { - 'type': dict, - 'default': { - 'https://src.fedoraproject.org': ('fedpkg clone --anonymous {}', - 'fedpkg --release module sources'), - 'file://': ('git clone {repo_path}', None), + "desc": "Extra options set for newly created Koji tags.", + }, + "koji_target_delete_time": { + "type": int, + "default": 24 * 3600, + "desc": "Time in seconds after which the Koji target of built module is deleted", + }, + "koji_enable_content_generator": { + "type": bool, + "default": True, + "desc": "Enable or disable imports to koji using content generator api", + }, + "allow_name_override_from_scm": { + "type": bool, + "default": False, + "desc": "Allow modulemd files to override the module name " + "if different from the scm repo name.", + }, + "allow_stream_override_from_scm": { + "type": bool, + "default": False, + "desc": "Allow modulemd files to override the module stream " + "if different from the scm repo branch.", + }, + "allow_custom_scmurls": {"type": bool, "default": False, "desc": "Allow custom scmurls."}, + "rpms_default_repository": { + "type": str, + "default": "https://src.fedoraproject.org/rpms/", + "desc": "RPMs default repository URL.", + }, + "rpms_allow_repository": { + "type": bool, + "default": False, + "desc": "Allow custom RPMs repositories.", + }, + "rpms_default_cache": { + "type": str, + "default": "http://pkgs.fedoraproject.org/repo/pkgs/", + "desc": "RPMs default cache URL.", + }, + "rpms_allow_cache": {"type": bool, "default": False, "desc": "Allow custom RPMs cache."}, + "modules_default_repository": { + "type": str, + "default": "https://src.fedoraproject.org/modules/", + "desc": "Included modules default repository URL.", + }, + "modules_allow_repository": { + "type": bool, + "default": False, + "desc": "Allow custom included modules repositories.", + }, + "allowed_groups": { + "type": set, + "default": set(["packager"]), + "desc": "The set of groups allowed to submit builds.", + }, + "allowed_groups_to_import_module": { + "type": set, + "default": set(), + "desc": "The set of groups allowed to import module builds.", + }, + "log_backend": {"type": str, "default": None, "desc": "Log backend"}, + "log_file": {"type": str, "default": "", "desc": "Path to log file"}, + "log_level": {"type": str, "default": 0, "desc": "Log level"}, + "build_logs_dir": { + "type": Path, + "default": "", + "desc": "Directory to store module build logs to.", + }, + "build_logs_name_format": { + "type": str, + "default": "build-{id}.log", + "desc": ( + "Format of a module build log's 
name. Use `Build` attributes as formatting " + "kwargs" + ), + }, + "krb_keytab": {"type": None, "default": None, "desc": ""}, + "krb_principal": {"type": None, "default": None, "desc": ""}, + "messaging": {"type": str, "default": "fedmsg", "desc": "The messaging system to use."}, + "messaging_topic_prefix": { + "type": list, + "default": ["org.fedoraproject.prod"], + "desc": "The messaging system topic prefixes which we are interested in.", + }, + "amq_recv_addresses": { + "type": list, + "default": [], + "desc": "Apache MQ broker url to receive messages.", + }, + "amq_dest_address": { + "type": str, + "default": "", + "desc": "Apache MQ broker address to send messages", + }, + "amq_cert_file": { + "type": str, + "default": "", + "desc": "Certificate for Apache MQ broker auth.", + }, + "amq_private_key_file": { + "type": str, + "default": "", + "desc": "Private key for Apache MQ broker auth.", + }, + "amq_trusted_cert_file": { + "type": str, + "default": "", + "desc": "Trusted certificate for ssl connection.", + }, + "distgits": { + "type": dict, + "default": { + "https://src.fedoraproject.org": ( + "fedpkg clone --anonymous {}", + "fedpkg --release module sources", + ), + "file://": ("git clone {repo_path}", None), }, - 'desc': 'Mapping between dist-git and command to '}, - 'mock_config': { - 'type': str, - 'default': 'fedora-25-x86_64', - 'desc': ''}, - 'mock_config_file': { - 'type': list, - 'default': ['/etc/module-build-service/mock.cfg', 'conf/mock.cfg'], - 'desc': 'List of mock config file paths in order of preference.'}, - 'mock_build_srpm_cmd': { - 'type': str, - 'default': 'fedpkg --release f26 srpm', - 'desc': ''}, - 'mock_resultsdir': { - 'type': Path, - 'default': '~/modulebuild/builds', - 'desc': 'Directory for Mock build results.'}, - 'mock_purge_useless_logs': { - 'type': bool, - 'default': True, - 'desc': 'Remove empty or otherwise useless log files.'}, - 'arch_autodetect': { - 'type': bool, - 'default': True, - 'desc': 'Auto-detect machine arch when configuring builder.'}, - 'arch_fallback': { - 'type': str, - 'default': 'x86_64', - 'desc': 'Fallback arch if auto-detection is off or unable to determine it.'}, - 'scmurls': { - 'type': list, - 'default': [], - 'desc': 'Allowed SCM URLs for submitted module.'}, - 'yaml_submit_allowed': { - 'type': bool, - 'default': False, - 'desc': 'Is it allowed to directly submit build by modulemd yaml file?'}, - 'num_concurrent_builds': { - 'type': int, - 'default': 0, - 'desc': 'Number of concurrent component builds.'}, - 'net_timeout': { - 'type': int, - 'default': 120, - 'desc': 'Global network timeout for read/write operations, in seconds.'}, - 'net_retry_interval': { - 'type': int, - 'default': 30, - 'desc': 'Global network retry interval for read/write operations, in seconds.'}, - 'scm_net_timeout': { - 'type': int, - 'default': 60, - 'desc': 'Network timeout for SCM operations, in seconds.'}, - 'scm_net_retry_interval': { - 'type': int, - 'default': 15, - 'desc': 'Network retry interval for SCM operations, in seconds.'}, - 'no_auth': { - 'type': bool, - 'default': False, - 'desc': 'Disable client authentication.'}, - 'admin_groups': { - 'type': set, - 'default': set([]), - 'desc': 'The set of groups allowed to manage MBS.'}, - 'yum_config_file': { - 'type': list, - 'default': ['/etc/module-build-service/yum.conf', 'conf/yum.conf'], - 'desc': 'List of yum config file paths in order of preference.'}, - 'auth_method': { - 'type': str, - 'default': 'oidc', - 'desc': 'Authentiation method to MBS. 
Options are oidc or kerberos'}, - 'kerberos_http_host': { - 'type': str, - 'default': '', - 'desc': ('Hardcodes the HTTP host MBS identifies as in Kerberos. If this isn\'t set, ' - 'it will be derived dynamically.')}, - 'kerberos_keytab': { - 'type': str, - 'default': '', - 'desc': ('Overrides the use of the environment variable KRB5_KTNAME, which specifies ' - 'the location to the Kerberos keytab for authentication.')}, - 'ldap_uri': { - 'type': str, - 'default': '', - 'desc': 'LDAP URI to query for group information when using Kerberos authentication'}, - 'ldap_groups_dn': { - 'type': str, - 'default': '', - 'desc': ('The distinguished name of the container or organizational unit containing ' - 'the groups in LDAP')}, - 'base_module_names': { - 'type': list, - 'default': ['platform'], - 'desc': ("List of base module names which define the product version " - "(by their stream) of modules depending on them.")}, - 'base_module_arches': { - 'type': dict, - 'default': {}, - 'desc': 'Per base-module name:stream Koji arches list.'}, - 'koji_cg_tag_build': { - 'type': bool, - 'default': True, - 'desc': 'Indicate whether tagging build is enabled during importing ' - 'module to Koji.'}, - 'koji_cg_devel_module': { - 'type': bool, - 'default': True, - 'desc': 'Indicate whether a devel module should be imported into Koji.'}, - 'koji_cg_build_tag_template': { - 'type': str, - 'default': "{}-modular-updates-candidate", - 'desc': "Name of a Koji tag where the top-level Content Generator " - "build is tagged to. The '{}' string is replaced by a " - "stream name of a base module on top of which the " - "module is built."}, - 'koji_cg_default_build_tag': { - 'type': str, - 'default': "modular-updates-candidate", - 'desc': "The name of Koji tag which should be used as fallback " - "when koji_cg_build_tag_template tag is not found in " - "Koji."}, - 'rebuild_strategy': { - 'type': str, - 'default': 'changed-and-after', - 'desc': 'The module rebuild strategy to use by default.'}, - 'rebuild_strategy_allow_override': { - 'type': bool, - 'default': False, - 'desc': ('Allows a user to specify the rebuild strategy they want to use when ' - 'submitting a module build.')}, - 'rebuild_strategies_allowed': { - 'type': list, - 'default': SUPPORTED_STRATEGIES, - 'desc': ('The allowed module rebuild strategies. This is only used when ' - 'REBUILD_STRATEGY_ALLOW_OVERRIDE is True.')}, - 'cleanup_failed_builds_time': { - 'type': int, - 'default': 180, - 'desc': ('Time in days when to cleanup failed module builds and transition them to ' - 'the "garbage" state.')}, - 'cleanup_stuck_builds_time': { - 'type': int, - 'default': 7, - 'desc': ('Time in days when to cleanup stuck module builds and transition them to ' - 'the "failed" state. The module has to be in a state defined by the ' - '"cleanup_stuck_builds_states" option.')}, - 'cleanup_stuck_builds_states': { - 'type': list, - 'default': ["init", "build"], - 'desc': ('States of builds which will be considered to move to failed state when a' - ' build is in one of those states longer than the value configured in the ' - '"cleanup_stuck_builds_time"')}, - 'resolver': { - 'type': str, - 'default': 'db', - 'desc': 'Where to look up for modules. 
Note that this can (and ' - 'probably will) be builder-specific.'}, - 'koji_external_repo_url_prefix': { - 'type': str, - 'default': 'https://kojipkgs.fedoraproject.org/', - 'desc': 'URL prefix of base module\'s external repo.'}, - 'allowed_users': { - 'type': set, - 'default': set(), - 'desc': 'The users/service accounts that don\'t require to be part of a group'}, - 'br_stream_override_module': { - 'type': str, - 'default': 'platform', - 'desc': ('The module name to override in the buildrequires based on the branch name. ' - '"br_stream_override_regexes" must also be set for this to take ' - 'effect.') + "desc": "Mapping between dist-git and command to ", }, - 'br_stream_override_regexes': { - 'type': list, - 'default': [], - 'desc': ('The list of regexes used to parse the stream override from the branch name. ' - '"br_stream_override_module" must also be set for this to take ' - 'effect. The regexes can contain multiple capture groups that will be ' - 'concatenated. Any null capture groups will be ignored. The first regex that ' - 'matches the branch will be used.') + "mock_config": {"type": str, "default": "fedora-25-x86_64", "desc": ""}, + "mock_config_file": { + "type": list, + "default": ["/etc/module-build-service/mock.cfg", "conf/mock.cfg"], + "desc": "List of mock config file paths in order of preference.", }, - 'default_buildroot_packages': { - 'type': list, - 'default': ["bash", "bzip2", "coreutils", "cpio", "diffutils", "findutils", "gawk", - "gcc", "gcc-c++", "grep", "gzip", "info", "make", - "patch", "fedora-release", "redhat-rpm-config", "rpm-build", "sed", - "shadow-utils", "tar", "unzip", "util-linux", "which", "xz"], - 'desc': ('The list packages for offline module build RPM buildroot.') + "mock_build_srpm_cmd": {"type": str, "default": "fedpkg --release f26 srpm", "desc": ""}, + "mock_resultsdir": { + "type": Path, + "default": "~/modulebuild/builds", + "desc": "Directory for Mock build results.", }, - 'default_srpm_buildroot_packages': { - 'type': list, - 'default': ["bash", "gnupg2", "fedora-release", - "redhat-rpm-config", "fedpkg-minimal", "rpm-build", "shadow-utils"], - 'desc': ('The list packages for offline module build RPM buildroot.') + "mock_purge_useless_logs": { + "type": bool, + "default": True, + "desc": "Remove empty or otherwise useless log files.", }, - 'greenwave_decision_context': { - 'type': str, - 'default': '', - 'desc': 'The Greenwave decision context that determines a module\'s gating status.', + "arch_autodetect": { + "type": bool, + "default": True, + "desc": "Auto-detect machine arch when configuring builder.", }, - 'allowed_disttag_marking_module_names': { - 'type': list, - 'default': [], - 'desc': ('List of modules that are allowed to influence the RPM disttag when ' - 'buildrequired. These modules can set xmd.mbs.disttag_marking to do so. 
MBS ' - 'will use this list order to determine which modules take precedence.') + "arch_fallback": { + "type": str, + "default": "x86_64", + "desc": "Fallback arch if auto-detection is off or unable to determine it.", + }, + "scmurls": {"type": list, "default": [], "desc": "Allowed SCM URLs for submitted module."}, + "yaml_submit_allowed": { + "type": bool, + "default": False, + "desc": "Is it allowed to directly submit build by modulemd yaml file?", + }, + "num_concurrent_builds": { + "type": int, + "default": 0, + "desc": "Number of concurrent component builds.", + }, + "net_timeout": { + "type": int, + "default": 120, + "desc": "Global network timeout for read/write operations, in seconds.", + }, + "net_retry_interval": { + "type": int, + "default": 30, + "desc": "Global network retry interval for read/write operations, in seconds.", + }, + "scm_net_timeout": { + "type": int, + "default": 60, + "desc": "Network timeout for SCM operations, in seconds.", + }, + "scm_net_retry_interval": { + "type": int, + "default": 15, + "desc": "Network retry interval for SCM operations, in seconds.", + }, + "no_auth": {"type": bool, "default": False, "desc": "Disable client authentication."}, + "admin_groups": { + "type": set, + "default": set([]), + "desc": "The set of groups allowed to manage MBS.", + }, + "yum_config_file": { + "type": list, + "default": ["/etc/module-build-service/yum.conf", "conf/yum.conf"], + "desc": "List of yum config file paths in order of preference.", + }, + "auth_method": { + "type": str, + "default": "oidc", + "desc": "Authentication method to MBS. Options are oidc or kerberos", + }, + "kerberos_http_host": { + "type": str, + "default": "", + "desc": ( + "Hardcodes the HTTP host MBS identifies as in Kerberos. If this isn't set, " + "it will be derived dynamically." + ), + }, + "kerberos_keytab": { + "type": str, + "default": "", + "desc": ( + "Overrides the use of the environment variable KRB5_KTNAME, which specifies " + "the location to the Kerberos keytab for authentication." + ), + }, + "ldap_uri": { + "type": str, + "default": "", + "desc": "LDAP URI to query for group information when using Kerberos authentication", + }, + "ldap_groups_dn": { + "type": str, + "default": "", + "desc": ( + "The distinguished name of the container or organizational unit containing " + "the groups in LDAP" + ), + }, + "base_module_names": { + "type": list, + "default": ["platform"], + "desc": ( + "List of base module names which define the product version " + "(by their stream) of modules depending on them." + ), + }, + "base_module_arches": { + "type": dict, + "default": {}, + "desc": "Per base-module name:stream Koji arches list.", + }, + "koji_cg_tag_build": { + "type": bool, + "default": True, + "desc": "Indicate whether tagging build is enabled during importing module to Koji.", + }, + "koji_cg_devel_module": { + "type": bool, + "default": True, + "desc": "Indicate whether a devel module should be imported into Koji.", + }, + "koji_cg_build_tag_template": { + "type": str, + "default": "{}-modular-updates-candidate", + "desc": "Name of a Koji tag where the top-level Content Generator " + "build is tagged to. 
The '{}' string is replaced by a " + "stream name of a base module on top of which the " + "module is built.", + }, + "koji_cg_default_build_tag": { + "type": str, + "default": "modular-updates-candidate", + "desc": "The name of Koji tag which should be used as fallback " + "when koji_cg_build_tag_template tag is not found in " + "Koji.", + }, + "rebuild_strategy": { + "type": str, + "default": "changed-and-after", + "desc": "The module rebuild strategy to use by default.", + }, + "rebuild_strategy_allow_override": { + "type": bool, + "default": False, + "desc": ( + "Allows a user to specify the rebuild strategy they want to use when " + "submitting a module build." + ), + }, + "rebuild_strategies_allowed": { + "type": list, + "default": SUPPORTED_STRATEGIES, + "desc": ( + "The allowed module rebuild strategies. This is only used when " + "REBUILD_STRATEGY_ALLOW_OVERRIDE is True." + ), + }, + "cleanup_failed_builds_time": { + "type": int, + "default": 180, + "desc": ( + "Time in days when to cleanup failed module builds and transition them to " + 'the "garbage" state.' + ), + }, + "cleanup_stuck_builds_time": { + "type": int, + "default": 7, + "desc": ( + "Time in days when to cleanup stuck module builds and transition them to " + 'the "failed" state. The module has to be in a state defined by the ' + '"cleanup_stuck_builds_states" option.' + ), + }, + "cleanup_stuck_builds_states": { + "type": list, + "default": ["init", "build"], + "desc": ( + "States of builds which will be considered to move to failed state when a" + " build is in one of those states longer than the value configured in the " + '"cleanup_stuck_builds_time"' + ), + }, + "resolver": { + "type": str, + "default": "db", + "desc": "Where to look up for modules. Note that this can (and " + "probably will) be builder-specific.", + }, + "koji_external_repo_url_prefix": { + "type": str, + "default": "https://kojipkgs.fedoraproject.org/", + "desc": "URL prefix of base module's external repo.", + }, + "allowed_users": { + "type": set, + "default": set(), + "desc": "The users/service accounts that don't require to be part of a group", + }, + "br_stream_override_module": { + "type": str, + "default": "platform", + "desc": ( + "The module name to override in the buildrequires based on the branch name. " + '"br_stream_override_regexes" must also be set for this to take ' + "effect." + ), + }, + "br_stream_override_regexes": { + "type": list, + "default": [], + "desc": ( + "The list of regexes used to parse the stream override from the branch name. " + '"br_stream_override_module" must also be set for this to take ' + "effect. The regexes can contain multiple capture groups that will be " + "concatenated. Any null capture groups will be ignored. The first regex that " + "matches the branch will be used." 
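# (Illustrative aside rather than part of the upstream file: with a
# hypothetical value such as [r"^rhel[-:](\d+)(\.\d+)?$"], a branch named
# "rhel-8.1" yields the captures "8" and ".1", which are concatenated into
# the stream override "8.1"; for a branch named "rhel-8" the second capture
# group is null and is simply ignored.)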
+ ), + }, + "default_buildroot_packages": { + "type": list, + "default": [ + "bash", + "bzip2", + "coreutils", + "cpio", + "diffutils", + "findutils", + "gawk", + "gcc", + "gcc-c++", + "grep", + "gzip", + "info", + "make", + "patch", + "fedora-release", + "redhat-rpm-config", + "rpm-build", + "sed", + "shadow-utils", + "tar", + "unzip", + "util-linux", + "which", + "xz", + ], + "desc": ("The list of packages for offline module build RPM buildroot."), + }, + "default_srpm_buildroot_packages": { + "type": list, + "default": [ + "bash", + "gnupg2", + "fedora-release", + "redhat-rpm-config", + "fedpkg-minimal", + "rpm-build", + "shadow-utils", + ], + "desc": ("The list of packages for offline module build SRPM buildroot."), + }, + "greenwave_decision_context": { + "type": str, + "default": "", + "desc": "The Greenwave decision context that determines a module's gating status.", + }, + "allowed_disttag_marking_module_names": { + "type": list, + "default": [], + "desc": ( + "List of modules that are allowed to influence the RPM disttag when " + "buildrequired. These modules can set xmd.mbs.disttag_marking to do so. MBS " + "will use this list order to determine which modules take precedence." + ), + }, + "stream_suffixes": { + "type": dict, + "default": {}, + "desc": "A mapping of platform stream regular expressions and the " + "corresponding suffix added to formatted stream version. " + 'For example, {r"regexp": 0.1, ...}', }, - 'stream_suffixes': { - 'type': dict, - 'default': {}, - 'desc': 'A mapping of platform stream regular expressions and the ' - 'corresponding suffix added to formatted stream version. ' - 'For example, {r"regexp": 0.1, ...}' - } } def __init__(self, conf_section_obj): @@ -564,12 +613,12 @@ class Config(object): # set defaults for name, values in self._defaults.items(): - self.set_item(name, values['default'], values['type']) + self.set_item(name, values["default"], values["type"]) # override defaults for key in dir(conf_section_obj): # skip keys starting with underscore - if key.startswith('_'): + if key.startswith("_"): continue # set item (lower key) self.set_item(key.lower(), getattr(conf_section_obj, key)) @@ -579,7 +628,7 @@ Set value for configuration item. Creates the self._key = value attribute and self.key property to set/get/del the attribute. """ - if key == 'set_item' or key.startswith('_'): + if key == "set_item" or key.startswith("_"): raise Exception("Configuration item's name is not allowed: %s" % key) # Create the empty self._key attribute, so we can assign to it. @@ -588,13 +637,12 @@ # Create self.key property to access the self._key attribute. # Use the setifok_func if available for the attribute. - setifok_func = '_setifok_{}'.format(key) + setifok_func = "_setifok_{}".format(key) if hasattr(self, setifok_func): setx = lambda self, val: getattr(self, setifok_func)(val) elif value_type == Path: # For paths, expanduser. - setx = lambda self, val: setattr( - self, "_" + key, os.path.expanduser(val)) + setx = lambda self, val: setattr(self, "_" + key, os.path.expanduser(val)) else: setx = lambda self, val: setattr(self, "_" + key, val) getx = lambda self: getattr(self, "_" + key) @@ -604,7 +652,7 @@ # managed/registered configuration items if key in self._defaults: # type conversion for configuration item - convert = self._defaults[key]['type'] + convert = self._defaults[key]["type"] if convert in [bool, int, list, str, set, dict]: try: # Do not try to convert None... 
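The `_defaults` registry and the `set_item` plumbing reformatted above work together: `__init__` seeds every registered option, then `set_item` creates a per-option property, coerces the value to the registered `type`, and defers to a `_setifok_<name>` hook when one is defined. A minimal, runnable sketch of the same pattern (`MiniConfig` and its options are invented for illustration and are not the MBS API):

    class MiniConfig(object):
        # A toy version of the defaults-driven configuration pattern above.
        _defaults = {
            "num_concurrent_builds": {"type": int, "default": 0},
            "log_level": {"type": str, "default": "info"},
        }

        def __init__(self, **overrides):
            for name, values in self._defaults.items():
                self.set_item(name, values["default"])
            for key, value in overrides.items():
                self.set_item(key.lower(), value)

        def set_item(self, key, value):
            # Coerce to the registered type, mirroring the `convert` step above.
            if key in self._defaults and value is not None:
                value = self._defaults[key]["type"](value)
            # Prefer a validating _setifok_<key> hook when one is defined;
            # the hook is responsible for storing the attribute itself.
            setifok = getattr(self, "_setifok_{}".format(key), None)
            if setifok is not None:
                setifok(value)
            else:
                setattr(self, "_" + key, value)

        def _setifok_num_concurrent_builds(self, i):
            if i < 0:
                raise ValueError("NUM_CONCURRENT_BUILDS must be >= 0")
            self._num_concurrent_builds = i

    conf = MiniConfig(num_concurrent_builds="4")  # the string is coerced to int(4)
    assert conf._num_concurrent_builds == 4

The real `set_item` goes further and installs `property` objects on the class, so consumers read `conf.num_concurrent_builds` rather than the underscored attribute.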
@@ -614,8 +662,8 @@ class Config(object): raise TypeError("Configuration value conversion failed for name: %s" % key) # unknown type/unsupported conversion, or conversion not needed elif convert is not None and convert not in [Path]: - raise TypeError("Unsupported type %s for configuration item name: %s" - % (convert, key)) + raise TypeError( + "Unsupported type %s for configuration item name: %s" % (convert, key)) # Set the attribute to the correct value setattr(self, key, value) @@ -639,12 +687,12 @@ class Config(object): def _setifok_rpms_default_repository(self, s): rpm_repo = str(s) - rpm_repo = rpm_repo.rstrip('/') + '/' + rpm_repo = rpm_repo.rstrip("/") + "/" self._rpms_default_repository = rpm_repo def _setifok_rpms_default_cache(self, s): rpm_cache = str(s) - rpm_cache = rpm_cache.rstrip('/') + '/' + rpm_cache = rpm_cache.rstrip("/") + "/" self._rpms_default_cache = rpm_cache def _setifok_log_backend(self, s): @@ -669,13 +717,14 @@ class Config(object): of the installed plugins. The MBS core provides two such plugins, but a third-party could install another usable one. """ - entrypoints = pkg_resources.iter_entry_points('mbs.messaging_backends') + entrypoints = pkg_resources.iter_entry_points("mbs.messaging_backends") installed_backends = [e.name for e in entrypoints] s = str(s) if s not in installed_backends: - raise ValueError('The messaging plugin for "{0}" is not installed.' - ' The following are installed: {1}' - .format(s, ', '.join(installed_backends))) + raise ValueError( + 'The messaging plugin for "{0}" is not installed.' + " The following are installed: {1}".format(s, ", ".join(installed_backends)) + ) self._messaging = s def _setifok_amq_recv_addresses(self, l): @@ -689,16 +738,16 @@ class Config(object): def _setifok_num_concurrent_builds(self, i): if not isinstance(i, int): - raise TypeError('NUM_CONCURRENT_BUILDS needs to be an int') + raise TypeError("NUM_CONCURRENT_BUILDS needs to be an int") if i < 0: - raise ValueError('NUM_CONCURRENT_BUILDS must be >= 0') + raise ValueError("NUM_CONCURRENT_BUILDS must be >= 0") self._num_concurrent_builds = i def _setifok_auth_method(self, s): s = str(s) - if s.lower() not in ('oidc', 'kerberos'): - raise ValueError('Unsupported authentication method') - if s.lower() == 'kerberos': + if s.lower() not in ("oidc", "kerberos"): + raise ValueError("Unsupported authentication method") + if s.lower() == "kerberos": try: import ldap3 # noqa except ImportError: @@ -710,22 +759,25 @@ class Config(object): if keytab: keytab = os.path.expanduser(keytab) if not os.path.exists(keytab): - raise ValueError('The path set for KERBEROS_KEYTAB does not exist') + raise ValueError("The path set for KERBEROS_KEYTAB does not exist") self._kerberos_keytab = keytab def _setifok_ldap_uri(self, s): ldap_uri = str(s) - if ldap_uri and not re.match(r'^(?:ldap(?:s)?:\/\/.+)$', ldap_uri): + if ldap_uri and not re.match(r"^(?:ldap(?:s)?:\/\/.+)$", ldap_uri): raise ValueError('LDAP_URI is invalid. It must start with "ldap://" or "ldaps://"') self._ldap_uri = ldap_uri def _setifok_rebuild_strategy(self, strategy): if strategy not in SUPPORTED_STRATEGIES: - raise ValueError('The strategy "{0}" is not supported. Choose from: {1}' - .format(strategy, ', '.join(SUPPORTED_STRATEGIES))) + raise ValueError( + 'The strategy "{0}" is not supported. 
Choose from: {1}'.format( + strategy, ", ".join(SUPPORTED_STRATEGIES) + ) + ) self._rebuild_strategy = strategy def _setifok_base_module_arches(self, data): @@ -740,24 +792,28 @@ class Config(object): def _setifok_rebuild_strategies_allowed(self, strategies): if not isinstance(strategies, list): - raise ValueError('REBUILD_STRATEGIES_ALLOWED must be a list') + raise ValueError("REBUILD_STRATEGIES_ALLOWED must be a list") elif not strategies: - raise ValueError('REBUILD_STRATEGIES_ALLOWED must contain at least one rebuild ' - 'strategy') + raise ValueError( + "REBUILD_STRATEGIES_ALLOWED must contain at least one rebuild strategy") for strategy in strategies: if strategy not in SUPPORTED_STRATEGIES: - raise ValueError('REBUILD_STRATEGIES_ALLOWED must be one of: {0}' - .format(', '.join(SUPPORTED_STRATEGIES))) + raise ValueError( + "REBUILD_STRATEGIES_ALLOWED must be one of: {0}".format( + ", ".join(SUPPORTED_STRATEGIES)) + ) self._rebuild_strategies_allowed = strategies def _setifok_cleanup_failed_builds_time(self, num_days): if num_days < 1: - raise ValueError('CLEANUP_FAILED_BUILDS_TIME must be set to 1 or more days') + raise ValueError("CLEANUP_FAILED_BUILDS_TIME must be set to 1 or more days") self._cleanup_failed_builds_time = num_days def _setifok_resolver(self, s): if s not in SUPPORTED_RESOLVERS.keys(): - raise ValueError('The resolver "{0}" is not supported. Choose from: {1}' - .format(s, ', '.join(SUPPORTED_RESOLVERS.keys()))) + raise ValueError( + 'The resolver "{0}" is not supported. Choose from: {1}'.format( + s, ", ".join(SUPPORTED_RESOLVERS.keys())) + ) self._resolver = s diff --git a/module_build_service/errors.py b/module_build_service/errors.py index d2fc4aa9..0c3bfaa5 100644 --- a/module_build_service/errors.py +++ b/module_build_service/errors.py @@ -56,9 +56,6 @@ class StreamAmbigous(ValueError): def json_error(status, error, message): - response = jsonify( - {'status': status, - 'error': error, - 'message': message}) + response = jsonify({"status": status, "error": error, "message": message}) response.status_code = status return response diff --git a/module_build_service/glib.py b/module_build_service/glib.py index b98876b5..b6371af6 100644 --- a/module_build_service/glib.py +++ b/module_build_service/glib.py @@ -39,9 +39,9 @@ def variant_str(s): """ Converts a string to a GLib.Variant """ if not isinstance(s, str): - raise TypeError('Only strings are supported for scalars') + raise TypeError("Only strings are supported for scalars") - return GLib.Variant('s', s) + return GLib.Variant("s", s) def variant_list(l): @@ -50,11 +50,11 @@ l_variant = list() for item in l: if item is None: - item = '' + item = "" if type(item) == str: l_variant.append(variant_str(item)) elif type(item) == text_type: - l_variant.append(variant_str(item.encode('utf-8'))) + l_variant.append(variant_str(item.encode("utf-8"))) elif type(item) == list: l_variant.append(variant_list(item)) elif type(item) == dict: @@ -62,33 +62,33 @@ elif type(item) == bool: l_variant.append(variant_bool(item)) else: - raise TypeError('Cannot convert unknown type') - return GLib.Variant('av', l_variant) + raise TypeError("Cannot convert unknown type") + return GLib.Variant("av", l_variant) def variant_bool(b): """ Converts a boolean to a GLib.Variant """ if not isinstance(b, bool): - raise TypeError('Only booleans are supported') + raise TypeError("Only booleans are supported") - return GLib.Variant('b', b) + return GLib.Variant("b", b) def dict_values(d): """ Converts each 
dictionary value to a GLib.Variant """ if not isinstance(d, dict): - raise TypeError('Only dictionaries are supported for mappings') + raise TypeError("Only dictionaries are supported for mappings") d_variant = dict() for k, v in d.items(): if v is None: - v = '' + v = "" if type(v) == str: d_variant[k] = variant_str(v) elif type(v) == text_type: - d_variant[k] = variant_str(v.encode('utf-8')) + d_variant[k] = variant_str(v.encode("utf-8")) elif type(v) == list: d_variant[k] = variant_list(v) elif type(v) == dict: @@ -96,7 +96,7 @@ def dict_values(d): elif type(v) == bool: d_variant[k] = variant_bool(v) else: - raise TypeError('Cannot convert unknown type') + raise TypeError("Cannot convert unknown type") return d_variant @@ -104,7 +104,7 @@ def variant_dict(d): """ Converts a dictionary to a dictionary of GLib.Variant """ if not isinstance(d, dict): - raise TypeError('Only dictionaries are supported for mappings') + raise TypeError("Only dictionaries are supported for mappings") d_variant = dict_values(d) - return GLib.Variant('a{sv}', d_variant) + return GLib.Variant("a{sv}", d_variant) diff --git a/module_build_service/logger.py b/module_build_service/logger.py index c8e86f80..5097e086 100644 --- a/module_build_service/logger.py +++ b/module_build_service/logger.py @@ -58,7 +58,7 @@ level_flags["verbose"] = levels["info"] level_flags["quiet"] = levels["error"] -log_format = '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s' +log_format = "%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s" class ModuleBuildFileHandler(logging.FileHandler): @@ -66,7 +66,8 @@ class ModuleBuildFileHandler(logging.FileHandler): FileHandler subclass which handles only messages generated during particular module build with `build_id` set in its constructor. """ - def __init__(self, build_id, filename, mode='a', encoding=None, delay=0): + + def __init__(self, build_id, filename, mode="a", encoding=None, delay=0): logging.FileHandler.__init__(self, filename, mode, encoding, delay) self.build_id = build_id @@ -88,6 +89,7 @@ class ModuleBuildLogs(object): """ Manages ModuleBuildFileHandler logging handlers. """ + def __init__(self, build_logs_dir, build_logs_name_format, level=logging.INFO): """ Creates new ModuleBuildLogs instance. 
Module build logs are stored @@ -152,7 +154,7 @@ class ModuleBuildLogs(object): class MBSLogger: def __init__(self): - self._logger = logging.getLogger('MBS') + self._logger = logging.getLogger("MBS") self._level = logging.NOTSET self._current_path = os.path.dirname(os.path.realpath(__file__)) @@ -173,33 +175,33 @@ class MBSLogger: self.level = level def debug(self, *args, **kwargs): - return self._log_call('debug', args, kwargs) + return self._log_call("debug", args, kwargs) def info(self, *args, **kwargs): - return self._log_call('info', args, kwargs) + return self._log_call("info", args, kwargs) def warning(self, *args, **kwargs): - return self._log_call('warning', args, kwargs) + return self._log_call("warning", args, kwargs) def error(self, *args, **kwargs): - return self._log_call('error', args, kwargs) + return self._log_call("error", args, kwargs) def critical(self, *args, **kwargs): - return self._log_call('critical', args, kwargs) + return self._log_call("critical", args, kwargs) def exception(self, *args, **kwargs): - return self._log_call('exception', args, kwargs) + return self._log_call("exception", args, kwargs) def log(self, *args, **kwargs): - return self._log_call('log', args, kwargs) + return self._log_call("log", args, kwargs) def _log_call(self, level_name, args, kwargs): caller_filename = inspect.stack()[2][1] caller_filename = os.path.normpath(caller_filename) if not caller_filename.startswith(self._current_path): - log_name = 'MBS' + log_name = "MBS" else: - log_name = 'MBS' + caller_filename[len(self._current_path):-3].replace('/', '.') + log_name = "MBS" + caller_filename[len(self._current_path):-3].replace("/", ".") return getattr(logging.getLogger(log_name), level_name)(*args, **kwargs) @@ -231,6 +233,5 @@ def init_logging(conf): log = MBSLogger() log.level = conf.log_level else: - logging.basicConfig(filename=conf.log_file, level=conf.log_level, - format=log_format) + logging.basicConfig(filename=conf.log_file, level=conf.log_level, format=log_format) log = MBSLogger() diff --git a/module_build_service/manage.py b/module_build_service/manage.py index 3faa77d1..201fc574 100755 --- a/module_build_service/manage.py +++ b/module_build_service/manage.py @@ -28,14 +28,17 @@ import flask_migrate import logging import os import getpass +import textwrap from werkzeug.datastructures import FileStorage from module_build_service import app, conf, db, create_app from module_build_service import models from module_build_service.utils import ( submit_module_build_from_yaml, - load_local_builds, load_mmd, import_mmd, - import_builds_from_local_dnf_repos + load_local_builds, + load_mmd, + import_mmd, + import_builds_from_local_dnf_repos, ) from module_build_service.errors import StreamAmbigous import module_build_service.messaging @@ -43,31 +46,36 @@ import module_build_service.scheduler.consumer manager = Manager(create_app) -help_args = ('-?', '--help') +help_args = ("-?", "--help") manager.help_args = help_args migrate = flask_migrate.Migrate(app, db) -manager.add_command('db', flask_migrate.MigrateCommand) -manager.add_option('-d', '--debug', dest='debug', action='store_true') -manager.add_option('-v', '--verbose', dest='verbose', action='store_true') -manager.add_option('-q', '--quiet', dest='quiet', action='store_true') +manager.add_command("db", flask_migrate.MigrateCommand) +manager.add_option("-d", "--debug", dest="debug", action="store_true") +manager.add_option("-v", "--verbose", dest="verbose", action="store_true") +manager.add_option("-q", "--quiet", dest="quiet", 
action="store_true") def console_script_help(f): @wraps(f) def wrapped(*args, **kwargs): import sys + if any([arg in help_args for arg in sys.argv[1:]]): command = os.path.basename(sys.argv[0]) - print("""{0} + print(textwrap.dedent( + """\ + {0} -Usage: {0} [{1}] + Usage: {0} [{1}] -See also: - mbs-manager(1)""".format(command, - '|'.join(help_args))) + See also: + mbs-manager(1) + """).strip().format(command, "|".join(help_args)) + ) sys.exit(2) r = f(*args, **kwargs) return r + return wrapped @@ -76,10 +84,9 @@ See also: def upgradedb(): """ Upgrades the database schema to the latest revision """ - app.config["SERVER_NAME"] = 'localhost' + app.config["SERVER_NAME"] = "localhost" # TODO: configurable? - migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), - 'migrations') + migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations") with app.app_context(): flask_migrate.upgrade(directory=migrations_dir) @@ -101,28 +108,36 @@ def import_module(mmd_file): import_mmd(db.session, mmd) -@manager.option('--stream', action='store', dest="stream") -@manager.option('--file', action='store', dest="yaml_file") -@manager.option('--srpm', action='append', default=[], dest="srpms", metavar='SRPM') -@manager.option('--skiptests', action='store_true', dest="skiptests") -@manager.option('--offline', action='store_true', dest="offline") -@manager.option('-l', '--add-local-build', action='append', default=None, dest='local_build_nsvs') -@manager.option('-s', '--set-stream', action='append', default=[], dest='default_streams') -@manager.option('-r', '--platform-repo-file', action='append', default=[], - dest='platform_repofiles') -@manager.option('-p', '--platform-id', action='store', default=None, - dest='platform_id') -def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None, - stream=None, skiptests=False, default_streams=None, - offline=False, platform_repofiles=None, platform_id=None): +@manager.option("--stream", action="store", dest="stream") +@manager.option("--file", action="store", dest="yaml_file") +@manager.option("--srpm", action="append", default=[], dest="srpms", metavar="SRPM") +@manager.option("--skiptests", action="store_true", dest="skiptests") +@manager.option("--offline", action="store_true", dest="offline") +@manager.option("-l", "--add-local-build", action="append", default=None, dest="local_build_nsvs") +@manager.option("-s", "--set-stream", action="append", default=[], dest="default_streams") +@manager.option( + "-r", "--platform-repo-file", action="append", default=[], dest="platform_repofiles" +) +@manager.option("-p", "--platform-id", action="store", default=None, dest="platform_id") +def build_module_locally( + local_build_nsvs=None, + yaml_file=None, + srpms=None, + stream=None, + skiptests=False, + default_streams=None, + offline=False, + platform_repofiles=None, + platform_id=None, +): """ Performs local module build using Mock """ - if 'SERVER_NAME' not in app.config or not app.config['SERVER_NAME']: - app.config["SERVER_NAME"] = 'localhost' + if "SERVER_NAME" not in app.config or not app.config["SERVER_NAME"]: + app.config["SERVER_NAME"] = "localhost" - if app.config['RESOLVER'] == 'db': - raise ValueError("Please set RESOLVER to 'mbs' in your " - "configuration for local builds.") + if app.config["RESOLVER"] == "db": + raise ValueError( + "Please set RESOLVER to 'mbs' in your configuration for local builds.") with app.app_context(): conf.set_item("system", "mock") @@ -130,10 +145,10 @@ def 
build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None, # Use our own local SQLite3 database. confdir = os.path.abspath(os.getcwd()) - dbdir = os.path.abspath(os.path.join(confdir, '..')) if confdir.endswith('conf') \ - else confdir - dbpath = '/{0}'.format(os.path.join(dbdir, '.mbs_local_build.db')) - dburi = 'sqlite://' + dbpath + dbdir = \ + os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir + dbpath = "/{0}".format(os.path.join(dbdir, ".mbs_local_build.db")) + dburi = "sqlite://" + dbpath app.config["SQLALCHEMY_DATABASE_URI"] = dburi conf.set_item("sqlalchemy_database_uri", dburi) if os.path.exists(dbpath): @@ -164,11 +179,11 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None, handle.filename = filename try: modules_list = submit_module_build_from_yaml( - username, handle, params, stream=str(stream), skiptests=skiptests) + username, handle, params, stream=str(stream), skiptests=skiptests + ) except StreamAmbigous as e: logging.error(str(e)) - logging.error( - "Use '-s module_name:module_stream' to choose the stream") + logging.error("Use '-s module_name:module_stream' to choose the stream") return stop = module_build_service.scheduler.make_simple_stop_condition(db.session) @@ -176,57 +191,60 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None, # Run the consumer until stop_condition returns True module_build_service.scheduler.main([], stop) - if any(module.state == models.BUILD_STATES['failed'] for module in modules_list): - raise RuntimeError('Module build failed') + if any(module.state == models.BUILD_STATES["failed"] for module in modules_list): + raise RuntimeError("Module build failed") -@manager.option('identifier', metavar='NAME:STREAM[:VERSION[:CONTEXT]]', - help='Identifier for selecting module builds to retire') -@manager.option('--confirm', action='store_true', default=False, - help='Perform retire operation without prompting') +@manager.option( + "identifier", + metavar="NAME:STREAM[:VERSION[:CONTEXT]]", + help="Identifier for selecting module builds to retire", +) +@manager.option( + "--confirm", + action="store_true", + default=False, + help="Perform retire operation without prompting", +) def retire(identifier, confirm=False): """ Retire module build(s) by placing them into 'garbage' state. 
""" # Parse identifier and build query - parts = identifier.split(':') + parts = identifier.split(":") if len(parts) < 2: - raise ValueError('Identifier must contain at least NAME:STREAM') + raise ValueError("Identifier must contain at least NAME:STREAM") if len(parts) >= 5: - raise ValueError('Too many parts in identifier') + raise ValueError("Too many parts in identifier") - filter_by_kwargs = { - 'state': models.BUILD_STATES['ready'], - 'name': parts[0], - 'stream': parts[1], - } + filter_by_kwargs = {"state": models.BUILD_STATES["ready"], "name": parts[0], "stream": parts[1]} if len(parts) >= 3: - filter_by_kwargs['version'] = parts[2] + filter_by_kwargs["version"] = parts[2] if len(parts) >= 4: - filter_by_kwargs['context'] = parts[3] + filter_by_kwargs["context"] = parts[3] # Find module builds to retire module_builds = db.session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all() if not module_builds: - logging.info('No module builds found.') + logging.info("No module builds found.") return - logging.info('Found %d module builds:', len(module_builds)) + logging.info("Found %d module builds:", len(module_builds)) for build in module_builds: - logging.info('\t%s', ':'.join((build.name, build.stream, build.version, build.context))) + logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context))) # Prompt for confirmation - is_confirmed = confirm or prompt_bool('Retire {} module builds?'.format(len(module_builds))) + is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds))) if not is_confirmed: - logging.info('Module builds were NOT retired.') + logging.info("Module builds were NOT retired.") return # Retire module builds for build in module_builds: - build.transition(conf, models.BUILD_STATES['garbage'], 'Module build retired') + build.transition(conf, models.BUILD_STATES["garbage"], "Module build retired") db.session.commit() - logging.info('Module builds retired.') + logging.info("Module builds retired.") @console_script_help @@ -238,13 +256,9 @@ def run(host=None, port=None, debug=None): port = port or conf.port debug = debug or conf.debug - logging.info('Starting Module Build Service frontend') + logging.info("Starting Module Build Service frontend") - app.run( - host=host, - port=port, - debug=debug - ) + app.run(host=host, port=port, debug=debug) def manager_wrapper(): diff --git a/module_build_service/messaging.py b/module_build_service/messaging.py index 8cd29cf6..0afa3c37 100644 --- a/module_build_service/messaging.py +++ b/module_build_service/messaging.py @@ -66,9 +66,10 @@ class BaseMessage(object): "{}={!r}".format(name, getattr(self, name)) if param.default != param.empty else repr(getattr(self, name)) - for name, param in init_sig.parameters.items()) + for name, param in init_sig.parameters.items() + ) - return "{}({})".format(type(self).__name__, ', '.join(args_strs)) + return "{}({})".format(type(self).__name__, ", ".join(args_strs)) def __getitem__(self, key): """ Used to trick moksha into thinking we are a dict. 
""" @@ -87,13 +88,11 @@ class BaseMessage(object): class MessageParser(object): - def parse(self, msg): raise NotImplementedError() class FedmsgMessageParser(MessageParser): - def parse(self, msg): """ Takes a fedmsg topic and message and converts it to a message object @@ -101,83 +100,105 @@ class FedmsgMessageParser(MessageParser): :return: an object of BaseMessage descent if the message is a type that the app looks for, otherwise None is returned """ - if 'body' in msg: - msg = msg['body'] - topic = msg['topic'] - topic_categories = _messaging_backends['fedmsg']['services'] - categories_re = '|'.join(map(re.escape, topic_categories)) + if "body" in msg: + msg = msg["body"] + topic = msg["topic"] + topic_categories = _messaging_backends["fedmsg"]["services"] + categories_re = "|".join(map(re.escape, topic_categories)) regex_pattern = re.compile( - r'(?P' + categories_re + r')' - r'(?:(?:\.)(?Pbuild|repo|module|decision))?' - r'(?:(?:\.)(?Pstate|build))?' - r'(?:\.)(?Pchange|done|end|tag|update)$' + r"(?P" + categories_re + r")" + r"(?:(?:\.)(?Pbuild|repo|module|decision))?" + r"(?:(?:\.)(?Pstate|build))?" + r"(?:\.)(?Pchange|done|end|tag|update)$" ) regex_results = re.search(regex_pattern, topic) if regex_results: - category = regex_results.group('category') - object = regex_results.group('object') - subobject = regex_results.group('subobject') - event = regex_results.group('event') + category = regex_results.group("category") + object = regex_results.group("object") + subobject = regex_results.group("subobject") + event = regex_results.group("event") - msg_id = msg.get('msg_id') - msg_inner_msg = msg.get('msg') + msg_id = msg.get("msg_id") + msg_inner_msg = msg.get("msg") # If there isn't a msg dict in msg then this message can be skipped if not msg_inner_msg: - log.debug(('Skipping message without any content with the ' - 'topic "{0}"').format(topic)) + log.debug( + "Skipping message without any content with the " 'topic "{0}"'.format(topic)) return None msg_obj = None # Ignore all messages from the secondary koji instances. - if category == 'buildsys': - instance = msg_inner_msg.get('instance', 'primary') - if instance != 'primary': + if category == "buildsys": + instance = msg_inner_msg.get("instance", "primary") + if instance != "primary": log.debug("Ignoring message from %r koji hub." 
% instance) return - if category == 'buildsys' and object == 'build' and \ - subobject == 'state' and event == 'change': - build_id = msg_inner_msg.get('build_id') - task_id = msg_inner_msg.get('task_id') - build_new_state = msg_inner_msg.get('new') - build_name = msg_inner_msg.get('name') - build_version = msg_inner_msg.get('version') - build_release = msg_inner_msg.get('release') + if ( + category == "buildsys" + and object == "build" + and subobject == "state" + and event == "change" + ): + build_id = msg_inner_msg.get("build_id") + task_id = msg_inner_msg.get("task_id") + build_new_state = msg_inner_msg.get("new") + build_name = msg_inner_msg.get("name") + build_version = msg_inner_msg.get("version") + build_release = msg_inner_msg.get("release") msg_obj = KojiBuildChange( - msg_id, build_id, task_id, build_new_state, build_name, - build_version, build_release) + msg_id, + build_id, + task_id, + build_new_state, + build_name, + build_version, + build_release, + ) - elif category == 'buildsys' and object == 'repo' and \ - subobject is None and event == 'done': - repo_tag = msg_inner_msg.get('tag') + elif ( + category == "buildsys" + and object == "repo" + and subobject is None + and event == "done" + ): + repo_tag = msg_inner_msg.get("tag") msg_obj = KojiRepoChange(msg_id, repo_tag) - elif category == 'buildsys' and event == 'tag': - tag = msg_inner_msg.get('tag') - name = msg_inner_msg.get('name') - version = msg_inner_msg.get('version') - release = msg_inner_msg.get('release') + elif category == "buildsys" and event == "tag": + tag = msg_inner_msg.get("tag") + name = msg_inner_msg.get("name") + version = msg_inner_msg.get("version") + release = msg_inner_msg.get("release") nvr = None if name and version and release: - nvr = '-'.join((name, version, release)) + nvr = "-".join((name, version, release)) msg_obj = KojiTagChange(msg_id, tag, name, nvr) - elif category == 'mbs' and object == 'module' and \ - subobject == 'state' and event == 'change': - msg_obj = MBSModule( - msg_id, msg_inner_msg.get('id'), msg_inner_msg.get('state')) + elif ( + category == "mbs" + and object == "module" + and subobject == "state" + and event == "change" + ): + msg_obj = MBSModule(msg_id, msg_inner_msg.get("id"), msg_inner_msg.get("state")) - elif (category == 'greenwave' and object == 'decision' and - subobject is None and event == 'update'): + elif ( + category == "greenwave" + and object == "decision" + and subobject is None + and event == "update" + ): msg_obj = GreenwaveDecisionUpdate( msg_id=msg_id, - decision_context=msg_inner_msg.get('decision_context'), - policies_satisfied=msg_inner_msg.get('policies_satisfied'), - subject_identifier=msg_inner_msg.get('subject_identifier')) + decision_context=msg_inner_msg.get("decision_context"), + policies_satisfied=msg_inner_msg.get("policies_satisfied"), + subject_identifier=msg_inner_msg.get("subject_identifier"), + ) # If the message matched the regex and is important to the app, # it will be returned @@ -201,9 +222,19 @@ class KojiBuildChange(BaseMessage): :param module_build_id: the optional id of the module_build in the database :param state_reason: the optional reason as to why the state changed """ - def __init__(self, msg_id, build_id, task_id, build_new_state, build_name, - build_version, build_release, module_build_id=None, - state_reason=None): + + def __init__( + self, + msg_id, + build_id, + task_id, + build_new_state, + build_name, + build_version, + build_release, + module_build_id=None, + state_reason=None, + ): if task_id is None: raise 
IgnoreMessage("KojiBuildChange with a null task_id is invalid.") super(KojiBuildChange, self).__init__(msg_id) @@ -225,6 +256,7 @@ class KojiTagChange(BaseMessage): :param artifact: the name of tagged artifact (e.g. module-build-macros) :param nvr: the nvr of the tagged artifact """ + def __init__(self, msg_id, tag, artifact, nvr): super(KojiTagChange, self).__init__(msg_id) self.tag = tag @@ -238,6 +270,7 @@ class KojiRepoChange(BaseMessage): :param msg_id: the id of the msg (e.g. 2016-SomeGUID) :param repo_tag: the repo's tag (e.g. SHADOWBUILD-f25-build) """ + def __init__(self, msg_id, repo_tag): super(KojiRepoChange, self).__init__(msg_id) self.repo_tag = repo_tag @@ -250,6 +283,7 @@ class MBSModule(BaseMessage): :param module_build_id: the id of the module build :param module_build_state: the state of the module build """ + def __init__(self, msg_id, module_build_id, module_build_state): super(MBSModule, self).__init__(msg_id) self.module_build_id = module_build_id @@ -259,8 +293,7 @@ class MBSModule(BaseMessage): class GreenwaveDecisionUpdate(BaseMessage): """A class representing message send to topic greenwave.decision.update""" - def __init__(self, msg_id, decision_context, policies_satisfied, - subject_identifier): + def __init__(self, msg_id, decision_context, policies_satisfied, subject_identifier): super(GreenwaveDecisionUpdate, self).__init__(msg_id) self.decision_context = decision_context self.policies_satisfied = policies_satisfied @@ -277,14 +310,18 @@ def publish(topic, msg, conf, service): :return: """ try: - handler = _messaging_backends[conf.messaging]['publish'] + handler = _messaging_backends[conf.messaging]["publish"] except KeyError: - raise KeyError("No messaging backend found for %r in %r" % ( - conf.messaging, _messaging_backends.keys())) + raise KeyError( + "No messaging backend found for %r in %r" % (conf.messaging, _messaging_backends.keys()) + ) from module_build_service.monitor import ( - messaging_tx_to_send_counter, messaging_tx_sent_ok_counter, - messaging_tx_failed_counter) + messaging_tx_to_send_counter, + messaging_tx_sent_ok_counter, + messaging_tx_failed_counter, + ) + messaging_tx_to_send_counter.inc() try: rv = handler(topic, msg, conf, service) @@ -298,6 +335,7 @@ def publish(topic, msg, conf, service): def _fedmsg_publish(topic, msg, conf, service): # fedmsg doesn't really need access to conf, however other backends do import fedmsg + return fedmsg.publish(topic, msg=msg, modname=service) @@ -318,11 +356,12 @@ def _in_memory_publish(topic, msg, conf, service): wrapped_msg = FedmsgMessageParser().parse({ "msg_id": str(_in_memory_msg_id), "topic": service + "." + topic, - "msg": msg, + "msg": msg }) # Put the message to queue. from module_build_service.scheduler.consumer import work_queue_put + try: work_queue_put(wrapped_msg) except ValueError as e: @@ -336,26 +375,25 @@ def _in_memory_publish(topic, msg, conf, service): _fedmsg_backend = { - 'publish': _fedmsg_publish, - 'services': ['buildsys', 'mbs', 'greenwave'], - 'parser': FedmsgMessageParser(), - 'topic_suffix': '.', + "publish": _fedmsg_publish, + "services": ["buildsys", "mbs", "greenwave"], + "parser": FedmsgMessageParser(), + "topic_suffix": ".", } _in_memory_backend = { - 'publish': _in_memory_publish, - 'services': [], - 'parser': FedmsgMessageParser(), # re-used. :) - 'topic_suffix': '.', + "publish": _in_memory_publish, + "services": [], + "parser": FedmsgMessageParser(), # re-used. 
:) + "topic_suffix": ".", } _messaging_backends = {} -for entrypoint in pkg_resources.iter_entry_points('mbs.messaging_backends'): +for entrypoint in pkg_resources.iter_entry_points("mbs.messaging_backends"): _messaging_backends[entrypoint.name] = ep = entrypoint.load() - required = ['publish', 'services', 'parser', 'topic_suffix'] + required = ["publish", "services", "parser", "topic_suffix"] if any([key not in ep for key in required]): - raise ValueError('messaging backend %r is malformed: %r' % ( - entrypoint.name, ep)) + raise ValueError("messaging backend %r is malformed: %r" % (entrypoint.name, ep)) if not _messaging_backends: raise ValueError("No messaging plugins are installed or available.") diff --git a/module_build_service/mmd_resolver.py b/module_build_service/mmd_resolver.py index a3e29dc9..867a5dfe 100644 --- a/module_build_service/mmd_resolver.py +++ b/module_build_service/mmd_resolver.py @@ -32,7 +32,7 @@ from module_build_service.models import ModuleBuild class MMDResolverPolicy(enum.Enum): - All = "all" # All possible top-level combinations + All = "all" # All possible top-level combinations First = "first" # All possible top-level combinations (filtered by N:S, first picked) @@ -106,7 +106,8 @@ class MMDResolver(object): # This method creates such solve.Dep. stream_dep = lambda n, s: pool.Dep("module(%s:%s)" % (n, s)) versioned_stream_dep = lambda n, s, v, op: pool.Dep("module(%s:%s)" % (n, s)).Rel( - op, pool.Dep(str(v))) + op, pool.Dep(str(v)) + ) # There are relations between modules in `deps`. For example: # deps = [{'gtk': ['1'], 'foo': ['1']}]" means "gtk:1 and foo:1" are both required. @@ -144,14 +145,15 @@ class MMDResolver(object): # In case x.y.z versioning is not used for this base module, do not # use versions solv.Dep. - if len(str(ModuleBuild.get_stream_version( - stream_for_version, right_pad=False))) < 5: + stream_version_str = str( + ModuleBuild.get_stream_version(stream_for_version, right_pad=False)) + if len(stream_version_str) < 5: if stream.startswith("-"): req_neg = rel_or_dep( - req_neg, solv.REL_OR, stream_dep(name, stream[1:])) + req_neg, solv.REL_OR, stream_dep(name, stream[1:]) + ) else: - req_pos = rel_or_dep( - req_pos, solv.REL_OR, stream_dep(name, stream)) + req_pos = rel_or_dep(req_pos, solv.REL_OR, stream_dep(name, stream)) else: # The main reason why to use `exact_versions` is the case when # adding deps for the input module we want to resolve. This module @@ -178,19 +180,23 @@ class MMDResolver(object): if not exact_versions: op |= solv.REL_GT version = ModuleBuild.get_stream_version( - stream_for_version, right_pad=False) + stream_for_version, right_pad=False + ) if stream.startswith("-"): req_neg = rel_or_dep( - req_neg, solv.REL_OR, - versioned_stream_dep(name, stream[1:], version, op)) + req_neg, + solv.REL_OR, + versioned_stream_dep(name, stream[1:], version, op), + ) else: req_pos = rel_or_dep( - req_pos, solv.REL_OR, - versioned_stream_dep(name, stream, version, op)) + req_pos, + solv.REL_OR, + versioned_stream_dep(name, stream, version, op), + ) else: if stream.startswith("-"): - req_neg = rel_or_dep( - req_neg, solv.REL_OR, stream_dep(name, stream[1:])) + req_neg = rel_or_dep(req_neg, solv.REL_OR, stream_dep(name, stream[1:])) else: req_pos = rel_or_dep(req_pos, solv.REL_OR, stream_dep(name, stream)) @@ -291,9 +297,10 @@ class MMDResolver(object): # Helper method to return the dependencies of `mmd` in the {name: [streams], ... form}. 
# The `fn` is either "get_requires" or "get_buildrequires" str depending on whether # the return deps should be runtime requires or buildrequires. - normdeps = lambda mmd, fn: [{name: streams.get() - for name, streams in getattr(dep, fn)().items()} - for dep in mmd.get_dependencies()] + normdeps = lambda mmd, fn: [ + {name: streams.get() for name, streams in getattr(dep, fn)().items()} + for dep in mmd.get_dependencies() + ] base_module_stream_overrides = self._get_base_module_stream_overrides(mmd) @@ -319,20 +326,21 @@ class MMDResolver(object): # This is used for example to find the buildrequired module when # no particular stream is used - for example when buildrequiring # "gtk: []" - solvable.add_deparray(solv.SOLVABLE_PROVIDES, - pool.Dep("module(%s)" % n)) + solvable.add_deparray(solv.SOLVABLE_PROVIDES, pool.Dep("module(%s)" % n)) # Add "Provides: module(name:stream) = version", so we can find buildrequired # modules when "gtk:[1]" is used and also choose the latest version. - solvable.add_deparray(solv.SOLVABLE_PROVIDES, - pool.Dep("module(%s:%s)" % (n, s)).Rel( - solv.REL_EQ, pool.Dep(str(v)))) + solvable.add_deparray( + solv.SOLVABLE_PROVIDES, + pool.Dep("module(%s:%s)" % (n, s)).Rel(solv.REL_EQ, pool.Dep(str(v))), + ) self._add_base_module_provides(solvable, mmd) # Fill in the "Requires" of this module, so we can track its dependencies # on other modules. - requires = self._deps2reqs(normdeps(mmd, "get_requires"), - base_module_stream_overrides, False) + requires = self._deps2reqs( + normdeps(mmd, "get_requires"), base_module_stream_overrides, False + ) log.debug("Adding module %s with requires: %r", solvable.name, requires) solvable.add_deparray(solv.SOLVABLE_REQUIRES, requires) @@ -491,8 +499,10 @@ class MMDResolver(object): deps[0] = deps[0][1:] deps[-1] = deps[-1][:-1] # Generate the new deps using the parserpmrichdep. - deps = [self.pool.parserpmrichdep(dep) if dep.startswith("(") else self.pool.Dep(dep) - for dep in deps] + deps = [ + self.pool.parserpmrichdep(dep) if dep.startswith("(") else self.pool.Dep(dep) + for dep in deps + ] # 2) For each dep (name:stream), get the set of all solvables in particular NSVCs, # which provides that name:stream. Then use itertools.product() to actually @@ -519,8 +529,10 @@ class MMDResolver(object): # we are currently trying, otherwise it would just choose some random ones. # We do that by FAVORING those modules - this is done in libsolv by another # job prepending to our main job to resolve the deps of input module. - jobs = [self.pool.Job(solv.Job.SOLVER_FAVOR | solv.Job.SOLVER_SOLVABLE, s.id) - for s in opt] + [job] + jobs = [ + self.pool.Job(solv.Job.SOLVER_FAVOR | solv.Job.SOLVER_SOLVABLE, s.id) + for s in opt + ] + [job] # Log the job. log.debug("Jobs:") @@ -533,10 +545,11 @@ class MMDResolver(object): if problem_str: err_msg = problem_str else: - err_msg = ', '.join(str(p) for p in problems) + err_msg = ", ".join(str(p) for p in problems) raise RuntimeError( - 'Problems were found during module dependency resolution: {}' - .format(err_msg)) + "Problems were found during module dependency resolution: {}".format( + err_msg) + ) # Find out what was actually resolved by libsolv to be installed as a result # of our jobs - those are the modules we are looking for. newsolvables = solver.transaction().newsolvables() @@ -603,9 +616,11 @@ class MMDResolver(object): transactions[ns] = [trans[sorted_trans[0][0]]] # Convert the solvables in alternatives to nsvc and return them as set of frozensets. 
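# (Illustrative aside with made-up values, rather than part of the upstream
# file: the value built here is a set of frozensets of
# "name:stream:version:context" strings, one frozenset per self-consistent
# buildrequire combination, for example
#     {frozenset({"platform:f28:3:00000000", "gtk:1:20180101:c2"}),
#      frozenset({"platform:f29:6:00000000", "gtk:1:20180101:c2"})}
# so each frozenset can be consumed directly as one expansion of the input
# module's dependencies.)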
- return set(frozenset(s2nsvc(s) for s in transactions[0]) - for src_alternatives in alternatives.values() - for transactions in src_alternatives.values()) + return set( + frozenset(s2nsvc(s) for s in transactions[0]) + for src_alternatives in alternatives.values() + for transactions in src_alternatives.values() + ) @staticmethod def _detect_transitive_stream_collision(problems): @@ -636,9 +651,9 @@ class MMDResolver(object): pair.sort() # only for pretty print yield pair - formatted_conflicts_pairs = ', '.join( - '{} and {}'.format(*item) for item in find_conflicts_pairs() + formatted_conflicts_pairs = ", ".join( + "{} and {}".format(*item) for item in find_conflicts_pairs() ) if formatted_conflicts_pairs: - return 'The module has conflicting buildrequires of: {}'.format( + return "The module has conflicting buildrequires of: {}".format( formatted_conflicts_pairs) diff --git a/module_build_service/models.py b/module_build_service/models.py index 2336b4f7..65f2089e 100644 --- a/module_build_service/models.py +++ b/module_build_service/models.py @@ -43,7 +43,7 @@ import module_build_service.messaging from module_build_service.glib import from_variant_dict from module_build_service import db, log, get_url_for, app, conf -DEFAULT_MODULE_CONTEXT = '00000000' +DEFAULT_MODULE_CONTEXT = "00000000" # Just like koji.BUILD_STATES, except our own codes for modules. @@ -58,32 +58,26 @@ BUILD_STATES = { # fetch them. If this is all good, then we set the build to the 'wait' # state. If anything goes wrong, we jump immediately to the 'failed' state. "init": 0, - # Here, the scheduler picks up tasks in wait and switches to build # immediately. Eventually, we'll add throttling logic here so we don't # submit too many builds for the build system to handle "wait": 1, - # The scheduler works on builds in this state. We prepare the buildroot, # submit builds for all the components, and wait for the results to come # back. "build": 2, - # Once all components have succeeded, we set the top-level module build # to 'done'. "done": 3, - # If any of the component builds fail, then we set the top-level module # build to 'failed' also. "failed": 4, - # This is a state to be set when a module is ready to be part of a # larger compose. perhaps it is set by an external service that knows # about the Grand Plan. "ready": 5, - # If the module has failed and was garbage collected by MBS - "garbage": 6 + "garbage": 6, } INVERSE_BUILD_STATES = {v: k for k, v in BUILD_STATES.items()} @@ -115,13 +109,12 @@ def _setup_event_listeners(session): """ Starts listening for events related to database session. """ - if not sqlalchemy.event.contains( - session, 'before_commit', session_before_commit_handlers): - sqlalchemy.event.listen(session, 'before_commit', - session_before_commit_handlers) + if not sqlalchemy.event.contains(session, "before_commit", session_before_commit_handlers): + sqlalchemy.event.listen(session, "before_commit", session_before_commit_handlers) # initialize DB event listeners from the monitor module from module_build_service.monitor import db_hook_event_listeners + db_hook_event_listeners(session.bind.engine) @@ -134,16 +127,15 @@ def make_session(conf): # Do not use scoped_session in case we are using in-memory database, # because we want to use the same session across all threads to be able # to use the same in-memory database in tests. 
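# (Illustrative aside rather than part of the upstream file: make_session is
# used as a context manager, so callers typically do
#     with make_session(conf) as session:
#         builds = session.query(ModuleBuild).all()
# and the sqlite:// branch below short-circuits to the shared db.session so
# that every thread in the test suite sees the same in-memory database.)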
- if conf.sqlalchemy_database_uri == 'sqlite://': + if conf.sqlalchemy_database_uri == "sqlite://": _setup_event_listeners(db.session) yield db.session db.session.commit() return # Needs to be set to create app_context. - if (not has_app_context() and - ('SERVER_NAME' not in app.config or not app.config['SERVER_NAME'])): - app.config['SERVER_NAME'] = 'localhost' + if not has_app_context() and ("SERVER_NAME" not in app.config or not app.config["SERVER_NAME"]): + app.config["SERVER_NAME"] = "localhost" # If there is no app_context, we have to create one before creating # the session. If we would create app_context after the session (this @@ -152,9 +144,7 @@ def make_session(conf): with app.app_context() if not has_app_context() else _dummy_context_mgr(): # TODO - we could use ZopeTransactionExtension() here some day for # improved safety on the backend. - engine = sqlalchemy.engine_from_config({ - 'sqlalchemy.url': conf.sqlalchemy_database_uri, - }) + engine = sqlalchemy.engine_from_config({"sqlalchemy.url": conf.sqlalchemy_database_uri}) session = scoped_session(sessionmaker(bind=engine))() _setup_event_listeners(session) try: @@ -174,20 +164,20 @@ class MBSBase(db.Model): module_builds_to_module_buildrequires = db.Table( - 'module_builds_to_module_buildrequires', - db.Column('module_id', db.Integer, db.ForeignKey('module_builds.id'), nullable=False), - db.Column('module_buildrequire_id', db.Integer, db.ForeignKey('module_builds.id'), - nullable=False), - db.UniqueConstraint('module_id', 'module_buildrequire_id', name='unique_buildrequires') + "module_builds_to_module_buildrequires", + db.Column("module_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False), + db.Column( + "module_buildrequire_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False), + db.UniqueConstraint("module_id", "module_buildrequire_id", name="unique_buildrequires"), ) module_builds_to_virtual_streams = db.Table( - 'module_builds_to_virtual_streams', - db.Column('module_build_id', db.Integer, db.ForeignKey('module_builds.id'), nullable=False), - db.Column('virtual_stream_id', db.Integer, db.ForeignKey('virtual_streams.id'), nullable=False), + "module_builds_to_virtual_streams", + db.Column("module_build_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False), + db.Column("virtual_stream_id", db.Integer, db.ForeignKey("virtual_streams.id"), nullable=False), db.UniqueConstraint( - 'module_build_id', 'virtual_stream_id', name='unique_module_to_virtual_stream') + "module_build_id", "virtual_stream_id", name="unique_module_to_virtual_stream"), ) @@ -218,10 +208,7 @@ class ModuleBuild(MBSBase): new_repo_task_id = db.Column(db.Integer) rebuild_strategy = db.Column(db.String, nullable=False) virtual_streams = db.relationship( - 'VirtualStream', - secondary=module_builds_to_virtual_streams, - back_populates='module_builds', - ) + "VirtualStream", secondary=module_builds_to_virtual_streams, back_populates="module_builds") # A monotonically increasing integer that represents which batch or # iteration this module is currently on for successive rebuilds of its @@ -231,18 +218,19 @@ class ModuleBuild(MBSBase): # This is only used for base modules for ordering purposes (f27.0.1 => 270001) stream_version = db.Column(db.Float) buildrequires = db.relationship( - 'ModuleBuild', + "ModuleBuild", secondary=module_builds_to_module_buildrequires, primaryjoin=module_builds_to_module_buildrequires.c.module_id == id, secondaryjoin=module_builds_to_module_buildrequires.c.module_buildrequire_id == id, - 
backref='buildrequire_for' + backref="buildrequire_for", ) rebuild_strategies = { - 'all': 'All components will be rebuilt', - 'changed-and-after': ('All components that have changed and those in subsequent batches ' - 'will be rebuilt'), - 'only-changed': 'All changed components will be rebuilt' + "all": "All components will be rebuilt", + "changed-and-after": ( + "All components that have changed and those in subsequent batches will be rebuilt" + ), + "only-changed": "All changed components will be rebuilt", } def current_batch(self, state=None): @@ -282,8 +270,7 @@ class ModuleBuild(MBSBase): ] else: return [ - component for component in self.component_builds - if component.batch <= self.batch + component for component in self.component_builds if component.batch <= self.batch ] @staticmethod @@ -305,30 +292,40 @@ class ModuleBuild(MBSBase): streams for given module `name`. """ # Prepare the subquery to find out all unique name:stream records. - subq = session.query( - func.max(ModuleBuild.id).label("maxid"), - func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)) - ).group_by(ModuleBuild.stream).filter_by( - name=name, state=BUILD_STATES["ready"]).subquery('t2') + subq = ( + session.query( + func.max(ModuleBuild.id).label("maxid"), + func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)), + ) + .group_by(ModuleBuild.stream) + .filter_by(name=name, state=BUILD_STATES["ready"]) + .subquery("t2") + ) # Use the subquery to actually return all the columns for its results. - query = session.query(ModuleBuild).join( - subq, and_(ModuleBuild.id == subq.c.maxid)) + query = session.query(ModuleBuild).join(subq, and_(ModuleBuild.id == subq.c.maxid)) return query.all() @staticmethod def _get_last_builds_in_stream_query(session, name, stream, **kwargs): # Prepare the subquery to find out all unique name:stream records. - subq = session.query( - func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion") - ).filter_by(name=name, state=BUILD_STATES["ready"], stream=stream, **kwargs).subquery('t2') + subq = ( + session.query( + func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion") + ) + .filter_by(name=name, state=BUILD_STATES["ready"], stream=stream, **kwargs) + .subquery("t2") + ) # Use the subquery to actually return all the columns for its results. query = session.query(ModuleBuild).join( - subq, and_( + subq, + and_( ModuleBuild.name == name, ModuleBuild.stream == stream, - sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion)) + sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion, + ), + ) return query @staticmethod @@ -370,8 +367,11 @@ class ModuleBuild(MBSBase): Returns build defined by NSVC. Optional kwargs are passed to SQLAlchemy filter_by method. """ - return session.query(ModuleBuild).filter_by( - name=name, stream=stream, version=str(version), context=context, **kwargs).first() + return ( + session.query(ModuleBuild) + .filter_by(name=name, stream=stream, version=str(version), context=context, **kwargs) + .first() + ) @staticmethod def get_scratch_builds_from_nsvc(session, name, stream, version, context, **kwargs): @@ -379,9 +379,12 @@ class ModuleBuild(MBSBase): Returns all scratch builds defined by NSVC. This is done by using the supplied `context` as a match prefix. Optional kwargs are passed to SQLAlchemy filter_by method. 
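
        A sketch of the prefix match this performs (the suffixed contexts are
        hypothetical, shown only to illustrate the LIKE pattern):

            ModuleBuild.get_scratch_builds_from_nsvc(
                session, "testmodule", "master", 20180101, "9c690d0e")
            # matches context "9c690d0e" as well as "9c690d0e_1", ...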
""" - return session.query(ModuleBuild).filter_by( - name=name, stream=stream, version=str(version), scratch=True, **kwargs)\ - .filter(ModuleBuild.context.like(context + '%')).all() + return ( + session.query(ModuleBuild) + .filter_by(name=name, stream=stream, version=str(version), scratch=True, **kwargs) + .filter(ModuleBuild.context.like(context + "%")) + .all() + ) @staticmethod def _add_stream_version_lte_filter(session, query, stream_version): @@ -398,9 +401,8 @@ class ModuleBuild(MBSBase): # Compute the minimal stream_version. For example, for `stream_version` 281234, # the minimal `stream_version` is 280000. min_stream_version = (stream_version // 10000) * 10000 - return query\ - .filter(ModuleBuild.stream_version <= stream_version)\ - .filter(ModuleBuild.stream_version >= min_stream_version) + return query.filter(ModuleBuild.stream_version <= stream_version).filter( + ModuleBuild.stream_version >= min_stream_version) @staticmethod def _add_virtual_streams_filter(session, query, virtual_streams): @@ -416,11 +418,11 @@ class ModuleBuild(MBSBase): if not virtual_streams: return query - return query.join( - VirtualStream, ModuleBuild.virtual_streams - ).filter( - VirtualStream.name.in_(virtual_streams) - ).distinct(ModuleBuild.id) + return ( + query.join(VirtualStream, ModuleBuild.virtual_streams) + .filter(VirtualStream.name.in_(virtual_streams)) + .distinct(ModuleBuild.id) + ) @staticmethod def get_last_builds_in_stream_version_lte(session, name, stream_version, virtual_streams=None): @@ -437,10 +439,12 @@ class ModuleBuild(MBSBase): :param list virtual_streams: A list of the virtual streams to filter on. The filtering uses "or" logic. When falsy, no filtering occurs. """ - query = session.query(ModuleBuild)\ - .filter(ModuleBuild.name == name)\ - .filter(ModuleBuild.state == BUILD_STATES["ready"])\ + query = ( + session.query(ModuleBuild) + .filter(ModuleBuild.name == name) + .filter(ModuleBuild.state == BUILD_STATES["ready"]) .order_by(ModuleBuild.version.desc()) + ) query = ModuleBuild._add_stream_version_lte_filter(session, query, stream_version) query = ModuleBuild._add_virtual_streams_filter(session, query, virtual_streams) @@ -485,19 +489,20 @@ class ModuleBuild(MBSBase): def mmd(self): from module_build_service.utils import load_mmd + try: return load_mmd(self.modulemd) except Exception: - log.exception('An error occurred while trying to parse the modulemd') + log.exception("An error occurred while trying to parse the modulemd") raise ValueError("Invalid modulemd") @property def previous_non_failed_state(self): for trace in reversed(self.module_builds_trace): - if trace.state != BUILD_STATES['failed']: + if trace.state != BUILD_STATES["failed"]: return trace.state - @validates('state') + @validates("state") def validate_state(self, key, field): if field in BUILD_STATES.values(): return field @@ -505,22 +510,22 @@ class ModuleBuild(MBSBase): return BUILD_STATES[field] raise ValueError("%s: %s, not in %r" % (key, field, BUILD_STATES)) - @validates('rebuild_strategy') + @validates("rebuild_strategy") def validate_rebuild_stategy(self, key, rebuild_strategy): if rebuild_strategy not in self.rebuild_strategies.keys(): - choices = ', '.join(self.rebuild_strategies.keys()) - raise ValueError('The rebuild_strategy of "{0}" is invalid. Choose from: {1}' - .format(rebuild_strategy, choices)) + choices = ", ".join(self.rebuild_strategies.keys()) + raise ValueError( + 'The rebuild_strategy of "{0}" is invalid. 
Choose from: {1}'.format( + rebuild_strategy, choices) + ) return rebuild_strategy @classmethod def from_module_event(cls, session, event): if type(event) == module_build_service.messaging.MBSModule: - return session.query(cls).filter( - cls.id == event.module_build_id).first() + return session.query(cls).filter(cls.id == event.module_build_id).first() else: - raise ValueError("%r is not a module message." - % type(event).__name__) + raise ValueError("%r is not a module message." % type(event).__name__) @staticmethod def contexts_from_mmd(mmd_str): @@ -538,28 +543,31 @@ class ModuleBuild(MBSBase): context hashes. """ from module_build_service.utils import load_mmd + try: mmd = load_mmd(mmd_str) except Exception: raise ValueError("Invalid modulemd") - mbs_xmd = mmd.get_xmd().get('mbs', {}) + mbs_xmd = mmd.get_xmd().get("mbs", {}) rv = [] # Get the buildrequires from the XMD section, because it contains # all the buildrequires as we resolved them using dependency resolver. # We have to use keys because GLib.Variant doesn't support `in` directly. if "buildrequires" not in mbs_xmd.keys(): - raise ValueError('The module\'s modulemd hasn\'t been formatted by MBS') + raise ValueError("The module's modulemd hasn't been formatted by MBS") mmd_formatted_buildrequires = { - dep: info['ref'] for dep, info in mbs_xmd["buildrequires"].items()} + dep: info["ref"] for dep, info in mbs_xmd["buildrequires"].items() + } property_json = json.dumps(OrderedDict(sorted(mmd_formatted_buildrequires.items()))) - rv.append(hashlib.sha1(property_json.encode('utf-8')).hexdigest()) + rv.append(hashlib.sha1(property_json.encode("utf-8")).hexdigest()) # Get the streams of buildrequires and hash it. mmd_formatted_buildrequires = { - dep: info['stream'] for dep, info in mbs_xmd["buildrequires"].items()} + dep: info["stream"] for dep, info in mbs_xmd["buildrequires"].items() + } property_json = json.dumps(OrderedDict(sorted(mmd_formatted_buildrequires.items()))) - build_context = hashlib.sha1(property_json.encode('utf-8')).hexdigest() + build_context = hashlib.sha1(property_json.encode("utf-8")).hexdigest() rv.append(build_context) # Get the requires from the real "dependencies" section in MMD. @@ -571,29 +579,45 @@ class ModuleBuild(MBSBase): mmd_requires[name] = mmd_requires[name].union(streams.get()) # Sort the streams for each module name and also sort the module names. 
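        # The hashing scheme used throughout this method, shown standalone
        # with illustrative input (same json/hashlib calls as below):
        #
        #     requires = {"platform": ["f28"], "gtk": ["1", "2"]}
        #     property_json = json.dumps(OrderedDict(sorted(requires.items())))
        #     digest = hashlib.sha1(property_json.encode("utf-8")).hexdigest()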
- mmd_requires = { - dep: sorted(list(streams)) for dep, streams in mmd_requires.items()} + mmd_requires = {dep: sorted(list(streams)) for dep, streams in mmd_requires.items()} property_json = json.dumps(OrderedDict(sorted(mmd_requires.items()))) - runtime_context = hashlib.sha1(property_json.encode('utf-8')).hexdigest() + runtime_context = hashlib.sha1(property_json.encode("utf-8")).hexdigest() rv.append(runtime_context) - combined_hashes = '{0}:{1}'.format(build_context, runtime_context) - context = hashlib.sha1(combined_hashes.encode('utf-8')).hexdigest()[:8] + combined_hashes = "{0}:{1}".format(build_context, runtime_context) + context = hashlib.sha1(combined_hashes.encode("utf-8")).hexdigest()[:8] rv.append(context) return tuple(rv) @property def siblings(self): - query = self.query.filter_by( - name=self.name, stream=self.stream, version=self.version, scratch=self.scratch).options( - load_only('id')).filter(ModuleBuild.id != self.id) + query = ( + self.query.filter_by( + name=self.name, stream=self.stream, version=self.version, scratch=self.scratch) + .options(load_only("id")) + .filter(ModuleBuild.id != self.id) + ) return [build.id for build in query.all()] @classmethod - def create(cls, session, conf, name, stream, version, modulemd, scmurl, username, - context=None, rebuild_strategy=None, scratch=False, srpms=None, - publish_msg=True, **kwargs): + def create( + cls, + session, + conf, + name, + stream, + version, + modulemd, + scmurl, + username, + context=None, + rebuild_strategy=None, + scratch=False, + srpms=None, + publish_msg=True, + **kwargs + ): now = datetime.utcnow() module = cls( name=name, @@ -624,14 +648,14 @@ class ModuleBuild(MBSBase): session.commit() if publish_msg: module_build_service.messaging.publish( - service='mbs', - topic='module.state.change', + service="mbs", + topic="module.state.change", msg=module.json(show_tasks=False), # Note the state is "init" here... conf=conf, ) return module - def transition(self, conf, state, state_reason=None, failure_type='unspec'): + def transition(self, conf, state, state_reason=None, failure_type="unspec"): """Record that a build has transitioned state. 
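
        A usage sketch (the failure_type values mirror the 'user', 'infra'
        and 'unspec' reasons used by the builds_failed_total metric):

            build.transition(conf, BUILD_STATES["failed"],
                             state_reason="component build failed",
                             failure_type="infra")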
The history of state transitions are recorded in model @@ -653,9 +677,9 @@ class ModuleBuild(MBSBase): from module_build_service.monitor import builder_success_counter, builder_failed_counter - if INVERSE_BUILD_STATES[self.state] in ['done', 'failed']: + if INVERSE_BUILD_STATES[self.state] in ["done", "failed"]: self.time_completed = now - if INVERSE_BUILD_STATES[self.state] == 'done': + if INVERSE_BUILD_STATES[self.state] == "done": builder_success_counter.inc() else: builder_failed_counter.labels(reason=failure_type).inc() @@ -664,16 +688,14 @@ class ModuleBuild(MBSBase): self.state_reason = state_reason # record module's state change - mbt = ModuleBuildTrace(state_time=now, - state=self.state, - state_reason=state_reason) + mbt = ModuleBuildTrace(state_time=now, state=self.state, state_reason=state_reason) self.module_builds_trace.append(mbt) log.info("%r, state %r->%r" % (self, old_state, self.state)) if old_state != self.state: module_build_service.messaging.publish( - service='mbs', - topic='module.state.change', + service="mbs", + topic="module.state.change", msg=self.json(show_tasks=False), conf=conf, ) @@ -697,14 +719,13 @@ class ModuleBuild(MBSBase): filters["name"] = name if stream: filters["stream"] = stream - local_modules = session.query(ModuleBuild).filter_by( - **filters).all() + local_modules = session.query(ModuleBuild).filter_by(**filters).all() if not local_modules: return [] - local_modules = [m for m in local_modules - if m.koji_tag and - m.koji_tag.startswith(conf.mock_resultsdir)] + local_modules = [ + m for m in local_modules if m.koji_tag and m.koji_tag.startswith(conf.mock_resultsdir) + ] return local_modules @classmethod @@ -718,13 +739,15 @@ class ModuleBuild(MBSBase): There should be at most one. """ - if event.repo_tag.endswith('-build'): + if event.repo_tag.endswith("-build"): tag = event.repo_tag[:-6] else: tag = event.repo_tag - query = session.query(cls)\ - .filter(cls.koji_tag == tag)\ + query = ( + session.query(cls) + .filter(cls.koji_tag == tag) .filter(cls.state == BUILD_STATES["build"]) + ) count = query.count() if count > 1: @@ -734,10 +757,12 @@ class ModuleBuild(MBSBase): @classmethod def from_tag_change_event(cls, session, event): - tag = event.tag[:-6] if event.tag.endswith('-build') else event.tag - query = session.query(cls)\ - .filter(cls.koji_tag == tag)\ + tag = event.tag[:-6] if event.tag.endswith("-build") else event.tag + query = ( + session.query(cls) + .filter(cls.koji_tag == tag) .filter(cls.state == BUILD_STATES["build"]) + ) count = query.count() if count > 1: @@ -747,43 +772,43 @@ class ModuleBuild(MBSBase): def short_json(self, show_stream_version=False): rv = { - 'id': self.id, - 'state': self.state, - 'state_name': INVERSE_BUILD_STATES[self.state], - 'stream': self.stream, - 'version': self.version, - 'name': self.name, - 'context': self.context, + "id": self.id, + "state": self.state, + "state_name": INVERSE_BUILD_STATES[self.state], + "stream": self.stream, + "version": self.version, + "name": self.name, + "context": self.context, } if show_stream_version: - rv['stream_version'] = self.stream_version + rv["stream_version"] = self.stream_version return rv def json(self, show_tasks=True): mmd = self.mmd() xmd = from_variant_dict(mmd.get_xmd()) try: - buildrequires = xmd['mbs']['buildrequires'] + buildrequires = xmd["mbs"]["buildrequires"] except KeyError: buildrequires = {} rv = self.short_json() rv.update({ - 'component_builds': [build.id for build in self.component_builds], - 'koji_tag': self.koji_tag, - 'owner': self.owner, 
- 'rebuild_strategy': self.rebuild_strategy, - 'scmurl': self.scmurl, - 'scratch': self.scratch, - 'srpms': json.loads(self.srpms or '[]'), - 'siblings': self.siblings, - 'state_reason': self.state_reason, - 'time_completed': _utc_datetime_to_iso(self.time_completed), - 'time_modified': _utc_datetime_to_iso(self.time_modified), - 'time_submitted': _utc_datetime_to_iso(self.time_submitted), - 'buildrequires': buildrequires, + "component_builds": [build.id for build in self.component_builds], + "koji_tag": self.koji_tag, + "owner": self.owner, + "rebuild_strategy": self.rebuild_strategy, + "scmurl": self.scmurl, + "scratch": self.scratch, + "srpms": json.loads(self.srpms or "[]"), + "siblings": self.siblings, + "state_reason": self.state_reason, + "time_completed": _utc_datetime_to_iso(self.time_completed), + "time_modified": _utc_datetime_to_iso(self.time_modified), + "time_submitted": _utc_datetime_to_iso(self.time_submitted), + "buildrequires": buildrequires, }) if show_tasks: - rv['tasks'] = self.tasks() + rv["tasks"] = self.tasks() return rv def extended_json(self, show_state_url=False, api_version=1): @@ -797,25 +822,26 @@ class ModuleBuild(MBSBase): rv = self.json(show_tasks=True) state_url = None if show_state_url: - state_url = get_url_for('module_build', api_version=api_version, id=self.id) + state_url = get_url_for("module_build", api_version=api_version, id=self.id) rv.update({ - 'base_module_buildrequires': [br.short_json(True) for br in self.buildrequires], - 'build_context': self.build_context, - 'modulemd': self.modulemd, - 'ref_build_context': self.ref_build_context, - 'runtime_context': self.runtime_context, - 'state_trace': [ + "base_module_buildrequires": [br.short_json(True) for br in self.buildrequires], + "build_context": self.build_context, + "modulemd": self.modulemd, + "ref_build_context": self.ref_build_context, + "runtime_context": self.runtime_context, + "state_trace": [ { - 'time': _utc_datetime_to_iso(record.state_time), - 'state': record.state, - 'state_name': INVERSE_BUILD_STATES[record.state], - 'reason': record.state_reason - } for record in self.state_trace(self.id) + "time": _utc_datetime_to_iso(record.state_time), + "state": record.state, + "state_name": INVERSE_BUILD_STATES[record.state], + "reason": record.state_reason, + } + for record in self.state_trace(self.id) ], - 'state_url': state_url, - 'stream_version': self.stream_version, - 'virtual_streams': [virtual_stream.name for virtual_stream in self.virtual_streams], + "state_url": state_url, + "stream_version": self.stream_version, + "virtual_streams": [virtual_stream.name for virtual_stream in self.virtual_streams], }) return rv @@ -825,11 +851,12 @@ class ModuleBuild(MBSBase): :return: dictionary containing the tasks associated with the build """ tasks = dict() - if self.id and self.state != 'init': - for build in ComponentBuild.query\ - .filter_by(module_id=self.id)\ - .options(lazyload('module_build'))\ - .all(): + if self.id and self.state != "init": + for build in ( + ComponentBuild.query.filter_by(module_id=self.id) + .options(lazyload("module_build")) + .all() + ): tasks[build.format] = tasks.get(build.format, {}) tasks[build.format][build.package] = dict( task_id=build.task_id, @@ -843,8 +870,11 @@ class ModuleBuild(MBSBase): return tasks def state_trace(self, module_id): - return ModuleBuildTrace.query.filter_by( - module_id=module_id).order_by(ModuleBuildTrace.state_time).all() + return ( + ModuleBuildTrace.query.filter_by(module_id=module_id) + .order_by(ModuleBuildTrace.state_time) + 
.all() + ) @staticmethod def get_stream_version(stream, right_pad=True): @@ -861,7 +891,7 @@ class ModuleBuild(MBSBase): :rtype: float or None if the stream doesn't have a valid version """ # The platform version (e.g. prefix1.2.0 => 010200) - version = '' + version = "" for char in stream: # See if the current character is an integer, signifying the version has started if char.isdigit(): @@ -870,19 +900,19 @@ class ModuleBuild(MBSBase): elif version: # If the character is a period and the version is set, then # the loop is still processing the version part of the stream - if char == '.': - version += '.' + if char == ".": + version += "." # If the version is set and the character is not a period or # digit, then the remainder of the stream is a suffix like "-beta" else: break # Remove the periods and pad the numbers if necessary - version = ''.join([section.zfill(2) for section in version.rstrip('.').split('.')]) + version = "".join([section.zfill(2) for section in version.rstrip(".").split(".")]) if version: if right_pad: - version += (6 - len(version)) * '0' + version += (6 - len(version)) * "0" result = float(version) @@ -908,68 +938,79 @@ class ModuleBuild(MBSBase): for bm in conf.base_module_names: # xmd is a GLib Variant and doesn't support .get() syntax try: - bm_dict = xmd['mbs']['buildrequires'].get(bm) + bm_dict = xmd["mbs"]["buildrequires"].get(bm) except KeyError: - raise RuntimeError( - 'The module\'s mmd is missing information in the xmd section') + raise RuntimeError("The module's mmd is missing information in the xmd section") if not bm_dict: continue base_module = self.get_build_from_nsvc( - db_session, bm, bm_dict['stream'], bm_dict['version'], bm_dict['context']) + db_session, bm, bm_dict["stream"], bm_dict["version"], bm_dict["context"] + ) if not base_module: - log.error('Module #{} buildrequires "{}" but it wasn\'t found in the database' - .format(self.id, repr(bm_dict))) + log.error( + 'Module #{} buildrequires "{}" but it wasn\'t found in the database'.format( + self.id, repr(bm_dict)) + ) continue rv.append(base_module) return rv def __repr__(self): - return (("") - % (self.name, self.id, self.stream, self.version, self.scratch, - INVERSE_BUILD_STATES[self.state], self.batch, self.state_reason)) + return ( + "" + ) % ( + self.name, + self.id, + self.stream, + self.version, + self.scratch, + INVERSE_BUILD_STATES[self.state], + self.batch, + self.state_reason, + ) class VirtualStream(MBSBase): - __tablename__ = 'virtual_streams' + __tablename__ = "virtual_streams" id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String, nullable=False, unique=True) module_builds = db.relationship( - 'ModuleBuild', - secondary=module_builds_to_virtual_streams, - back_populates='virtual_streams', + "ModuleBuild", secondary=module_builds_to_virtual_streams, back_populates="virtual_streams" ) def __repr__(self): - return ''.format(self.id, self.name) + return "".format(self.id, self.name) class ModuleBuildTrace(MBSBase): __tablename__ = "module_builds_trace" id = db.Column(db.Integer, primary_key=True) - module_id = db.Column(db.Integer, db.ForeignKey('module_builds.id'), nullable=False) + module_id = db.Column(db.Integer, db.ForeignKey("module_builds.id"), nullable=False) state_time = db.Column(db.DateTime, nullable=False) state = db.Column(db.Integer, nullable=True) state_reason = db.Column(db.String, nullable=True) - module_build = db.relationship('ModuleBuild', backref='module_builds_trace', lazy=False) + module_build = db.relationship("ModuleBuild", 
backref="module_builds_trace", lazy=False) def json(self): retval = { - 'id': self.id, - 'module_id': self.module_id, - 'state_time': _utc_datetime_to_iso(self.state_time), - 'state': self.state, - 'state_reason': self.state_reason, + "id": self.id, + "module_id": self.module_id, + "state_time": _utc_datetime_to_iso(self.state_time), + "state": self.state, + "state_reason": self.state_reason, } return retval def __repr__(self): - return ("" - % (self.id, self.module_id, self.state_time, self.state, self.state_reason)) + return ( + "" + % (self.id, self.module_id, self.state_time, self.state, self.state_reason) + ) class ComponentBuild(MBSBase): @@ -1002,10 +1043,9 @@ class ComponentBuild(MBSBase): # component is not currently part of a batch. batch = db.Column(db.Integer, default=0) - module_id = db.Column(db.Integer, db.ForeignKey('module_builds.id'), nullable=False) - module_build = db.relationship('ModuleBuild', backref='component_builds', lazy=False) - reused_component_id = db.Column( - db.Integer, db.ForeignKey('component_builds.id')) + module_id = db.Column(db.Integer, db.ForeignKey("module_builds.id"), nullable=False) + module_build = db.relationship("ModuleBuild", backref="component_builds", lazy=False) + reused_component_id = db.Column(db.Integer, db.ForeignKey("component_builds.id")) # Weight defines the complexity of the component build as calculated by the builder's # get_build_weights function @@ -1015,45 +1055,49 @@ class ComponentBuild(MBSBase): def from_component_event(cls, session, event): if isinstance(event, module_build_service.messaging.KojiBuildChange): if event.module_build_id: - return session.query(cls).filter_by( - task_id=event.task_id, module_id=event.module_build_id)\ + return ( + session.query(cls) + .filter_by(task_id=event.task_id, module_id=event.module_build_id) .one() + ) else: - return session.query(cls).filter( - cls.task_id == event.task_id).first() + return session.query(cls).filter(cls.task_id == event.task_id).first() else: - raise ValueError("%r is not a koji message." % event['topic']) + raise ValueError("%r is not a koji message." % event["topic"]) @classmethod def from_component_name(cls, session, component_name, module_id): - return session.query(cls).filter_by( - package=component_name, module_id=module_id).first() + return session.query(cls).filter_by(package=component_name, module_id=module_id).first() @classmethod def from_component_nvr(cls, session, nvr, module_id): return session.query(cls).filter_by(nvr=nvr, module_id=module_id).first() def state_trace(self, component_id): - return ComponentBuildTrace.query.filter_by( - component_id=component_id).order_by(ComponentBuildTrace.state_time).all() + return ( + ComponentBuildTrace.query.filter_by(component_id=component_id) + .order_by(ComponentBuildTrace.state_time) + .all() + ) def json(self): retval = { - 'id': self.id, - 'package': self.package, - 'format': self.format, - 'task_id': self.task_id, - 'state': self.state, - 'state_reason': self.state_reason, - 'module_build': self.module_id, - 'nvr': self.nvr + "id": self.id, + "package": self.package, + "format": self.format, + "task_id": self.task_id, + "state": self.state, + "state_reason": self.state_reason, + "module_build": self.module_id, + "nvr": self.nvr, } try: # Koji is py2 only, so this fails if the main web process is # running on py3. 
import koji - retval['state_name'] = koji.BUILD_STATES.get(self.state) + + retval["state_name"] = koji.BUILD_STATES.get(self.state) except ImportError: pass @@ -1070,72 +1114,91 @@ class ComponentBuild(MBSBase): json = self.json() state_url = None if show_state_url: - state_url = get_url_for('component_build', api_version=api_version, id=self.id) + state_url = get_url_for("component_build", api_version=api_version, id=self.id) json.update({ - 'batch': self.batch, - 'state_trace': [{'time': _utc_datetime_to_iso(record.state_time), - 'state': record.state, - 'state_name': INVERSE_BUILD_STATES[record.state], - 'reason': record.state_reason} - for record - in self.state_trace(self.id)], - 'state_url': state_url + "batch": self.batch, + "state_trace": [ + { + "time": _utc_datetime_to_iso(record.state_time), + "state": record.state, + "state_name": INVERSE_BUILD_STATES[record.state], + "reason": record.state_reason, + } + for record in self.state_trace(self.id) + ], + "state_url": state_url, }) return json def __repr__(self): return "" % ( - self.package, self.module_id, self.state, self.task_id, self.batch, self.state_reason) + self.package, + self.module_id, + self.state, + self.task_id, + self.batch, + self.state_reason, + ) class ComponentBuildTrace(MBSBase): __tablename__ = "component_builds_trace" id = db.Column(db.Integer, primary_key=True) - component_id = db.Column(db.Integer, db.ForeignKey('component_builds.id'), nullable=False) + component_id = db.Column(db.Integer, db.ForeignKey("component_builds.id"), nullable=False) state_time = db.Column(db.DateTime, nullable=False) state = db.Column(db.Integer, nullable=True) state_reason = db.Column(db.String, nullable=True) task_id = db.Column(db.Integer, nullable=True) - component_build = db.relationship('ComponentBuild', backref='component_builds_trace', - lazy=False) + component_build = db.relationship( + "ComponentBuild", backref="component_builds_trace", lazy=False + ) def json(self): retval = { - 'id': self.id, - 'component_id': self.component_id, - 'state_time': _utc_datetime_to_iso(self.state_time), - 'state': self.state, - 'state_reason': self.state_reason, - 'task_id': self.task_id, + "id": self.id, + "component_id": self.component_id, + "state_time": _utc_datetime_to_iso(self.state_time), + "state": self.state, + "state_reason": self.state_reason, + "task_id": self.task_id, } return retval def __repr__(self): - return ("") % (self.id, self.component_id, self.state_time, - self.state, self.state_reason, self.task_id) + return ( + "" + ) % ( + self.id, + self.component_id, + self.state_time, + self.state, + self.state_reason, + self.task_id, + ) def session_before_commit_handlers(session): # new and updated items - for item in (set(session.new) | set(session.dirty)): + for item in set(session.new) | set(session.dirty): # handlers for component builds if isinstance(item, ComponentBuild): cbt = ComponentBuildTrace( state_time=datetime.utcnow(), state=item.state, state_reason=item.state_reason, - task_id=item.task_id) + task_id=item.task_id, + ) # To fully support append, the hook must be tied to the session item.component_builds_trace.append(cbt) -@sqlalchemy.event.listens_for(ModuleBuild, 'before_insert') -@sqlalchemy.event.listens_for(ModuleBuild, 'before_update') +@sqlalchemy.event.listens_for(ModuleBuild, "before_insert") +@sqlalchemy.event.listens_for(ModuleBuild, "before_update") def new_and_update_module_handler(mapper, session, target): # Only modify time_modified if it wasn't explicitly set - if not 
db.inspect(target).get_history('time_modified', True).has_changes(): + if not db.inspect(target).get_history("time_modified", True).has_changes(): target.time_modified = datetime.utcnow() diff --git a/module_build_service/monitor.py b/module_build_service/monitor.py index d6e455a7..bbe9587a 100644 --- a/module_build_service/monitor.py +++ b/module_build_service/monitor.py @@ -27,77 +27,76 @@ import tempfile from flask import Blueprint, Response from prometheus_client import ( # noqa: F401 - ProcessCollector, CollectorRegistry, Counter, multiprocess, Histogram, generate_latest, - start_http_server, CONTENT_TYPE_LATEST) + ProcessCollector, + CollectorRegistry, + Counter, + multiprocess, + Histogram, + generate_latest, + start_http_server, + CONTENT_TYPE_LATEST, +) from sqlalchemy import event # Service-specific imports from module_build_service.utils import cors_header, validate_api_version -if not os.environ.get('prometheus_multiproc_dir'): - os.environ.setdefault('prometheus_multiproc_dir', tempfile.mkdtemp()) +if not os.environ.get("prometheus_multiproc_dir"): + os.environ.setdefault("prometheus_multiproc_dir", tempfile.mkdtemp()) registry = CollectorRegistry() ProcessCollector(registry=registry) multiprocess.MultiProcessCollector(registry) -if os.getenv('MONITOR_STANDALONE_METRICS_SERVER_ENABLE', 'false') == 'true': - port = os.getenv('MONITOR_STANDALONE_METRICS_SERVER_PORT', '10040') +if os.getenv("MONITOR_STANDALONE_METRICS_SERVER_ENABLE", "false") == "true": + port = os.getenv("MONITOR_STANDALONE_METRICS_SERVER_PORT", "10040") start_http_server(int(port), registry=registry) # Generic metrics messaging_rx_counter = Counter( - 'messaging_rx', - 'Total number of messages received', - registry=registry) + "messaging_rx", "Total number of messages received", registry=registry +) messaging_rx_processed_ok_counter = Counter( - 'messaging_rx_processed_ok', - 'Number of received messages, which were processed successfully', - registry=registry) + "messaging_rx_processed_ok", + "Number of received messages, which were processed successfully", + registry=registry, +) messaging_rx_failed_counter = Counter( - 'messaging_rx_failed', - 'Number of received messages, which failed during processing', - registry=registry) + "messaging_rx_failed", + "Number of received messages, which failed during processing", + registry=registry, +) messaging_tx_to_send_counter = Counter( - 'messaging_tx_to_send', - 'Total number of messages to send', - registry=registry) + "messaging_tx_to_send", "Total number of messages to send", registry=registry +) messaging_tx_sent_ok_counter = Counter( - 'messaging_tx_sent_ok', - 'Number of messages, which were sent successfully', - registry=registry) + "messaging_tx_sent_ok", "Number of messages, which were sent successfully", registry=registry +) messaging_tx_failed_counter = Counter( - 'messaging_tx_failed', - 'Number of messages, for which the sender failed', - registry=registry) + "messaging_tx_failed", "Number of messages, for which the sender failed", registry=registry +) builder_success_counter = Counter( - 'builds_success', - 'Number of successful builds', - registry=registry) + "builds_success", "Number of successful builds", registry=registry +) builder_failed_counter = Counter( - 'builds_failed_total', - 'Number of failed builds', - labelnames=['reason'], # reason could be: 'user', 'infra', 'unspec' - registry=registry) + "builds_failed_total", + "Number of failed builds", + labelnames=["reason"], # reason could be: 'user', 'infra', 'unspec' + registry=registry, +) 
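# Usage sketch for the two builder metrics above, matching how models.py
# increments them on module state transitions:
#
#     builder_success_counter.inc()
#     builder_failed_counter.labels(reason="infra").inc()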
-db_dbapi_error_counter = Counter( - 'db_dbapi_error', - 'Number of DBAPI errors', - registry=registry) +db_dbapi_error_counter = Counter("db_dbapi_error", "Number of DBAPI errors", registry=registry) db_engine_connect_counter = Counter( - 'db_engine_connect', - 'Number of \'engine_connect\' events', - registry=registry) + "db_engine_connect", "Number of 'engine_connect' events", registry=registry +) db_handle_error_counter = Counter( - 'db_handle_error', - 'Number of exceptions during connection', - registry=registry) + "db_handle_error", "Number of exceptions during connection", registry=registry +) db_transaction_rollback_counter = Counter( - 'db_transaction_rollback', - 'Number of transactions, which were rolled back', - registry=registry) + "db_transaction_rollback", "Number of transactions, which were rolled back", registry=registry +) # Service-specific metrics # XXX: TODO @@ -110,31 +109,29 @@ def db_hook_event_listeners(target=None): if not target: target = db.engine - @event.listens_for(target, 'dbapi_error', named=True) + @event.listens_for(target, "dbapi_error", named=True) def receive_dbapi_error(**kw): db_dbapi_error_counter.inc() - @event.listens_for(target, 'engine_connect') + @event.listens_for(target, "engine_connect") def receive_engine_connect(conn, branch): db_engine_connect_counter.inc() - @event.listens_for(target, 'handle_error') + @event.listens_for(target, "handle_error") def receive_handle_error(exception_context): db_handle_error_counter.inc() - @event.listens_for(target, 'rollback') + @event.listens_for(target, "rollback") def receive_rollback(conn): db_transaction_rollback_counter.inc() monitor_api = Blueprint( - 'monitor', __name__, - url_prefix='/module-build-service//monitor') + "monitor", __name__, url_prefix="/module-build-service//monitor") @cors_header() @validate_api_version() -@monitor_api.route('/metrics') +@monitor_api.route("/metrics") def metrics(api_version): - return Response(generate_latest(registry), - content_type=CONTENT_TYPE_LATEST) + return Response(generate_latest(registry), content_type=CONTENT_TYPE_LATEST) diff --git a/module_build_service/proxy.py b/module_build_service/proxy.py index 0b753514..f820add9 100644 --- a/module_build_service/proxy.py +++ b/module_build_service/proxy.py @@ -31,29 +31,30 @@ Source: http://flask.pocoo.org/snippets/35/ by Peter Hansen class ReverseProxy(object): - '''Wrap the application in this middleware and configure the + """Wrap the application in this middleware and configure the front-end server to add these headers, to let you quietly bind this to a URL other than / and to an HTTP scheme that is different than what is used locally. 
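
    Usage sketch (the header names match the handling below; the front-end
    proxy configuration itself is an assumption, not part of this repo):

        app.wsgi_app = ReverseProxy(app.wsgi_app)
        # the front end then sets X-Script-Name, X-Forwarded-Host and X-Scheme
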
:param app: the WSGI application - ''' + """ + def __init__(self, app): self.app = app def __call__(self, environ, start_response): - script_name = environ.get('HTTP_X_SCRIPT_NAME', '') + script_name = environ.get("HTTP_X_SCRIPT_NAME", "") if script_name: - environ['SCRIPT_NAME'] = script_name - path_info = environ['PATH_INFO'] + environ["SCRIPT_NAME"] = script_name + path_info = environ["PATH_INFO"] if path_info.startswith(script_name): - environ['PATH_INFO'] = path_info[len(script_name):] + environ["PATH_INFO"] = path_info[len(script_name):] - server = environ.get('HTTP_X_FORWARDED_HOST', '') + server = environ.get("HTTP_X_FORWARDED_HOST", "") if server: - environ['HTTP_HOST'] = server + environ["HTTP_HOST"] = server - scheme = environ.get('HTTP_X_SCHEME', '') + scheme = environ.get("HTTP_X_SCHEME", "") if scheme: - environ['wsgi.url_scheme'] = scheme + environ["wsgi.url_scheme"] = scheme return self.app(environ, start_response) diff --git a/module_build_service/resolver/DBResolver.py b/module_build_service/resolver/DBResolver.py index 0b9080d3..fbc75a87 100644 --- a/module_build_service/resolver/DBResolver.py +++ b/module_build_service/resolver/DBResolver.py @@ -36,13 +36,14 @@ class DBResolver(GenericResolver): """ Resolver using the MBS database """ - backend = 'db' + + backend = "db" def __init__(self, config): self.config = config def _get_module( - self, name, stream, version, context, state=models.BUILD_STATES['ready'], strict=False, + self, name, stream, version, context, state=models.BUILD_STATES["ready"], strict=False ): with models.make_session(self.config) as session: mb = models.ModuleBuild.get_build_from_nsvc( @@ -52,7 +53,7 @@ class DBResolver(GenericResolver): if strict: raise UnprocessableEntity( - 'Cannot find any module builds for %s:%s' % (name, stream)) + "Cannot find any module builds for %s:%s" % (name, stream)) def get_module_count(self, **kwargs): """ @@ -79,14 +80,22 @@ class DBResolver(GenericResolver): # Cast the version as an integer so that we get proper ordering module = query.order_by( models.ModuleBuild.stream_version.desc(), - sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc() + sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc(), ).first() if module: return load_mmd(module.modulemd) - def get_module_modulemds(self, name, stream, version=None, context=None, strict=False, - stream_version_lte=False, virtual_streams=None): + def get_module_modulemds( + self, + name, + stream, + version=None, + context=None, + strict=False, + stream_version_lte=False, + virtual_streams=None, + ): """ Gets the module modulemds from the resolver. 
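
        Call sketch (argument values illustrative):

            mmds = resolver.get_module_modulemds(
                "platform", "f28", stream_version_lte=True)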
:param name: a string of the module's name @@ -108,18 +117,18 @@ class DBResolver(GenericResolver): mmd = self._get_module(name, stream, version, context, strict=strict) if mmd is None: return - return [load_mmd(mmd['modulemd'])] + return [load_mmd(mmd["modulemd"])] with models.make_session(self.config) as session: if not version and not context: - if (stream_version_lte and len(str(models.ModuleBuild.get_stream_version( - stream, right_pad=False))) >= 5): + if stream_version_lte and ( + len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5 + ): stream_version = models.ModuleBuild.get_stream_version(stream) builds = models.ModuleBuild.get_last_builds_in_stream_version_lte( session, name, stream_version, virtual_streams) else: - builds = models.ModuleBuild.get_last_builds_in_stream( - session, name, stream) + builds = models.ModuleBuild.get_last_builds_in_stream(session, name, stream) else: raise NotImplementedError( "This combination of name/stream/version/context is not implemented") @@ -146,7 +155,7 @@ class DBResolver(GenericResolver): query = session.query(models.ModuleBuild) query = query.filter_by(name=name, stream=stream, state=models.BUILD_STATES["ready"]) - module_br_alias = aliased(models.ModuleBuild, name='module_br') + module_br_alias = aliased(models.ModuleBuild, name="module_br") # Shorten this table name for clarity in the query below mb_to_br = models.module_builds_to_module_buildrequires # The following joins get added: @@ -154,14 +163,17 @@ class DBResolver(GenericResolver): # ON module_builds_to_module_buildrequires.module_id = module_builds.id # JOIN module_builds AS module_br # ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id - query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id)\ - .join(module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id) + query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join( + module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id) # Get only modules buildrequiring particular base_module_nsvc n, s, v, c = base_module_nsvc.split(":") query = query.filter( - module_br_alias.name == n, module_br_alias.stream == s, - module_br_alias.version == v, module_br_alias.context == c) + module_br_alias.name == n, + module_br_alias.stream == s, + module_br_alias.version == v, + module_br_alias.context == c, + ) query = query.order_by( sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc()) all_builds = query.all() @@ -178,8 +190,12 @@ class DBResolver(GenericResolver): builds.append(build) mmds = [build.mmd() for build in builds] - nsvcs = [":".join([mmd.get_name(), mmd.get_stream(), - str(mmd.get_version()), mmd.get_context()]) for mmd in mmds] + nsvcs = [ + ":".join( + [mmd.get_name(), mmd.get_stream(), str(mmd.get_version()), mmd.get_context()] + ) + for mmd in mmds + ] log.debug("Found: %r", nsvcs) return mmds @@ -198,12 +214,12 @@ class DBResolver(GenericResolver): for key in keys: results[key] = set() with models.make_session(self.config) as session: - for module_name, module_info in mmd.get_xmd()['mbs']['buildrequires'].items(): + for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items(): local_modules = models.ModuleBuild.local_modules( - session, module_name, module_info['stream']) + session, module_name, module_info["stream"]) if local_modules: local_module = local_modules[0] - log.info('Using local module {0!r} to resolve profiles.'.format(local_module)) + log.info("Using local 
module {0!r} to resolve profiles.".format(local_module)) dep_mmd = local_module.mmd() for key in keys: if key in dep_mmd.get_profiles().keys(): @@ -211,12 +227,22 @@ class DBResolver(GenericResolver): continue build = models.ModuleBuild.get_build_from_nsvc( - session, module_name, module_info['stream'], module_info['version'], - module_info['context'], state=models.BUILD_STATES['ready']) + session, + module_name, + module_info["stream"], + module_info["version"], + module_info["context"], + state=models.BUILD_STATES["ready"], + ) if not build: - raise UnprocessableEntity('The module {}:{}:{}:{} was not found'.format( - module_name, module_info['stream'], module_info['version'], - module_info['context'])) + raise UnprocessableEntity( + "The module {}:{}:{}:{} was not found".format( + module_name, + module_info["stream"], + module_info["version"], + module_info["context"], + ) + ) dep_mmd = build.mmd() # Take note of what rpms are in this dep's profile @@ -227,8 +253,9 @@ class DBResolver(GenericResolver): # Return the union of all rpms in all profiles of the given keys return results - def get_module_build_dependencies(self, name=None, stream=None, version=None, context=None, - mmd=None, strict=False): + def get_module_build_dependencies( + self, name=None, stream=None, version=None, context=None, mmd=None, strict=False + ): """ Returns a dictionary of koji_tag:[mmd, ...] of all the dependencies of input module. @@ -247,44 +274,57 @@ class DBResolver(GenericResolver): :return: a dictionary """ if mmd: - log.debug('get_module_build_dependencies(mmd={0!r} strict={1!r})'.format(mmd, strict)) + log.debug("get_module_build_dependencies(mmd={0!r} strict={1!r})".format(mmd, strict)) elif any(x is None for x in [name, stream, version, context]): - raise RuntimeError('The name, stream, version, and/or context weren\'t specified') + raise RuntimeError("The name, stream, version, and/or context weren't specified") else: version = str(version) - log.debug('get_module_build_dependencies({0}, strict={1!r})'.format( - ', '.join([name, stream, str(version), context]), strict)) + log.debug( + "get_module_build_dependencies({0}, strict={1!r})".format( + ", ".join([name, stream, str(version), context]), strict) + ) module_tags = {} with models.make_session(self.config) as session: if mmd: queried_mmd = mmd - nsvc = ':'.join([ - mmd.get_name(), mmd.get_stream(), str(mmd.get_version()), - mmd.get_context() or models.DEFAULT_MODULE_CONTEXT]) + nsvc = ":".join([ + mmd.get_name(), + mmd.get_stream(), + str(mmd.get_version()), + mmd.get_context() or models.DEFAULT_MODULE_CONTEXT, + ]) else: build = models.ModuleBuild.get_build_from_nsvc( session, name, stream, version, context) if not build: - raise UnprocessableEntity('The module {} was not found'.format( - ':'.join([name, stream, version, context]))) + raise UnprocessableEntity( + "The module {} was not found".format( + ":".join([name, stream, version, context])) + ) queried_mmd = build.mmd() - nsvc = ':'.join([name, stream, version, context]) + nsvc = ":".join([name, stream, version, context]) - xmd_mbs = queried_mmd.get_xmd().get('mbs') - if not xmd_mbs or 'buildrequires' not in xmd_mbs.keys(): + xmd_mbs = queried_mmd.get_xmd().get("mbs") + if not xmd_mbs or "buildrequires" not in xmd_mbs.keys(): raise RuntimeError( - 'The module {} did not contain its modulemd or did not have ' - 'its xmd attribute filled out in MBS'.format(nsvc)) + "The module {} did not contain its modulemd or did not have " + "its xmd attribute filled out in MBS".format(nsvc) + ) - 
buildrequires = xmd_mbs['buildrequires'] + buildrequires = xmd_mbs["buildrequires"] for br_name, details in buildrequires.items(): build = models.ModuleBuild.get_build_from_nsvc( - session, br_name, details['stream'], details['version'], details['context'], - state=models.BUILD_STATES['ready']) + session, + br_name, + details["stream"], + details["version"], + details["context"], + state=models.BUILD_STATES["ready"], + ) if not build: raise RuntimeError( - 'Buildrequired module %s %r does not exist in MBS db' % (br_name, details)) + "Buildrequired module %s %r does not exist in MBS db" % (br_name, details)) # If the buildrequire is a meta-data only module with no Koji tag set, then just # skip it @@ -325,11 +365,11 @@ class DBResolver(GenericResolver): if local_modules: local_build = local_modules[0] new_requires[module_name] = { - 'ref': None, - 'stream': local_build.stream, - 'version': local_build.version, - 'context': local_build.context, - 'koji_tag': local_build.koji_tag, + "ref": None, + "stream": local_build.stream, + "version": local_build.version, + "context": local_build.context, + "koji_tag": local_build.koji_tag, } continue @@ -341,29 +381,31 @@ class DBResolver(GenericResolver): session, module_name, module_stream, module_version, module_context) if not build: - raise UnprocessableEntity('The module {} was not found'.format(nsvc)) + raise UnprocessableEntity("The module {} was not found".format(nsvc)) commit_hash = None mmd = build.mmd() - mbs_xmd = mmd.get_xmd().get('mbs') - if mbs_xmd and 'commit' in mbs_xmd.keys(): - commit_hash = mbs_xmd['commit'] + mbs_xmd = mmd.get_xmd().get("mbs") + if mbs_xmd and "commit" in mbs_xmd.keys(): + commit_hash = mbs_xmd["commit"] else: raise RuntimeError( - 'The module "{0}" didn\'t contain a commit hash in its xmd' - .format(module_name)) + 'The module "{0}" didn\'t contain a commit hash in its xmd'.format( + module_name) + ) if "mse" not in mbs_xmd.keys() or not mbs_xmd["mse"]: raise RuntimeError( 'The module "{}" is not built using Module Stream Expansion. ' - 'Please rebuild this module first'.format(nsvc)) + "Please rebuild this module first".format(nsvc) + ) new_requires[module_name] = { - 'ref': commit_hash, - 'stream': module_stream, - 'version': build.version, - 'context': build.context, - 'koji_tag': build.koji_tag, + "ref": commit_hash, + "stream": module_stream, + "version": build.version, + "context": build.context, + "koji_tag": build.koji_tag, } return new_requires diff --git a/module_build_service/resolver/LocalResolver.py b/module_build_service/resolver/LocalResolver.py index ffa55840..6a5a90de 100644 --- a/module_build_service/resolver/LocalResolver.py +++ b/module_build_service/resolver/LocalResolver.py @@ -31,7 +31,8 @@ class LocalResolver(DBResolver): It is subclass of DBResolver with small changes to DBResolver logic to fit the offline local module builds. See particular methods for more information. 
""" - backend = 'local' + + backend = "local" def get_buildrequired_modulemds(self, name, stream, base_module_nsvc): """ diff --git a/module_build_service/resolver/MBSResolver.py b/module_build_service/resolver/MBSResolver.py index d7cc24c9..2f09b8a2 100644 --- a/module_build_service/resolver/MBSResolver.py +++ b/module_build_service/resolver/MBSResolver.py @@ -64,7 +64,7 @@ class MBSResolver(GenericResolver): "stream": stream, "state": state, "verbose": True, - "order_desc_by": "version" + "order_desc_by": "version", } if version is not None: query["version"] = str(version) @@ -72,8 +72,9 @@ class MBSResolver(GenericResolver): query["context"] = context return query - def _get_modules(self, name, stream, version=None, context=None, state="ready", strict=False, - **kwargs): + def _get_modules( + self, name, stream, version=None, context=None, state="ready", strict=False, **kwargs + ): """Query and return modules from MBS with specific info :param str name: module's name. @@ -133,11 +134,7 @@ class MBSResolver(GenericResolver): :return: the number of modules that match the provided filter :rtype: int """ - query = { - "page": 1, - "per_page": 1, - "short": True, - } + query = {"page": 1, "per_page": 1, "short": True} query.update(kwargs) res = self.session.get(self.mbs_prod_url, params=query) if not res.ok: @@ -171,8 +168,16 @@ class MBSResolver(GenericResolver): if data["items"]: return load_mmd(data["items"][0]["modulemd"]) - def get_module_modulemds(self, name, stream, version=None, context=None, strict=False, - stream_version_lte=False, virtual_streams=None): + def get_module_modulemds( + self, + name, + stream, + version=None, + context=None, + strict=False, + stream_version_lte=False, + virtual_streams=None, + ): """ Gets the module modulemds from the resolver. :param name: a string of the module's name @@ -197,8 +202,9 @@ class MBSResolver(GenericResolver): return [m.mmd() for m in local_modules] extra_args = {} - if (stream_version_lte and len(str(models.ModuleBuild.get_stream_version( - stream, right_pad=False))) >= 5): + if stream_version_lte and ( + len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5 + ): stream_version = models.ModuleBuild.get_stream_version(stream) extra_args["stream_version_lte"] = stream_version @@ -212,7 +218,7 @@ class MBSResolver(GenericResolver): mmds = [] for module in modules: if module: - yaml = module['modulemd'] + yaml = module["modulemd"] if not yaml: if strict: @@ -236,9 +242,8 @@ class MBSResolver(GenericResolver): :rtype: list :return: List of modulemd metadata. 
""" - modules = self._get_modules(name, stream, strict=False, - base_module_br=base_module_nsvc) - return [load_mmd(module['modulemd']) for module in modules] + modules = self._get_modules(name, stream, strict=False, base_module_br=base_module_nsvc) + return [load_mmd(module["modulemd"]) for module in modules] def resolve_profiles(self, mmd, keys): """ @@ -258,13 +263,12 @@ class MBSResolver(GenericResolver): results = {} for key in keys: results[key] = set() - for module_name, module_info in mmd.get_xmd()['mbs']['buildrequires'].items(): + for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items(): local_modules = models.ModuleBuild.local_modules( - db.session, module_name, module_info['stream']) + db.session, module_name, module_info["stream"]) if local_modules: local_module = local_modules[0] - log.info("Using local module %r to resolve profiles.", - local_module) + log.info("Using local module %r to resolve profiles.", local_module) dep_mmd = local_module.mmd() for key in keys: if key in dep_mmd.get_profiles().keys(): @@ -273,11 +277,15 @@ class MBSResolver(GenericResolver): # Find the dep in the built modules in MBS modules = self._get_modules( - module_name, module_info['stream'], module_info['version'], - module_info['context'], strict=True) + module_name, + module_info["stream"], + module_info["version"], + module_info["context"], + strict=True, + ) for module in modules: - yaml = module['modulemd'] + yaml = module["modulemd"] dep_mmd = load_mmd(yaml) # Take note of what rpms are in this dep's profile. for key in keys: @@ -287,8 +295,9 @@ class MBSResolver(GenericResolver): # Return the union of all rpms in all profiles of the given keys. return results - def get_module_build_dependencies(self, name=None, stream=None, version=None, context=None, - mmd=None, strict=False): + def get_module_build_dependencies( + self, name=None, stream=None, version=None, context=None, mmd=None, strict=False + ): """ Returns a dictionary of koji_tag:[mmd, ...] of all the dependencies of input module. @@ -311,11 +320,13 @@ class MBSResolver(GenericResolver): if mmd: log.debug("get_module_build_dependencies(mmd=%r strict=%r)" % (mmd, strict)) elif any(x is None for x in [name, stream, version, context]): - raise RuntimeError('The name, stream, version, and/or context weren\'t specified') + raise RuntimeError("The name, stream, version, and/or context weren't specified") else: version = str(version) - log.debug("get_module_build_dependencies(%s, strict=%r)" - % (', '.join([name, stream, str(version), context]), strict)) + log.debug( + "get_module_build_dependencies(%s, strict=%r)" + % (", ".join([name, stream, str(version), context]), strict) + ) # This is the set we're going to build up and return. 
module_tags = {} @@ -323,22 +334,24 @@ class MBSResolver(GenericResolver): if mmd: queried_mmd = mmd else: - queried_module = self._get_module( - name, stream, version, context, strict=strict) - yaml = queried_module['modulemd'] + queried_module = self._get_module(name, stream, version, context, strict=strict) + yaml = queried_module["modulemd"] queried_mmd = load_mmd(yaml) - if (not queried_mmd or not queried_mmd.get_xmd().get('mbs') or - 'buildrequires' not in queried_mmd.get_xmd()['mbs'].keys()): + if ( + not queried_mmd + or not queried_mmd.get_xmd().get("mbs") + or "buildrequires" not in queried_mmd.get_xmd()["mbs"].keys() + ): raise RuntimeError( 'The module "{0!r}" did not contain its modulemd or did not have ' - 'its xmd attribute filled out in MBS'.format(queried_mmd)) + "its xmd attribute filled out in MBS".format(queried_mmd) + ) - buildrequires = queried_mmd.get_xmd()['mbs']['buildrequires'] + buildrequires = queried_mmd.get_xmd()["mbs"]["buildrequires"] # Queue up the next tier of deps that we should look at.. for name, details in buildrequires.items(): - local_modules = models.ModuleBuild.local_modules( - db.session, name, details['stream']) + local_modules = models.ModuleBuild.local_modules(db.session, name, details["stream"]) if local_modules: for m in local_modules: # If the buildrequire is a meta-data only module with no Koji tag set, then just @@ -351,8 +364,7 @@ class MBSResolver(GenericResolver): if "context" not in details: details["context"] = models.DEFAULT_MODULE_CONTEXT modules = self._get_modules( - name, details['stream'], details['version'], - details['context'], strict=True) + name, details["stream"], details["version"], details["context"], strict=True) for m in modules: if m["koji_tag"] in module_tags: continue @@ -390,21 +402,20 @@ class MBSResolver(GenericResolver): "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc) # Try to find out module dependency in the local module builds # added by utils.load_local_builds(...). - local_modules = models.ModuleBuild.local_modules( - db.session, module_name, module_stream) + local_modules = models.ModuleBuild.local_modules(db.session, module_name, module_stream) if local_modules: local_build = local_modules[0] new_requires[module_name] = { # The commit ID isn't currently saved in modules.yaml - 'ref': None, - 'stream': local_build.stream, - 'version': local_build.version, - 'context': local_build.context, - 'koji_tag': local_build.koji_tag, + "ref": None, + "stream": local_build.stream, + "version": local_build.version, + "context": local_build.context, + "koji_tag": local_build.koji_tag, # No need to set filtered_rpms for local builds, because MBS # filters the RPMs automatically when the module build is # done. 
-                    'filtered_rpms': []
+                    "filtered_rpms": [],
                 }
                 continue
 
@@ -412,12 +423,12 @@
             version = None
             filtered_rpms = []
             module = self._get_module(
-                module_name, module_stream, module_version,
-                module_context, strict=True)
-            if module.get('modulemd'):
-                mmd = load_mmd(module['modulemd'])
-                if mmd.get_xmd().get('mbs') and 'commit' in mmd.get_xmd()['mbs'].keys():
-                    commit_hash = mmd.get_xmd()['mbs']['commit']
+                module_name, module_stream, module_version, module_context, strict=True
+            )
+            if module.get("modulemd"):
+                mmd = load_mmd(module["modulemd"])
+                if mmd.get_xmd().get("mbs") and "commit" in mmd.get_xmd()["mbs"].keys():
+                    commit_hash = mmd.get_xmd()["mbs"]["commit"]
 
             # Find out the particular NVR of filtered packages
             if "rpms" in module and mmd.get_rpm_filter().get():
@@ -433,22 +444,23 @@
                         continue
                     filtered_rpms.append(nvr)
 
-            if module.get('version'):
-                version = module['version']
+            if module.get("version"):
+                version = module["version"]
 
             if version and commit_hash:
                 new_requires[module_name] = {
-                    'ref': commit_hash,
-                    'stream': module_stream,
-                    'version': str(version),
-                    'context': module["context"],
-                    'koji_tag': module['koji_tag'],
-                    'filtered_rpms': filtered_rpms,
+                    "ref": commit_hash,
+                    "stream": module_stream,
+                    "version": str(version),
+                    "context": module["context"],
+                    "koji_tag": module["koji_tag"],
+                    "filtered_rpms": filtered_rpms,
                 }
             else:
                 raise RuntimeError(
                     'The module "{0}" didn\'t contain either a commit hash or a'
-                    ' version in MBS'.format(module_name))
+                    " version in MBS".format(module_name)
+                )
 
             # If the module is a base module, then import it in the database so that entries in
             # the module_builds_to_module_buildrequires table can be created later on
             if module_name in conf.base_module_names:
@@ -457,10 +469,10 @@
         return new_requires
 
     def get_modulemd_by_koji_tag(self, tag):
-        resp = self.session.get(self.mbs_prod_url, params={'koji_tag': tag, 'verbose': True})
+        resp = self.session.get(self.mbs_prod_url, params={"koji_tag": tag, "verbose": True})
         data = resp.json()
-        if data['items']:
-            modulemd = data['items'][0]['modulemd']
+        if data["items"]:
+            modulemd = data["items"][0]["modulemd"]
             return load_mmd(modulemd)
         else:
             return None
diff --git a/module_build_service/resolver/__init__.py b/module_build_service/resolver/__init__.py
index 82cf80e3..b03c9154 100644
--- a/module_build_service/resolver/__init__.py
+++ b/module_build_service/resolver/__init__.py
@@ -28,7 +28,7 @@ from module_build_service.resolver.base import GenericResolver
 
 # NOTE: if you are adding a new resolver to MBS please note that you also have to add
 # a new resolver to your setup.py and update your egg-info
-for entrypoint in pkg_resources.iter_entry_points('mbs.resolver_backends'):
+for entrypoint in pkg_resources.iter_entry_points("mbs.resolver_backends"):
     GenericResolver.register_backend_class(entrypoint.load())
 
 if not GenericResolver.backends:
diff --git a/module_build_service/resolver/base.py b/module_build_service/resolver/base.py
index 149f4adf..7d48d1d9 100644
--- a/module_build_service/resolver/base.py
+++ b/module_build_service/resolver/base.py
@@ -78,13 +78,13 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
     @classmethod
     def supported_builders(cls):
         if cls is GenericResolver:
-            return {k: v['builders'] for k, v in cls._resolvers.items()}
+            return {k: v["builders"] for k, v in cls._resolvers.items()}
         else:
             try:
-                return cls._resolvers[cls.backend]['builders']
+                return 
cls._resolvers[cls.backend]["builders"] except KeyError: - raise RuntimeError("No configuration of builder backends found " - "for resolver {}".format(cls)) + raise RuntimeError( + "No configuration of builder backends found for resolver {}".format(cls)) @classmethod def is_builder_compatible(cls, builder): @@ -100,9 +100,12 @@ class GenericResolver(six.with_metaclass(ABCMeta)): @staticmethod def extract_modulemd(yaml, strict=False): - log.warning('GenericResolver.extract_modulemd is deprecated. Please call ' - 'module_build_service.utils.load_mmd in new code.') + log.warning( + "GenericResolver.extract_modulemd is deprecated. Please call " + "module_build_service.utils.load_mmd in new code." + ) from module_build_service.utils import load_mmd + return load_mmd(yaml) @abstractmethod @@ -114,8 +117,16 @@ class GenericResolver(six.with_metaclass(ABCMeta)): raise NotImplementedError() @abstractmethod - def get_module_modulemds(self, name, stream, version=None, context=None, strict=False, - stream_version_lte=None, virtual_streams=None): + def get_module_modulemds( + self, + name, + stream, + version=None, + context=None, + strict=False, + stream_version_lte=None, + virtual_streams=None, + ): raise NotImplementedError() @abstractmethod @@ -127,8 +138,9 @@ class GenericResolver(six.with_metaclass(ABCMeta)): raise NotImplementedError() @abstractmethod - def get_module_build_dependencies(self, name=None, stream=None, version=None, mmd=None, - context=None, strict=False): + def get_module_build_dependencies( + self, name=None, stream=None, version=None, mmd=None, context=None, strict=False + ): raise NotImplementedError() @abstractmethod diff --git a/module_build_service/scheduler/__init__.py b/module_build_service/scheduler/__init__.py index e19f46c4..b5fe0c3f 100644 --- a/module_build_service/scheduler/__init__.py +++ b/module_build_service/scheduler/__init__.py @@ -7,6 +7,7 @@ import module_build_service.models import module_build_service.scheduler.consumer import logging + log = logging.getLogger(__name__) @@ -17,15 +18,15 @@ def main(initial_messages, stop_condition): """ config = fedmsg.config.load_config() - config['mbsconsumer'] = True - config['mbsconsumer.stop_condition'] = stop_condition - config['mbsconsumer.initial_messages'] = initial_messages + config["mbsconsumer"] = True + config["mbsconsumer.stop_condition"] = stop_condition + config["mbsconsumer.initial_messages"] = initial_messages # Moksha requires that we subscribe to *something*, so tell it /dev/null # since we'll just be doing in-memory queue-based messaging for this single # build. - config['zmq_enabled'] = True - config['zmq_subscribe_endpoints'] = 'ipc:///dev/null' + config["zmq_enabled"] = True + config["zmq_subscribe_endpoints"] = "ipc:///dev/null" consumers = [module_build_service.scheduler.consumer.MBSConsumer] @@ -56,9 +57,11 @@ def make_simple_stop_condition(session): # XXX - We ignore the message here and instead just query the DB. # Grab the latest module build. 
- module = session.query(module_build_service.models.ModuleBuild)\ - .order_by(module_build_service.models.ModuleBuild.id.desc())\ + module = ( + session.query(module_build_service.models.ModuleBuild) + .order_by(module_build_service.models.ModuleBuild.id.desc()) .first() + ) done = ( module_build_service.models.BUILD_STATES["failed"], module_build_service.models.BUILD_STATES["ready"], diff --git a/module_build_service/scheduler/consumer.py b/module_build_service/scheduler/consumer.py index d43fa8e2..956f2fae 100644 --- a/module_build_service/scheduler/consumer.py +++ b/module_build_service/scheduler/consumer.py @@ -58,7 +58,8 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): """ This is triggered by running fedmsg-hub. This class is responsible for ingesting and processing messages from the message bus. """ - config_key = 'mbsconsumer' + + config_key = "mbsconsumer" # It is set to the id of currently handled module build. It is used to # group all the log messages associated with single module build to @@ -70,15 +71,15 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): backends = module_build_service.messaging._messaging_backends prefixes = conf.messaging_topic_prefix # This is a list. - services = backends[conf.messaging]['services'] - suffix = backends[conf.messaging]['topic_suffix'] + services = backends[conf.messaging]["services"] + suffix = backends[conf.messaging]["topic_suffix"] self.topic = [ - '{}.{}{}'.format(prefix.rstrip('.'), category, suffix) + "{}.{}{}".format(prefix.rstrip("."), category, suffix) for prefix, category in itertools.product(prefixes, services) ] if not self.topic: - self.topic = '*' - log.debug('Setting topics: {}'.format(', '.join(self.topic))) + self.topic = "*" + log.debug("Setting topics: {}".format(", ".join(self.topic))) # The call to `super` takes action based on the setting of topics above super(MBSConsumer, self).__init__(hub) @@ -86,13 +87,13 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): # Our call to `super` above should have initialized an `incoming` queue # for us.. but in certain test situations, it does not. So here, # establish a fake `incoming` queue. - if not hasattr(self, 'incoming'): + if not hasattr(self, "incoming"): self.incoming = queue.Queue() # These two values are typically provided either by the unit tests or # by the local build command. 
They are empty in the production environ - self.stop_condition = hub.config.get('mbsconsumer.stop_condition') - initial_messages = hub.config.get('mbsconsumer.initial_messages', []) + self.stop_condition = hub.config.get("mbsconsumer.stop_condition") + initial_messages = hub.config.get("mbsconsumer.initial_messages", []) for msg in initial_messages: self.incoming.put(msg) @@ -108,26 +109,23 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): self.on_build_change = { koji.BUILD_STATES["BUILDING"]: NO_OP, koji.BUILD_STATES[ - "COMPLETE"]: module_build_service.scheduler.handlers.components.complete, + "COMPLETE" + ]: module_build_service.scheduler.handlers.components.complete, + koji.BUILD_STATES["FAILED"]: module_build_service.scheduler.handlers.components.failed, koji.BUILD_STATES[ - "FAILED"]: module_build_service.scheduler.handlers.components.failed, - koji.BUILD_STATES[ - "CANCELED"]: module_build_service.scheduler.handlers.components.canceled, + "CANCELED" + ]: module_build_service.scheduler.handlers.components.canceled, koji.BUILD_STATES["DELETED"]: NO_OP, } self.on_module_change = { - models.BUILD_STATES[ - "init"]: module_build_service.scheduler.handlers.modules.init, - models.BUILD_STATES[ - "wait"]: module_build_service.scheduler.handlers.modules.wait, + models.BUILD_STATES["init"]: module_build_service.scheduler.handlers.modules.init, + models.BUILD_STATES["wait"]: module_build_service.scheduler.handlers.modules.wait, models.BUILD_STATES["build"]: NO_OP, - models.BUILD_STATES[ - "failed"]: module_build_service.scheduler.handlers.modules.failed, - models.BUILD_STATES[ - "done"]: module_build_service.scheduler.handlers.modules.done, + models.BUILD_STATES["failed"]: module_build_service.scheduler.handlers.modules.failed, + models.BUILD_STATES["done"]: module_build_service.scheduler.handlers.modules.done, # XXX: DIRECT TRANSITION TO READY models.BUILD_STATES["ready"]: NO_OP, - models.BUILD_STATES["garbage"]: NO_OP + models.BUILD_STATES["garbage"]: NO_OP, } # Only one kind of repo change event, though... self.on_repo_change = module_build_service.scheduler.handlers.repos.done @@ -138,11 +136,12 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): def shutdown(self): log.info("Scheduling shutdown.") from moksha.hub.reactor import reactor + reactor.callFromThread(self.hub.stop) reactor.callFromThread(reactor.stop) def validate(self, message): - if conf.messaging == 'fedmsg': + if conf.messaging == "fedmsg": # If this is a faked internal message, don't bother. if isinstance(message, module_build_service.messaging.BaseMessage): log.info("Skipping crypto validation for %r" % message) @@ -171,7 +170,7 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): monitor.messaging_rx_processed_ok_counter.inc() except sqlalchemy.exc.OperationalError as error: monitor.messaging_rx_failed_counter.inc() - if 'could not translate host name' in str(error): + if "could not translate host name" in str(error): log.exception( "SQLAlchemy can't resolve DNS records. 
Scheduling fedmsg-hub to shutdown.") self.shutdown() @@ -179,21 +178,20 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): raise except Exception: monitor.messaging_rx_failed_counter.inc() - log.exception('Failed while handling {0!r}'.format(msg)) + log.exception("Failed while handling {0!r}".format(msg)) if self.stop_condition and self.stop_condition(message): self.shutdown() def get_abstracted_msg(self, message): - parser = module_build_service.messaging._messaging_backends[conf.messaging].get('parser') + parser = module_build_service.messaging._messaging_backends[conf.messaging].get("parser") if parser: try: return parser.parse(message) except module_build_service.messaging.IgnoreMessage: pass else: - raise ValueError('{0} backend does not define a message parser' - .format(conf.messaging)) + raise ValueError("{0} backend does not define a message parser".format(conf.messaging)) def sanity_check(self): """ On startup, make sure our implementation is sane. """ @@ -205,17 +203,16 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): if koji.BUILD_STATES[state] not in self.on_build_change: raise KeyError("Koji build states %r not handled." % state) - all_fns = (list(self.on_build_change.items()) + - list(self.on_module_change.items())) + all_fns = list(self.on_build_change.items()) + list(self.on_module_change.items()) for key, callback in all_fns: - expected = ['config', 'session', 'msg'] + expected = ["config", "session", "msg"] if six.PY2: argspec = inspect.getargspec(callback)[0] else: argspec = inspect.getfullargspec(callback)[0] if argspec != expected: - raise ValueError("Callback %r, state %r has argspec %r!=%r" % ( - callback, key, argspec, expected)) + raise ValueError( + "Callback %r, state %r has argspec %r!=%r" % (callback, key, argspec, expected)) def process_message(self, session, msg): # set module build to None and let's populate it later @@ -258,13 +255,17 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): try: further_work = handler(conf, session, msg) or [] except Exception as e: - msg = 'Could not process message handler. See the traceback.' + msg = "Could not process message handler. See the traceback." log.exception(msg) session.rollback() if build: session.refresh(build) - build.transition(conf, state=models.BUILD_STATES['failed'], - state_reason=str(e), failure_type='infra') + build.transition( + conf, + state=models.BUILD_STATES["failed"], + state_reason=str(e), + failure_type="infra", + ) session.commit() log.debug("Done with %s" % idx) @@ -303,7 +304,5 @@ def work_queue_put(msg): def fake_repo_done_message(tag_name): msg = module_build_service.messaging.KojiRepoChange( - msg_id='a faked internal message', - repo_tag=tag_name + "-build", - ) + msg_id="a faked internal message", repo_tag=tag_name + "-build") work_queue_put(msg) diff --git a/module_build_service/scheduler/handlers/components.py b/module_build_service/scheduler/handlers/components.py index 38ac504f..4a853fac 100644 --- a/module_build_service/scheduler/handlers/components.py +++ b/module_build_service/scheduler/handlers/components.py @@ -40,8 +40,7 @@ def _finalize(config, session, msg, state): # First, find our ModuleBuild associated with this component, if any. 
    component_build = models.ComponentBuild.from_component_event(session, msg)
     try:
-        nvr = "{}-{}-{}".format(msg.build_name, msg.build_version,
-                                msg.build_release)
+        nvr = "{}-{}-{}".format(msg.build_name, msg.build_version, msg.build_release)
     except KeyError:
         nvr = None
 
@@ -53,7 +52,7 @@
     if msg.state_reason:
         state_reason = msg.state_reason
-    elif state != koji.BUILD_STATES['COMPLETE']:
+    elif state != koji.BUILD_STATES["COMPLETE"]:
         state_reason = "Failed to build artifact %s in Koji" % (msg.build_name)
     else:
         state_reason = ""
@@ -67,10 +66,13 @@
     parent = component_build.module_build
 
     # If the macro build failed, then the module is doomed.
-    if (component_build.package == 'module-build-macros' and
-            state != koji.BUILD_STATES['COMPLETE']):
-        parent.transition(config, state=models.BUILD_STATES['failed'],
-                          state_reason=state_reason, failure_type='user')
+    if component_build.package == "module-build-macros" and state != koji.BUILD_STATES["COMPLETE"]:
+        parent.transition(
+            config,
+            state=models.BUILD_STATES["failed"],
+            state_reason=state_reason,
+            failure_type="user",
+        )
         session.commit()
         return
 
@@ -80,31 +82,36 @@
     # we can tag all successfully built components in the batch.
     unbuilt_components_in_batch = [
         c for c in parent.current_batch()
-        if c.state == koji.BUILD_STATES['BUILDING'] or not c.state
+        if c.state == koji.BUILD_STATES["BUILDING"] or not c.state
     ]
 
     if not unbuilt_components_in_batch:
         failed_components_in_batch = [
             c for c in parent.current_batch()
-            if (c.state in [koji.BUILD_STATES['FAILED'],
-                            koji.BUILD_STATES['CANCELED']])
+            if (c.state in [koji.BUILD_STATES["FAILED"], koji.BUILD_STATES["CANCELED"]])
         ]
         built_components_in_batch = [
             c for c in parent.current_batch()
-            if c.state == koji.BUILD_STATES['COMPLETE']
+            if c.state == koji.BUILD_STATES["COMPLETE"]
         ]
 
         builder = module_build_service.builder.GenericBuilder.create_from_module(
-            session, parent, config)
+            session, parent, config
+        )
 
         if failed_components_in_batch:
-            log.info("Batch done, but not tagging because of failed component builds. Will "
-                     "transition the module to \"failed\"")
-            state_reason = 'Component(s) {} failed to build.'.format(
-                ', '.join(c.package for c in failed_components_in_batch))
-            parent.transition(config,
-                              state=models.BUILD_STATES['failed'],
-                              state_reason=state_reason, failure_type='user')
+            log.info(
+                "Batch done, but not tagging because of failed component builds. Will "
+                'transition the module to "failed"'
+            )
+            state_reason = "Component(s) {} failed to build.".format(
+                ", ".join(c.package for c in failed_components_in_batch))
+            parent.transition(
+                config,
+                state=models.BUILD_STATES["failed"],
+                state_reason=state_reason,
+                failure_type="user",
+            )
             session.commit()
             return []
         elif not built_components_in_batch:
@@ -112,14 +119,17 @@
             # The repository won't be regenerated in this case and therefore we generate fake repo
             # change message here.
             log.info("Batch done. No component to tag")
-            further_work += [messaging.KojiRepoChange(
-                'components::_finalize: fake msg',
-                builder.module_build_tag['name'])]
+            further_work += [
+                messaging.KojiRepoChange(
+                    "components::_finalize: fake msg", builder.module_build_tag["name"])
+            ]
         else:
             built_component_nvrs_in_batch = [c.nvr for c in built_components_in_batch]
             # tag && add to srpm-build group if necessary
-            log.info("Batch done. Tagging %i component(s) in the build tag."
% len( - built_component_nvrs_in_batch)) + log.info( + "Batch done. Tagging %i component(s) in the build tag." + % len(built_component_nvrs_in_batch) + ) log.debug("%r" % built_component_nvrs_in_batch) # TODO: install=component_build.build_time_only works here because module-build-macros # is alone in its batch and the only component with build_time_only set. All other @@ -129,16 +139,17 @@ def _finalize(config, session, msg, state): built_component_nvrs_in_batch, install=component_build.build_time_only) # Do not tag packages which only belong to the build tag to the dest tag - component_nvrs_to_tag_in_dest = [c.nvr for c in built_components_in_batch - if c.build_time_only is False] - log.info("Tagging %i component(s) in the dest tag." % len( - component_nvrs_to_tag_in_dest)) + component_nvrs_to_tag_in_dest = [ + c.nvr for c in built_components_in_batch + if c.build_time_only is False + ] + log.info( + "Tagging %i component(s) in the dest tag." % len(component_nvrs_to_tag_in_dest)) if component_nvrs_to_tag_in_dest: builder.tag_artifacts(component_nvrs_to_tag_in_dest) session.commit() - elif (any([c.state != koji.BUILD_STATES['BUILDING'] - for c in unbuilt_components_in_batch])): + elif any([c.state != koji.BUILD_STATES["BUILDING"] for c in unbuilt_components_in_batch]): # We are not in the middle of the batch building and # we have some unbuilt components in this batch. We might hit the # concurrent builds threshold in previous call of continue_batch_build @@ -153,12 +164,12 @@ def _finalize(config, session, msg, state): def complete(config, session, msg): - return _finalize(config, session, msg, state=koji.BUILD_STATES['COMPLETE']) + return _finalize(config, session, msg, state=koji.BUILD_STATES["COMPLETE"]) def failed(config, session, msg): - return _finalize(config, session, msg, state=koji.BUILD_STATES['FAILED']) + return _finalize(config, session, msg, state=koji.BUILD_STATES["FAILED"]) def canceled(config, session, msg): - return _finalize(config, session, msg, state=koji.BUILD_STATES['CANCELED']) + return _finalize(config, session, msg, state=koji.BUILD_STATES["CANCELED"]) diff --git a/module_build_service/scheduler/handlers/greenwave.py b/module_build_service/scheduler/handlers/greenwave.py index 88cf7e23..7c39a13b 100644 --- a/module_build_service/scheduler/handlers/greenwave.py +++ b/module_build_service/scheduler/handlers/greenwave.py @@ -42,8 +42,7 @@ def get_corresponding_module_build(nvr): return None try: - module_build_id = build_info['extra']['typeinfo']['module'][ - 'module_build_service_id'] + module_build_id = build_info["extra"]["typeinfo"]["module"]["module_build_service_id"] except KeyError: # If any of the keys is not present, the NVR is not the one for # handling Greenwave event. 
@@ -64,37 +63,50 @@ def decision_update(config, session, msg):
     :type msg: :class:`GreenwaveDecisionUpdate`
     """
     if not config.greenwave_decision_context:
-        log.debug('Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT '
-                  'configured', msg.msg_id)
+        log.debug(
+            "Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT "
+            "configured",
+            msg.msg_id,
+        )
         return
 
     if msg.decision_context != config.greenwave_decision_context:
-        log.debug('Skip Greenwave message %s as MBS only handles messages with the '
-                  'decision context "%s"',
-                  msg.msg_id, config.greenwave_decision_context)
+        log.debug(
+            "Skip Greenwave message %s as MBS only handles messages with the "
+            'decision context "%s"',
+            msg.msg_id,
+            config.greenwave_decision_context,
+        )
         return
 
     module_build_nvr = msg.subject_identifier
 
     if not msg.policies_satisfied:
-        log.debug('Skip to handle module build %s because it has not satisfied'
-                  ' Greenwave policies.',
-                  module_build_nvr)
+        log.debug(
+            "Skipping module build %s because it has not satisfied Greenwave policies.",
+            module_build_nvr,
+        )
         return
 
     build = get_corresponding_module_build(module_build_nvr)
 
     if build is None:
-        log.debug('No corresponding module build of subject_identifier %s is '
-                  'found.', module_build_nvr)
+        log.debug(
+            "No corresponding module build of subject_identifier %s is found.", module_build_nvr)
         return
 
-    if build.state == BUILD_STATES['done']:
+    if build.state == BUILD_STATES["done"]:
         build.transition(
-            conf, BUILD_STATES['ready'],
-            state_reason='Module build {} has satisfied Greenwave policies.'
-            .format(module_build_nvr))
+            conf,
+            BUILD_STATES["ready"],
+            state_reason="Module build {} has satisfied Greenwave policies.".format(
+                module_build_nvr
+            ),
+        )
     else:
-        log.warning('Module build %s is not in done state but Greenwave tells '
-                    'it passes tests in decision context %s',
-                    module_build_nvr, msg.decision_context)
+        log.warning(
+            "Module build %s is not in done state but Greenwave says "
+            "it passes tests in decision context %s",
+            module_build_nvr,
+            msg.decision_context,
+        )
diff --git a/module_build_service/scheduler/handlers/modules.py b/module_build_service/scheduler/handlers/modules.py
index a617bcd1..4a4d1998 100644
--- a/module_build_service/scheduler/handlers/modules.py
+++ b/module_build_service/scheduler/handlers/modules.py
@@ -33,7 +33,8 @@ from module_build_service.utils import (
     record_component_builds,
     get_rpm_release,
     generate_koji_tag,
-    record_filtered_rpms)
+    record_filtered_rpms,
+)
 from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError
 from module_build_service.utils.ursine import handle_stream_collision_modules
 
@@ -64,17 +65,17 @@ def failed(config, session, msg):
     build = models.ModuleBuild.from_module_event(session, msg)
     module_info = build.json()
-    if module_info['state'] != msg.module_build_state:
+    if module_info["state"] != msg.module_build_state:
         log.warning(
-            "Note that retrieved module state %r doesn't match message module"
-            " state %r", module_info['state'], msg.module_build_state)
+            "Note that retrieved module state %r doesn't match message module state %r",
+            module_info["state"], msg.module_build_state,
+        )
         # This is ok.. it's a race condition we can ignore.
pass unbuilt_components = [ c for c in build.component_builds - if (c.state != koji.BUILD_STATES['COMPLETE'] and - c.state != koji.BUILD_STATES["FAILED"]) + if (c.state != koji.BUILD_STATES["COMPLETE"] and c.state != koji.BUILD_STATES["FAILED"]) ] if build.koji_tag: @@ -87,7 +88,7 @@ def failed(config, session, msg): for component in unbuilt_components: if component.task_id: builder.cancel_build(component.task_id) - component.state = koji.BUILD_STATES['FAILED'] + component.state = koji.BUILD_STATES["FAILED"] component.state_reason = build.state_reason session.add(component) @@ -98,13 +99,13 @@ def failed(config, session, msg): if not build.state_reason: reason = "Missing koji tag. Assuming previously failed module lookup." log.error(reason) - build.transition(config, state="failed", state_reason=reason, failure_type='infra') + build.transition(config, state="failed", state_reason=reason, failure_type="infra") session.commit() return # Don't transition it again if it's already been transitioned if build.state != models.BUILD_STATES["failed"]: - build.transition(config, state="failed", failure_type='user') + build.transition(config, state="failed", failure_type="user") session.commit() @@ -122,10 +123,11 @@ def done(config, session, msg): """ build = models.ModuleBuild.from_module_event(session, msg) module_info = build.json() - if module_info['state'] != msg.module_build_state: + if module_info["state"] != msg.module_build_state: log.warning( - "Note that retrieved module state %r doesn't match message module" - " state %r", module_info['state'], msg.module_build_state) + "Note that retrieved module state %r doesn't match message module state %r", + module_info["state"], msg.module_build_state, + ) # This is ok.. it's a race condition we can ignore. pass @@ -148,13 +150,13 @@ def init(config, session, msg): break time.sleep(1) - error_msg = '' - failure_reason = 'unspec' + error_msg = "" + failure_reason = "unspec" try: mmd = build.mmd() record_component_builds(mmd, build, session=session) # The ursine.handle_stream_collision_modules is Koji specific. - if conf.system in ['koji', 'test']: + if conf.system in ["koji", "test"]: handle_stream_collision_modules(mmd) mmd = record_filtered_rpms(mmd) build.modulemd = to_text_type(mmd.dumps()) @@ -163,15 +165,15 @@ def init(config, session, msg): except (UnprocessableEntity, Forbidden, ValidationError, RuntimeError) as e: log.exception(str(e)) error_msg = str(e) - failure_reason = 'user' + failure_reason = "user" except (xmlrpclib.ProtocolError, koji.GenericError) as e: log.exception(str(e)) error_msg = 'Koji communication error: "{0}"'.format(str(e)) - failure_reason = 'infra' + failure_reason = "infra" except Exception as e: log.exception(str(e)) error_msg = "An unknown error occurred while validating the modulemd" - failure_reason = 'user' + failure_reason = "user" else: session.add(build) session.commit() @@ -179,8 +181,12 @@ def init(config, session, msg): if error_msg: # Rollback changes underway session.rollback() - build.transition(conf, models.BUILD_STATES["failed"], state_reason=error_msg, - failure_type=failure_reason) + build.transition( + conf, + models.BUILD_STATES["failed"], + state_reason=error_msg, + failure_type=failure_reason, + ) def generate_module_build_koji_tag(build): @@ -191,17 +197,23 @@ def generate_module_build_koji_tag(build): :return: generated koji tag. 
:rtype: str """ - log.info('Getting tag for %s:%s:%s', build.name, build.stream, build.version) - if conf.system in ['koji', 'test']: - return generate_koji_tag(build.name, build.stream, build.version, build.context, - scratch=build.scratch, scratch_id=build.id) + log.info("Getting tag for %s:%s:%s", build.name, build.stream, build.version) + if conf.system in ["koji", "test"]: + return generate_koji_tag( + build.name, + build.stream, + build.version, + build.context, + scratch=build.scratch, + scratch_id=build.id, + ) else: - return '-'.join(['module', build.name, build.stream, build.version]) + return "-".join(["module", build.name, build.stream, build.version]) @module_build_service.utils.retry( - interval=10, timeout=120, - wait_on=(ValueError, RuntimeError, ConnectionError)) + interval=10, timeout=120, wait_on=(ValueError, RuntimeError, ConnectionError) +) def get_module_build_dependencies(build): """Used by wait handler to get module's build dependencies @@ -212,10 +224,10 @@ def get_module_build_dependencies(build): :rtype: dict[str, Modulemd.Module] """ resolver = module_build_service.resolver.system_resolver - if conf.system in ['koji', 'test']: + if conf.system in ["koji", "test"]: # For Koji backend, query for the module we are going to # build to get the koji_tag and deps from it. - log.info('Getting tag for %s:%s:%s', build.name, build.stream, build.version) + log.info("Getting tag for %s:%s:%s", build.name, build.stream, build.version) return resolver.get_module_build_dependencies( build.name, build.stream, build.version, build.context, strict=True) else: @@ -235,7 +247,7 @@ def get_content_generator_build_koji_tag(module_deps): :return: the koji tag. :rtype: str """ - if conf.system in ['koji', 'test']: + if conf.system in ["koji", "test"]: # Find out the name of Koji tag to which the module's Content # Generator build should be tagged once the build finishes. module_names_streams = { @@ -246,9 +258,11 @@ def get_content_generator_build_koji_tag(module_deps): return conf.koji_cg_build_tag_template.format( module_names_streams[base_module_name]) - log.debug('No configured base module is a buildrequire. Hence use' - ' default content generator build koji tag %s', - conf.koji_cg_default_build_tag) + log.debug( + "No configured base module is a buildrequire. Hence use" + " default content generator build koji tag %s", + conf.koji_cg_default_build_tag, + ) return conf.koji_cg_default_build_tag else: return conf.koji_cg_default_build_tag @@ -270,11 +284,10 @@ def wait(config, session, msg): @module_build_service.utils.retry(interval=10, timeout=120, wait_on=RuntimeError) def _get_build_containing_xmd_for_mbs(): build = models.ModuleBuild.from_module_event(session, msg) - if 'mbs' in build.mmd().get_xmd(): + if "mbs" in build.mmd().get_xmd(): return build session.expire(build) - raise RuntimeError("{!r} doesn't contain xmd information for MBS." - .format(build)) + raise RuntimeError("{!r} doesn't contain xmd information for MBS.".format(build)) build = _get_build_containing_xmd_for_mbs() build_logs.start(build) @@ -283,8 +296,10 @@ def wait(config, session, msg): log.info("%r", build.modulemd) if build.state != msg.module_build_state: - log.warning("Note that retrieved module state %r doesn't match message" - " module state %r", build.state, msg.module_build_state) + log.warning( + "Note that retrieved module state %r doesn't match message module state %r", + build.state, msg.module_build_state, + ) # This is ok.. it's a race condition we can ignore. 
pass @@ -293,7 +308,7 @@ def wait(config, session, msg): except ValueError: reason = "Failed to get module info from MBS. Max retries reached." log.exception(reason) - build.transition(config, state="failed", state_reason=reason, failure_type='infra') + build.transition(config, state="failed", state_reason=reason, failure_type="infra") session.commit() raise @@ -307,23 +322,24 @@ def wait(config, session, msg): build.koji_tag = tag if build.scratch: - log.debug('Assigning Content Generator build koji tag is skipped for' - ' scratch module build.') + log.debug( + "Assigning Content Generator build koji tag is skipped for scratch module build.") elif conf.koji_cg_tag_build: cg_build_koji_tag = get_content_generator_build_koji_tag(build_deps) - log.debug("Assigning Content Generator build koji tag=%s to module build", - cg_build_koji_tag) + log.debug( + "Assigning Content Generator build koji tag=%s to module build", cg_build_koji_tag) build.cg_build_koji_tag = cg_build_koji_tag else: - log.debug('It is disabled to tag module build during importing into' - ' Koji by Content Generator.') - log.debug('Skip to assign Content Generator build koji tag to module build.') + log.debug( + "It is disabled to tag module build during importing into Koji by Content Generator.") + log.debug("Skip to assign Content Generator build koji tag to module build.") - builder = module_build_service.builder.GenericBuilder.create_from_module( - session, build, config) + builder = module_build_service.builder.GenericBuilder.create_from_module(session, build, config) - log.debug("Adding dependencies %s into buildroot for module %s:%s:%s", - build_deps.keys(), build.name, build.stream, build.version) + log.debug( + "Adding dependencies %s into buildroot for module %s:%s:%s", + build_deps.keys(), build.name, build.stream, build.version, + ) builder.buildroot_add_repos(build_deps) if not build.component_builds: @@ -333,14 +349,15 @@ def wait(config, session, msg): session.commit() # Return a KojiRepoChange message so that the build can be transitioned to done # in the repos handler - return [module_build_service.messaging.KojiRepoChange( - 'handlers.modules.wait: fake msg', builder.module_build_tag['name'])] + return [ + module_build_service.messaging.KojiRepoChange( + "handlers.modules.wait: fake msg", builder.module_build_tag["name"]) + ] # If all components in module build will be reused, we don't have to build # module-build-macros, because there won't be any build done. 
if attempt_to_reuse_all_components(builder, session, build): - log.info("All components have been reused for module %r, " - "skipping build" % build) + log.info("All components have been reused for module %r, skipping build" % build) build.transition(config, state="build") session.add(build) session.commit() @@ -352,12 +369,9 @@ def wait(config, session, msg): artifact_name = "module-build-macros" - component_build = models.ComponentBuild.from_component_name( - session, artifact_name, build.id) + component_build = models.ComponentBuild.from_component_name(session, artifact_name, build.id) further_work = [] - srpm = builder.get_disttag_srpm( - disttag=".%s" % get_rpm_release(build), - module_build=build) + srpm = builder.get_disttag_srpm(disttag=".%s" % get_rpm_release(build), module_build=build) if not component_build: component_build = models.ComponentBuild( module_id=build.id, @@ -365,7 +379,7 @@ def wait(config, session, msg): format="rpms", scmurl=srpm, batch=1, - build_time_only=True + build_time_only=True, ) session.add(component_build) # Commit and refresh so that the SQLAlchemy relationships are available @@ -373,7 +387,7 @@ def wait(config, session, msg): session.refresh(component_build) msgs = builder.recover_orphaned_artifact(component_build) if msgs: - log.info('Found an existing module-build-macros build') + log.info("Found an existing module-build-macros build") further_work += msgs # There was no existing artifact found, so lets submit the build instead else: @@ -382,13 +396,13 @@ def wait(config, session, msg): component_build.state = state component_build.reason = reason component_build.nvr = nvr - elif component_build.state != koji.BUILD_STATES['COMPLETE']: + elif component_build.state != koji.BUILD_STATES["COMPLETE"]: # It's possible that the build succeeded in the builder but some other step failed which # caused module-build-macros to be marked as failed in MBS, so check to see if it exists # first msgs = builder.recover_orphaned_artifact(component_build) if msgs: - log.info('Found an existing module-build-macros build') + log.info("Found an existing module-build-macros build") further_work += msgs else: task_id, state, reason, nvr = builder.build(artifact_name=artifact_name, source=srpm) @@ -405,11 +419,12 @@ def wait(config, session, msg): # We always have to regenerate the repository. if config.system == "koji": log.info("Regenerating the repository") - task_id = builder.koji_session.newRepo( - builder.module_build_tag['name']) + task_id = builder.koji_session.newRepo(builder.module_build_tag["name"]) build.new_repo_task_id = task_id session.commit() else: - further_work.append(module_build_service.messaging.KojiRepoChange( - 'fake msg', builder.module_build_tag['name'])) + further_work.append( + module_build_service.messaging.KojiRepoChange( + "fake msg", builder.module_build_tag["name"]) + ) return further_work diff --git a/module_build_service/scheduler/handlers/repos.py b/module_build_service/scheduler/handlers/repos.py index be822a58..d0e457ec 100644 --- a/module_build_service/scheduler/handlers/repos.py +++ b/module_build_service/scheduler/handlers/repos.py @@ -38,10 +38,10 @@ def done(config, session, msg): # First, find our ModuleBuild associated with this repo, if any. 
tag = msg.repo_tag - if config.system in ('koji', 'test') and not tag.endswith('-build'): + if config.system in ("koji", "test") and not tag.endswith("-build"): log.debug("Tag %r does not end with '-build' suffix, ignoring" % tag) return - tag = tag[:-6] if tag.endswith('-build') else tag + tag = tag[:-6] if tag.endswith("-build") else tag module_build = models.ModuleBuild.from_repo_done_event(session, msg) if not module_build: log.debug("No module build found associated with koji tag %r" % tag) @@ -50,17 +50,17 @@ def done(config, session, msg): # It is possible that we have already failed.. but our repo is just being # routinely regenerated. Just ignore that. If module_build_service says the module is # dead, then the module is dead. - if module_build.state == models.BUILD_STATES['failed']: + if module_build.state == models.BUILD_STATES["failed"]: log.info("Ignoring repo regen for already failed %r" % module_build) return # Get the list of untagged components in current/previous batches which # have been built successfully - if config.system in ('koji', 'test') and module_build.component_builds: + if config.system in ("koji", "test") and module_build.component_builds: untagged_components = [ c for c in module_build.up_to_current_batch() - if (not c.tagged or (not c.tagged_in_final and not c.build_time_only)) and - c.state == koji.BUILD_STATES['COMPLETE'] + if (not c.tagged or (not c.tagged_in_final and not c.build_time_only)) + and c.state == koji.BUILD_STATES["COMPLETE"] ] if untagged_components: log.info("Ignoring repo regen, because not all components are tagged.") @@ -76,20 +76,19 @@ def done(config, session, msg): current_batch = module_build.current_batch() # If any in the current batch are still running.. just wait. - running = [c.state == koji.BUILD_STATES['BUILDING'] for c in current_batch] + running = [c.state == koji.BUILD_STATES["BUILDING"] for c in current_batch] if any(running): log.info( "%r has %r of %r components still " - "building in this batch (%r total)" % ( - module_build, len(running), len(current_batch), - len(module_build.component_builds))) + "building in this batch (%r total)" + % (module_build, len(running), len(current_batch), len(module_build.component_builds)) + ) return # Assemble the list of all successful components in the batch. - good = [c for c in current_batch if c.state == koji.BUILD_STATES['COMPLETE']] + good = [c for c in current_batch if c.state == koji.BUILD_STATES["COMPLETE"]] - failed_states = (koji.BUILD_STATES['FAILED'], - koji.BUILD_STATES['CANCELED']) + failed_states = (koji.BUILD_STATES["FAILED"], koji.BUILD_STATES["CANCELED"]) # If *none* of the components completed for this batch, then obviously the # module fails. However! We shouldn't reach this scenario. There is @@ -97,10 +96,10 @@ def done(config, session, msg): # first before we ever get here. This is here as a race condition safety # valve. if module_build.component_builds and not good: - state_reason = 'Component(s) {} failed to build.'.format( - ', '.join(c.package for c in current_batch if c.state in failed_states)) - module_build.transition(config, models.BUILD_STATES['failed'], state_reason, - failure_type='infra') + state_reason = "Component(s) {} failed to build.".format( + ", ".join(c.package for c in current_batch if c.state in failed_states)) + module_build.transition( + config, models.BUILD_STATES["failed"], state_reason, failure_type="infra") session.commit() log.warning("Odd! All components in batch failed for %r." 
% module_build) return @@ -109,8 +108,13 @@ def done(config, session, msg): session, module_build) builder = module_build_service.builder.GenericBuilder.create( - module_build.owner, module_build, config.system, config, - tag_name=tag, components=[c.package for c in module_build.component_builds]) + module_build.owner, + module_build, + config.system, + config, + tag_name=tag, + components=[c.package for c in module_build.component_builds], + ) builder.buildroot_connect(groups) # If we have reached here then we know the following things: @@ -122,8 +126,8 @@ def done(config, session, msg): # So now we can either start a new batch if there are still some to build # or, if everything is built successfully, then we can bless the module as # complete. - has_unbuilt_components = any(c.state in [None, koji.BUILD_STATES['BUILDING']] - for c in module_build.component_builds) + has_unbuilt_components = any( + c.state in [None, koji.BUILD_STATES["BUILDING"]] for c in module_build.component_builds) has_failed_components = any(c.state in failed_states for c in module_build.component_builds) further_work = [] @@ -137,25 +141,27 @@ def done(config, session, msg): # Try to start next batch build, because there are still unbuilt # components in a module. - further_work += start_next_batch_build( - config, module_build, session, builder) + further_work += start_next_batch_build(config, module_build, session, builder) else: if has_failed_components: - state_reason = 'Component(s) {} failed to build.'.format( - ', '.join(c.package for c in module_build.component_builds - if c.state in failed_states) + state_reason = "Component(s) {} failed to build.".format( + ", ".join( + c.package for c in module_build.component_builds if c.state in failed_states + ) + ) + module_build.transition( + config, + state=models.BUILD_STATES["failed"], + state_reason=state_reason, + failure_type="user", ) - module_build.transition(config, - state=models.BUILD_STATES['failed'], - state_reason=state_reason, - failure_type='user') else: # Tell the external buildsystem to wrap up (CG import, createrepo, etc.) module_build.time_completed = datetime.utcnow() builder.finalize(succeeded=True) - module_build.transition(config, state=models.BUILD_STATES['done']) + module_build.transition(config, state=models.BUILD_STATES["done"]) session.commit() return further_work diff --git a/module_build_service/scheduler/handlers/tags.py b/module_build_service/scheduler/handlers/tags.py index b4f1c7c6..77b7f732 100644 --- a/module_build_service/scheduler/handlers/tags.py +++ b/module_build_service/scheduler/handlers/tags.py @@ -44,17 +44,15 @@ def tagged(config, session, msg): return # Find tagged component. - component = models.ComponentBuild.from_component_nvr( - session, msg.nvr, module_build.id) + component = models.ComponentBuild.from_component_nvr(session, msg.nvr, module_build.id) if not component: log.error("No component %s in module %r", msg.nvr, module_build) return - log.info("Saw relevant component tag of %r from %r." % (component.nvr, - msg.msg_id)) + log.info("Saw relevant component tag of %r from %r." 
% (component.nvr, msg.msg_id)) # Mark the component as tagged - if tag.endswith('-build'): + if tag.endswith("-build"): component.tagged = True else: component.tagged_in_final = True @@ -62,19 +60,21 @@ def tagged(config, session, msg): unbuilt_components_in_batch = [ c for c in module_build.current_batch() - if c.state == koji.BUILD_STATES['BUILDING'] or not c.state + if c.state == koji.BUILD_STATES["BUILDING"] or not c.state ] if unbuilt_components_in_batch: - log.info("Not regenerating repo for tag %s, there are still " - "building components in a batch", tag) + log.info( + "Not regenerating repo for tag %s, there are still building components in a batch", + tag, + ) return [] # Get the list of untagged components in current/previous batches which # have been built successfully. untagged_components = [ c for c in module_build.up_to_current_batch() - if (not c.tagged or (not c.tagged_in_final and not c.build_time_only)) and - c.state == koji.BUILD_STATES['COMPLETE'] + if (not c.tagged or (not c.tagged_in_final and not c.build_time_only)) + and c.state == koji.BUILD_STATES["COMPLETE"] ] further_work = [] @@ -86,10 +86,10 @@ def tagged(config, session, msg): unbuilt_components = [ c for c in module_build.component_builds - if c.state == koji.BUILD_STATES['BUILDING'] or not c.state + if c.state == koji.BUILD_STATES["BUILDING"] or not c.state ] if unbuilt_components: - repo_tag = builder.module_build_tag['name'] + repo_tag = builder.module_build_tag["name"] log.info("All components in batch tagged, regenerating repo for tag %s", repo_tag) task_id = builder.koji_session.newRepo(repo_tag) module_build.new_repo_task_id = task_id @@ -97,11 +97,12 @@ def tagged(config, session, msg): # In case this is the last batch, we do not need to regenerate the # buildroot, because we will not build anything else in it. It # would be useless to wait for a repository we will not use anyway. - log.info("All components in module tagged and built, skipping the " - "last repo regeneration") - further_work += [messaging.KojiRepoChange( - 'components::_finalize: fake msg', - builder.module_build_tag['name'])] + log.info( + "All components in module tagged and built, skipping the last repo regeneration") + further_work += [ + messaging.KojiRepoChange( + "components::_finalize: fake msg", builder.module_build_tag["name"]) + ] session.commit() return further_work diff --git a/module_build_service/scheduler/producer.py b/module_build_service/scheduler/producer.py index be85c9a0..5876ddcb 100644 --- a/module_build_service/scheduler/producer.py +++ b/module_build_service/scheduler/producer.py @@ -54,11 +54,10 @@ class MBSProducer(PollingProducer): self.cleanup_stale_failed_builds(conf, session) self.sync_koji_build_tags(conf, session) except Exception: - msg = 'Error in poller execution:' + msg = "Error in poller execution:" log.exception(msg) - log.info('Poller will now sleep for "{}" seconds' - .format(conf.polling_interval)) + log.info('Poller will now sleep for "{}" seconds'.format(conf.polling_interval)) def fail_lost_builds(self, session): # This function is supposed to be handling only the part which can't be @@ -66,15 +65,17 @@ class MBSProducer(PollingProducer): # fit `n` slim. 
We do want rest to be processed elsewhere # TODO re-use - if conf.system == 'koji': + if conf.system == "koji": # We don't do this on behalf of users koji_session = KojiModuleBuilder.get_session(conf, login=False) - log.info('Querying tasks for statuses:') - res = models.ComponentBuild.query.filter_by( - state=koji.BUILD_STATES['BUILDING']).options( - lazyload('module_build')).all() + log.info("Querying tasks for statuses:") + res = ( + models.ComponentBuild.query.filter_by(state=koji.BUILD_STATES["BUILDING"]) + .options(lazyload("module_build")) + .all() + ) - log.info('Checking status for {0} tasks'.format(len(res))) + log.info("Checking status for {0} tasks".format(len(res))) for component_build in res: log.debug(component_build.json()) # Don't check tasks which haven't been triggered yet @@ -85,10 +86,11 @@ class MBSProducer(PollingProducer): # they may have BUILDING state temporarily before we tag them # to new module tag. Checking them would be waste of resources. if component_build.reused_component_id: - log.debug('Skipping check for task "{0}", ' - 'the component has been reused ("{1}").'.format( - component_build.task_id, - component_build.reused_component_id)) + log.debug( + 'Skipping check for task "{0}", ' + 'the component has been reused ("{1}").'.format( + component_build.task_id, component_build.reused_component_id) + ) continue task_id = component_build.task_id @@ -98,42 +100,41 @@ class MBSProducer(PollingProducer): state_mapping = { # Cancelled and failed builds should be marked as failed. - koji.TASK_STATES['CANCELED']: koji.BUILD_STATES['FAILED'], - koji.TASK_STATES['FAILED']: koji.BUILD_STATES['FAILED'], + koji.TASK_STATES["CANCELED"]: koji.BUILD_STATES["FAILED"], + koji.TASK_STATES["FAILED"]: koji.BUILD_STATES["FAILED"], # Completed tasks should be marked as complete. - koji.TASK_STATES['CLOSED']: koji.BUILD_STATES['COMPLETE'], + koji.TASK_STATES["CLOSED"]: koji.BUILD_STATES["COMPLETE"], } # If it is a closed/completed task, then we can extract the NVR build_version, build_release = None, None # defaults - if task_info['state'] == koji.TASK_STATES['CLOSED']: + if task_info["state"] == koji.TASK_STATES["CLOSED"]: builds = koji_session.listBuilds(taskID=task_id) if not builds: - log.warning("Task ID %r is closed, but we found no " - "builds in koji." % task_id) + log.warning( + "Task ID %r is closed, but we found no builds in koji." % task_id) elif len(builds) > 1: - log.warning("Task ID %r is closed, but more than one " - "build is present!" % task_id) + log.warning( + "Task ID %r is closed, but more than one build is present!" 
% task_id) else: - build_version = builds[0]['version'] - build_release = builds[0]['release'] + build_version = builds[0]["version"] + build_release = builds[0]["release"] - log.info(' task {0!r} is in state {1!r}'.format( - task_id, task_info['state'])) - if task_info['state'] in state_mapping: + log.info(" task {0!r} is in state {1!r}".format(task_id, task_info["state"])) + if task_info["state"] in state_mapping: # Fake a fedmsg message on our internal queue msg = module_build_service.messaging.KojiBuildChange( - msg_id='producer::fail_lost_builds fake msg', + msg_id="producer::fail_lost_builds fake msg", build_id=component_build.task_id, task_id=component_build.task_id, build_name=component_build.package, - build_new_state=state_mapping[task_info['state']], + build_new_state=state_mapping[task_info["state"]], build_release=build_release, build_version=build_version, ) module_build_service.scheduler.consumer.work_queue_put(msg) - elif conf.system == 'mock': + elif conf.system == "mock": pass def cleanup_stale_failed_builds(self, conf, session): @@ -141,60 +142,71 @@ class MBSProducer(PollingProducer): :param conf: the MBS configuration object :param session: a SQLAlchemy database session """ - if conf.system == 'koji': - stale_date = datetime.utcnow() - timedelta( - days=conf.cleanup_failed_builds_time) - stale_module_builds = session.query(models.ModuleBuild).filter( - models.ModuleBuild.state == models.BUILD_STATES['failed'], - models.ModuleBuild.time_modified <= stale_date).all() + if conf.system == "koji": + stale_date = datetime.utcnow() - timedelta(days=conf.cleanup_failed_builds_time) + stale_module_builds = ( + session.query(models.ModuleBuild) + .filter( + models.ModuleBuild.state == models.BUILD_STATES["failed"], + models.ModuleBuild.time_modified <= stale_date, + ) + .all() + ) if stale_module_builds: - log.info('{0} stale failed module build(s) will be cleaned up'.format( - len(stale_module_builds))) + log.info( + "{0} stale failed module build(s) will be cleaned up".format( + len(stale_module_builds)) + ) for module in stale_module_builds: - log.info('{0!r} is stale and is being cleaned up'.format(module)) + log.info("{0!r} is stale and is being cleaned up".format(module)) # Find completed artifacts in the stale build - artifacts = [c for c in module.component_builds - if c.state == koji.BUILD_STATES['COMPLETE']] + artifacts = [ + c for c in module.component_builds + if c.state == koji.BUILD_STATES["COMPLETE"] + ] # If there are no completed artifacts, then there is nothing to tag if artifacts: # Set buildroot_connect=False so it doesn't recreate the Koji target and etc. 
builder = GenericBuilder.create_from_module( - session, module, conf, buildroot_connect=False) + session, module, conf, buildroot_connect=False + ) builder.untag_artifacts([c.nvr for c in artifacts]) # Mark the artifacts as untagged in the database for c in artifacts: c.tagged = False c.tagged_in_final = False session.add(c) - state_reason = ('The module was garbage collected since it has failed over {0}' - ' day(s) ago'.format(conf.cleanup_failed_builds_time)) + state_reason = ( + "The module was garbage collected since it has failed over {0}" + " day(s) ago".format(conf.cleanup_failed_builds_time) + ) module.transition( - conf, models.BUILD_STATES['garbage'], state_reason=state_reason, - failure_type='user') + conf, + models.BUILD_STATES["garbage"], + state_reason=state_reason, + failure_type="user", + ) session.add(module) session.commit() def log_summary(self, session): - log.info('Current status:') + log.info("Current status:") consumer = module_build_service.scheduler.consumer.get_global_consumer() backlog = consumer.incoming.qsize() - log.info(' * internal queue backlog is {0}'.format(backlog)) + log.info(" * internal queue backlog is {0}".format(backlog)) states = sorted(models.BUILD_STATES.items(), key=operator.itemgetter(1)) for name, code in states: query = models.ModuleBuild.query.filter_by(state=code) count = query.count() if count: - log.info(' * {0} module builds in the {1} state'.format( - count, name)) - if name == 'build': + log.info(" * {0} module builds in the {1} state".format(count, name)) + if name == "build": for module_build in query.all(): - log.info(' * {0!r}'.format(module_build)) + log.info(" * {0!r}".format(module_build)) # First batch is number '1'. for i in range(1, module_build.batch + 1): - n = len([c for c in module_build.component_builds - if c.batch == i]) - log.info(' * {0} components in batch {1}' - .format(n, i)) + n = len([c for c in module_build.component_builds if c.batch == i]) + log.info(" * {0} components in batch {1}".format(n, i)) def _nudge_module_builds_in_state(self, session, state_name, older_than_minutes): """ @@ -202,9 +214,9 @@ class MBSProducer(PollingProducer): than `older_than_minutes` and adds fake MBSModule message to the work queue. 
""" - log.info('Looking for module builds stuck in the %s state', state_name) + log.info("Looking for module builds stuck in the %s state", state_name) builds = models.ModuleBuild.by_state(session, state_name) - log.info(' %r module builds in the %s state...', len(builds), state_name) + log.info(" %r module builds in the %s state...", len(builds), state_name) now = datetime.utcnow() time_modified_threshold = timedelta(minutes=older_than_minutes) for build in builds: @@ -220,32 +232,38 @@ class MBSProducer(PollingProducer): # Fake a message to kickstart the build anew in the consumer state = module_build_service.models.BUILD_STATES[state_name] msg = module_build_service.messaging.MBSModule( - 'nudge_module_builds_fake_message', build.id, state) + "nudge_module_builds_fake_message", build.id, state) log.info(" Scheduling faked event %r" % msg) module_build_service.scheduler.consumer.work_queue_put(msg) def process_waiting_module_builds(self, session): - for state in ['init', 'wait']: + for state in ["init", "wait"]: self._nudge_module_builds_in_state(session, state, 10) def process_open_component_builds(self, session): - log.warning('process_open_component_builds is not yet implemented...') + log.warning("process_open_component_builds is not yet implemented...") def process_paused_module_builds(self, config, session): - log.info('Looking for paused module builds in the build state') - if module_build_service.utils.at_concurrent_component_threshold( - config, session): - log.debug('Will not attempt to start paused module builds due to ' - 'the concurrent build threshold being met') + log.info("Looking for paused module builds in the build state") + if module_build_service.utils.at_concurrent_component_threshold(config, session): + log.debug( + "Will not attempt to start paused module builds due to " + "the concurrent build threshold being met" + ) return ten_minutes = timedelta(minutes=10) # Check for module builds that are in the build state but don't have any active component # builds. Exclude module builds in batch 0. This is likely a build of a module without # components. - module_builds = session.query(models.ModuleBuild).filter( - models.ModuleBuild.state == models.BUILD_STATES['build'], - models.ModuleBuild.batch > 0).all() + module_builds = ( + session.query(models.ModuleBuild) + .filter( + models.ModuleBuild.state == models.BUILD_STATES["build"], + models.ModuleBuild.batch > 0, + ) + .all() + ) for module_build in module_builds: now = datetime.utcnow() # Only give builds a nudge if stuck for more than ten minutes @@ -255,12 +273,13 @@ class MBSProducer(PollingProducer): # then no possible event will start off new component builds. # But do not try to start new builds when we are waiting for the # repo-regen. - if (not module_build.current_batch(koji.BUILD_STATES['BUILDING']) and - not module_build.new_repo_task_id): - log.info(' Processing the paused module build %r', module_build) + if ( + not module_build.current_batch(koji.BUILD_STATES["BUILDING"]) + and not module_build.new_repo_task_id + ): + log.info(" Processing the paused module build %r", module_build) # Initialize the builder... 
-                builder = GenericBuilder.create_from_module(
-                    session, module_build, config)
+                builder = GenericBuilder.create_from_module(session, module_build, config)

                 further_work = module_build_service.utils.start_next_batch_build(
                     config, module_build, session, builder)
@@ -269,8 +288,7 @@ class MBSProducer(PollingProducer):
                     module_build_service.scheduler.consumer.work_queue_put(event)

             # Check if we have met the threshold.
-            if module_build_service.utils.at_concurrent_component_threshold(
-                    config, session):
+            if module_build_service.utils.at_concurrent_component_threshold(config, session):
                 break

     def trigger_new_repo_when_stalled(self, config, session):
         """
@@ -279,22 +297,24 @@ class MBSProducer(PollingProducer):
        doing anything and our module build gets stuck. In that case, we trigger
        newRepo again to rebuild it.
        """
-        if config.system != 'koji':
+        if config.system != "koji":
             return

-        koji_session = module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder\
-            .get_session(config)
+        koji_session = module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session(
+            config)

-        for module_build in session.query(models.ModuleBuild) \
-                .filter_by(state=models.BUILD_STATES['build']).all():
+        for module_build in (
+            session.query(models.ModuleBuild).filter_by(state=models.BUILD_STATES["build"]).all()
+        ):
             if not module_build.new_repo_task_id:
                 continue

             task_info = koji_session.getTaskInfo(module_build.new_repo_task_id)

-            if (task_info["state"] in [koji.TASK_STATES['CANCELED'],
-                                       koji.TASK_STATES['FAILED']]):
-                log.info("newRepo task %s for %r failed, starting another one",
-                         str(module_build.new_repo_task_id), module_build)
+            if task_info["state"] in [koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"]]:
+                log.info(
+                    "newRepo task %s for %r failed, starting another one",
+                    str(module_build.new_repo_task_id), module_build,
+                )
                 taginfo = koji_session.getTag(module_build.koji_tag + "-build")
                 module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])
             else:
@@ -307,39 +327,42 @@ class MBSProducer(PollingProducer):
         Deletes targets older than `config.koji_target_delete_time` seconds
         from Koji to cleanup after the module builds.
         """
-        if config.system != 'koji':
+        if config.system != "koji":
             return

-        log.info('Looking for module builds which Koji target can be removed')
+        log.info("Looking for module builds which Koji target can be removed")

         now = datetime.utcnow()

         koji_session = KojiModuleBuilder.get_session(config)
         for target in koji_session.getBuildTargets():
             koji_tag = target["dest_tag_name"]
-            module = session.query(models.ModuleBuild).filter_by(
-                koji_tag=koji_tag).first()
-            if not module or module.name in conf.base_module_names or module.state in [
+            module = session.query(models.ModuleBuild).filter_by(koji_tag=koji_tag).first()
+            if (
+                not module
+                or module.name in conf.base_module_names
+                or module.state in [
                     models.BUILD_STATES["init"],
                     models.BUILD_STATES["wait"],
-                    models.BUILD_STATES["build"]]:
+                    models.BUILD_STATES["build"],
+                ]
+            ):
                 continue

             # Double-check that the target we are going to remove is prefixed
             # by our prefix, so we won't remove f26 when there is some garbage
             # in DB or Koji.
             for allowed_prefix in config.koji_tag_prefixes:
-                if target['name'].startswith(allowed_prefix + "-"):
+                if target["name"].startswith(allowed_prefix + "-"):
                     break
             else:
-                log.error("Module %r has Koji target with not allowed prefix.",
-                          module)
+                log.error("Module %r has Koji target with not allowed prefix.", module)
                 continue

             delta = now - module.time_completed
             if delta.total_seconds() > config.koji_target_delete_time:
                 log.info("Removing target of module %r", module)
-                koji_session.deleteBuildTarget(target['id'])
+                koji_session.deleteBuildTarget(target["id"])

     def cancel_stuck_module_builds(self, config, session):
         """
@@ -347,34 +370,45 @@ class MBSProducer(PollingProducer):
         The states are defined with the "cleanup_stuck_builds_states" config option and the
         time is defined by the "cleanup_stuck_builds_time" config option.
         """
-        log.info(('Looking for module builds stuck in the states "{states}" '
-                  'more than {days} days').format(
-            states=' and '.join(config.cleanup_stuck_builds_states),
-            days=config.cleanup_stuck_builds_time
-        ))
+        log.info(
+            'Looking for module builds stuck in the states "{states}" more than {days} days'
+            .format(
+                states=" and ".join(config.cleanup_stuck_builds_states),
+                days=config.cleanup_stuck_builds_time,
+            )
+        )

         delta = timedelta(days=config.cleanup_stuck_builds_time)
         now = datetime.utcnow()
         threshold = now - delta
-        states = [module_build_service.models.BUILD_STATES[state]
-                  for state in config.cleanup_stuck_builds_states]
+        states = [
+            module_build_service.models.BUILD_STATES[state]
+            for state in config.cleanup_stuck_builds_states
+        ]

-        module_builds = session.query(models.ModuleBuild).filter(
-            models.ModuleBuild.state.in_(states),
-            models.ModuleBuild.time_modified < threshold).all()
+        module_builds = (
+            session.query(models.ModuleBuild)
+            .filter(
+                models.ModuleBuild.state.in_(states), models.ModuleBuild.time_modified < threshold
+            )
+            .all()
+        )

-        log.info(' {0!r} module builds are stuck...'.format(len(module_builds)))
+        log.info(" {0!r} module builds are stuck...".format(len(module_builds)))
         for build in module_builds:
             nsvc = ":".join([build.name, build.stream, build.version, build.context])
             log.info('Transitioning build "{nsvc}" to "Failed" state.'.format(nsvc=nsvc))

             state_reason = "The module was in {state} for more than {days} days".format(
-                state=build.state,
-                days=config.cleanup_stuck_builds_time
+                state=build.state, days=config.cleanup_stuck_builds_time
+            )
+            build.transition(
+                config,
+                state=models.BUILD_STATES["failed"],
+                state_reason=state_reason,
+                failure_type="user",
             )
-            build.transition(config, state=models.BUILD_STATES["failed"],
-                             state_reason=state_reason, failure_type='user')

             session.commit()

     def sync_koji_build_tags(self, config, session):
         """
@@ -386,15 +420,14 @@ class MBSProducer(PollingProducer):
         In case Koji shows the build as tagged/tagged_in_final,
         a fake "tagged" message is added to the work queue.
""" - if conf.system != 'koji': + if conf.system != "koji": return koji_session = KojiModuleBuilder.get_session(conf, login=False) module_builds = models.ModuleBuild.by_state(session, "build") for module_build in module_builds: - complete_components = module_build.current_batch( - koji.BUILD_STATES['COMPLETE']) + complete_components = module_build.current_batch(koji.BUILD_STATES["COMPLETE"]) for c in complete_components: # In case the component is tagged in the build tag and # also tagged in the final tag (or it is build_time_only @@ -402,8 +435,11 @@ class MBSProducer(PollingProducer): if c.tagged and (c.tagged_in_final or c.build_time_only): continue - log.info("%r: Component %r is complete, but not tagged in the " - "final and/or build tags.", module_build, c) + log.info( + "%r: Component %r is complete, but not tagged in the " + "final and/or build tags.", + module_build, c, + ) # Check in which tags the component is tagged. tag_dicts = koji_session.listTags(c.nvr) @@ -413,8 +449,8 @@ class MBSProducer(PollingProducer): # schedule fake message. if not c.tagged_in_final and module_build.koji_tag in tags: msg = module_build_service.messaging.KojiTagChange( - 'sync_koji_build_tags_fake_message', - module_build.koji_tag, c.package, c.nvr) + "sync_koji_build_tags_fake_message", module_build.koji_tag, c.package, c.nvr + ) log.info(" Scheduling faked event %r" % msg) module_build_service.scheduler.consumer.work_queue_put(msg) @@ -423,7 +459,6 @@ class MBSProducer(PollingProducer): build_tag = module_build.koji_tag + "-build" if not c.tagged and build_tag in tags: msg = module_build_service.messaging.KojiTagChange( - 'sync_koji_build_tags_fake_message', - build_tag, c.package, c.nvr) + "sync_koji_build_tags_fake_message", build_tag, c.package, c.nvr) log.info(" Scheduling faked event %r" % msg) module_build_service.scheduler.consumer.work_queue_put(msg) diff --git a/module_build_service/scm.py b/module_build_service/scm.py index 7e7e43bc..7588d0e3 100644 --- a/module_build_service/scm.py +++ b/module_build_service/scm.py @@ -36,7 +36,11 @@ import datetime from module_build_service import log, conf from module_build_service.errors import ( - Forbidden, ValidationError, UnprocessableEntity, ProgrammingError) + Forbidden, + ValidationError, + UnprocessableEntity, + ProgrammingError, +) from module_build_service.utils.general import scm_url_schemes, retry @@ -57,16 +61,16 @@ class SCM(object): """ if allowed_scm: - if not (url.startswith(tuple(allowed_scm)) or - (allow_local and url.startswith("file://"))): - raise Forbidden( - '%s is not in the list of allowed SCMs' % url) + if not ( + url.startswith(tuple(allowed_scm)) or (allow_local and url.startswith("file://")) + ): + raise Forbidden("%s is not in the list of allowed SCMs" % url) # If we are given the option for the git protocol or the http(s) protocol, # then just use http(s) - if re.match(r'(git\+http(?:s)?:\/\/)', url): + if re.match(r"(git\+http(?:s)?:\/\/)", url): url = url[4:] - url = url.rstrip('/') + url = url.rstrip("/") self.url = url self.sourcedir = None @@ -78,14 +82,14 @@ class SCM(object): self.scheme = scmtype break else: - raise ValidationError('Invalid SCM URL: %s' % url) + raise ValidationError("Invalid SCM URL: %s" % url) # git is the only one supported SCM provider atm if self.scheme == "git": match = re.search(r"^(?P.*/(?P[^?]*))(\?#(?P.*))?", url) self.repository = match.group("repository") self.name = match.group("name") - self.repository_root = self.repository[:-len(self.name)] + self.repository_root = 
diff --git a/module_build_service/scm.py b/module_build_service/scm.py
index 7e7e43bc..7588d0e3 100644
--- a/module_build_service/scm.py
+++ b/module_build_service/scm.py
@@ -36,7 +36,11 @@ import datetime

 from module_build_service import log, conf
 from module_build_service.errors import (
-    Forbidden, ValidationError, UnprocessableEntity, ProgrammingError)
+    Forbidden,
+    ValidationError,
+    UnprocessableEntity,
+    ProgrammingError,
+)
 from module_build_service.utils.general import scm_url_schemes, retry

@@ -57,16 +61,16 @@ class SCM(object):
         """

         if allowed_scm:
-            if not (url.startswith(tuple(allowed_scm)) or
-                    (allow_local and url.startswith("file://"))):
-                raise Forbidden(
-                    '%s is not in the list of allowed SCMs' % url)
+            if not (
+                url.startswith(tuple(allowed_scm)) or (allow_local and url.startswith("file://"))
+            ):
+                raise Forbidden("%s is not in the list of allowed SCMs" % url)

         # If we are given the option for the git protocol or the http(s) protocol,
         # then just use http(s)
-        if re.match(r'(git\+http(?:s)?:\/\/)', url):
+        if re.match(r"(git\+http(?:s)?:\/\/)", url):
             url = url[4:]
-        url = url.rstrip('/')
+        url = url.rstrip("/")

         self.url = url
         self.sourcedir = None
@@ -78,14 +82,14 @@ class SCM(object):
                 self.scheme = scmtype
                 break
         else:
-            raise ValidationError('Invalid SCM URL: %s' % url)
+            raise ValidationError("Invalid SCM URL: %s" % url)

         # git is the only one supported SCM provider atm
         if self.scheme == "git":
             match = re.search(r"^(?P<repository>.*/(?P<name>[^?]*))(\?#(?P<commit>.*))?", url)
             self.repository = match.group("repository")
             self.name = match.group("name")
-            self.repository_root = self.repository[:-len(self.name)]
+            self.repository_root = self.repository[: -len(self.name)]
             if self.name.endswith(".git"):
                 self.name = self.name[:-4]
             self.commit = match.group("commit")
@@ -108,9 +112,10 @@ class SCM(object):
             raise ProgrammingError("Do .checkout() first.")

         found = False
-        branches = SCM._run(["git", "branch", "-r", "--contains", self.commit],
-                            chdir=self.sourcedir)[1]
-        for branch in branches.decode('utf-8').split("\n"):
+        branches = SCM._run(
+            ["git", "branch", "-r", "--contains", self.commit], chdir=self.sourcedir
+        )[1]
+        for branch in branches.decode("utf-8").split("\n"):
             branch = branch.strip()
             if branch[len("origin/"):] == self.branch:
                 found = True
@@ -137,15 +142,17 @@ class SCM(object):
         if stderr:
             log.warning(stderr)
         if proc.returncode != 0:
-            raise UnprocessableEntity("Failed on %r, retcode %r, out %r, err %r" % (
-                cmd, proc.returncode, stdout, stderr))
+            raise UnprocessableEntity(
+                "Failed on %r, retcode %r, out %r, err %r" % (cmd, proc.returncode, stdout, stderr)
+            )
         return proc.returncode, stdout, stderr

     @staticmethod
     @retry(
         timeout=conf.scm_net_timeout,
         interval=conf.scm_net_retry_interval,
-        wait_on=UnprocessableEntity)
+        wait_on=UnprocessableEntity,
+    )
     def _run(cmd, chdir=None, log_stdout=False):
         return SCM._run_without_retry(cmd, chdir, log_stdout)

@@ -158,14 +165,14 @@ class SCM(object):
         """
         # TODO: sanity check arguments
         if self.scheme == "git":
-            self.sourcedir = '%s/%s' % (scmdir, self.name)
+            self.sourcedir = "%s/%s" % (scmdir, self.name)

-            module_clone_cmd = ['git', 'clone', '-q']
+            module_clone_cmd = ["git", "clone", "-q"]
             if self.commit:
-                module_clone_cmd.append('--no-checkout')
-                module_checkout_cmd = ['git', 'checkout', '-q', self.commit]
+                module_clone_cmd.append("--no-checkout")
+                module_checkout_cmd = ["git", "checkout", "-q", self.commit]
             else:
-                module_clone_cmd.extend(['--depth', '1'])
+                module_clone_cmd.extend(["--depth", "1"])
             module_clone_cmd.extend([self.repository, self.sourcedir])

             # perform checkouts
@@ -174,13 +181,15 @@ class SCM(object):
                 try:
                     SCM._run(module_checkout_cmd, chdir=self.sourcedir)
                 except RuntimeError as e:
-                    if (e.message.endswith(
-                            " did not match any file(s) known to git.\\n\"") or
-                            "fatal: reference is not a tree: " in e.message):
+                    if (
+                        e.message.endswith(' did not match any file(s) known to git.\\n"')
+                        or "fatal: reference is not a tree: " in e.message
+                    ):
                         raise UnprocessableEntity(
                             "checkout: The requested commit hash was not found "
                             "within the repository. Perhaps you forgot to push. "
-                            "The original message was: %s" % e.message)
+                            "The original message was: %s" % e.message
+                        )
                     raise

             timestamp = SCM._run(["git", "show", "-s", "--format=%ct"], chdir=self.sourcedir)[1]
@@ -190,7 +199,7 @@ class SCM(object):
             raise RuntimeError("checkout: Unhandled SCM scheme.")
         return self.sourcedir

-    def get_latest(self, ref='master'):
+    def get_latest(self, ref="master"):
         """ Get the latest commit hash based on the provided git ref
         :param ref: a string of a git ref (either a branch or commit hash)
@@ -198,7 +207,7 @@ class SCM(object):
         :raises: RuntimeError
         """
         if ref is None:
-            ref = 'master'
+            ref = "master"
         if self.scheme == "git":
             log.debug("Getting/verifying commit hash for %s" % self.repository)
             try:
@@ -208,9 +217,9 @@ class SCM(object):
                 # fall back to `get_full_commit_hash`. We do not want to retry here, because
                 # in case the module contains only commit hashes, it would block for a very
                 # long time.
-                _, output, _ = SCM._run_without_retry([
-                    "git", "ls-remote", "--exit-code", self.repository, 'refs/heads/' + ref
-                ])
+                _, output, _ = SCM._run_without_retry(
+                    ["git", "ls-remote", "--exit-code", self.repository, "refs/heads/" + ref]
+                )
             except UnprocessableEntity:
                 # The call below will either return the commit hash as is (if a full one was
                 # provided) or the full commit hash (if a short hash was provided). If ref is not
@@ -220,7 +229,7 @@ class SCM(object):
             # git-ls-remote prints output like this, where the first commit
             # hash is what to return.
             # bf028e573e7c18533d89c7873a411de92d4d913e refs/heads/master
-            return output.split()[0].decode('utf-8')
+            return output.split()[0].decode("utf-8")
         else:
             raise RuntimeError("get_latest: Unhandled SCM scheme.")

@@ -236,30 +245,28 @@ class SCM(object):
         elif self.commit:
             commit_to_check = self.commit
         else:
-            raise RuntimeError('No commit hash was specified for "{0}"'.format(
-                self.url))
+            raise RuntimeError('No commit hash was specified for "{0}"'.format(self.url))

-        if self.scheme == 'git':
-            log.debug('Getting the full commit hash for "{0}"'
-                      .format(self.repository))
+        if self.scheme == "git":
+            log.debug('Getting the full commit hash for "{0}"'.format(self.repository))
             td = None
             try:
                 td = tempfile.mkdtemp()
-                SCM._run(['git', 'clone', '-q', self.repository, td, '--bare'])
-                output = SCM._run(
-                    ['git', 'rev-parse', commit_to_check], chdir=td)[1]
+                SCM._run(["git", "clone", "-q", self.repository, td, "--bare"])
+                output = SCM._run(["git", "rev-parse", commit_to_check], chdir=td)[1]
             finally:
                 if td and os.path.exists(td):
                     shutil.rmtree(td)

             if output:
-                return str(output.decode('utf-8').strip('\n'))
+                return str(output.decode("utf-8").strip("\n"))

             raise UnprocessableEntity(
-                'The full commit hash of "{0}" for "{1}" could not be found'
-                .format(commit_hash, self.repository))
+                'The full commit hash of "{0}" for "{1}" could not be found'.format(
+                    commit_hash, self.repository)
+            )
         else:
-            raise RuntimeError('get_full_commit_hash: Unhandled SCM scheme.')
+            raise RuntimeError("get_full_commit_hash: Unhandled SCM scheme.")

     def get_module_yaml(self):
         """
@@ -276,8 +283,10 @@ class SCM(object):
             with open(path_to_yaml):
                 return path_to_yaml
         except IOError:
-            log.error("get_module_yaml: The SCM repository doesn't contain a modulemd file. "
-                      "Couldn't access: %s" % path_to_yaml)
+            log.error(
+                "get_module_yaml: The SCM repository doesn't contain a modulemd file. "
+                "Couldn't access: %s" % path_to_yaml
+            )
             raise UnprocessableEntity("The SCM repository doesn't contain a modulemd file")

     @staticmethod
@@ -289,11 +298,11 @@ class SCM(object):
         :param commit: a string containing the commit
         :return: boolean
         """
-        if scheme == 'git':
-            sha1_pattern = re.compile(r'^[0-9a-f]{40}$')
+        if scheme == "git":
+            sha1_pattern = re.compile(r"^[0-9a-f]{40}$")
             return bool(re.match(sha1_pattern, commit))
         else:
-            raise RuntimeError('is_full_commit_hash: Unhandled SCM scheme.')
+            raise RuntimeError("is_full_commit_hash: Unhandled SCM scheme.")

     def is_available(self, strict=False):
         """Check whether the scmurl is available for checkout.
@@ -316,9 +325,7 @@ class SCM(object):
             if td is not None:
                 shutil.rmtree(td)
         except Exception as e:
-            log.warning(
-                "Failed to remove temporary directory {!r}: {}".format(
-                    td, str(e)))
+            log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))

     @property
     def url(self):
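Note: the SCM hunks above cover the whole checkout lifecycle: validate the URL, clone and check out, then resolve refs to full commit hashes. A rough usage sketch, under the assumption that the constructor takes the SCM URL as its first argument, as the `SCM(pkg.get_repository())` call in submit.py further below suggests (the URL and scratch directory are invented):

    import tempfile

    from module_build_service.scm import SCM

    # "?#<ref>" may carry a commit; plain URLs default to the master branch.
    scm = SCM("https://src.fedoraproject.org/modules/testmodule.git")

    # Resolve a branch to a full commit hash via `git ls-remote`.
    commit = scm.get_latest("master")

    # Clone into <scmdir>/<name>; with a commit set this is a --no-checkout
    # clone followed by an explicit `git checkout`.
    sourcedir = scm.checkout(tempfile.mkdtemp())

    # Locate the modulemd file inside the fresh clone.
    path_to_yaml = scm.get_module_yaml()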
diff --git a/module_build_service/utils/batches.py b/module_build_service/utils/batches.py
index 0c591db6..b2e3ca55 100644
--- a/module_build_service/utils/batches.py
+++ b/module_build_service/utils/batches.py
@@ -52,15 +52,15 @@ def at_concurrent_component_threshold(config, session):

     import koji  # Placed here to avoid py2/py3 conflicts...

-    if config.num_concurrent_builds and config.num_concurrent_builds <= \
-            session.query(models.ComponentBuild).filter_by(
-                state=koji.BUILD_STATES['BUILDING'],
-                # Components which are reused should not be counted in, because
-                # we do not submit new build for them. They are in BUILDING state
-                # just internally in MBS to be handled by
-                # scheduler.handlers.components.complete.
-                reused_component_id=None).count():
-        return True
+    # Components which are reused should not be counted, because we do not
+    # submit a new build for them. They are in the BUILDING state just
+    # internally in MBS to be handled by
+    # scheduler.handlers.components.complete.
+    if config.num_concurrent_builds:
+        count = session.query(models.ComponentBuild).filter_by(
+            state=koji.BUILD_STATES["BUILDING"], reused_component_id=None).count()
+        if config.num_concurrent_builds <= count:
+            return True

     return False

@@ -71,21 +71,21 @@ def start_build_component(builder, c):
     by QueueBasedThreadPool in continue_batch_build.
     """
     import koji
+
     try:
         c.task_id, c.state, c.state_reason, c.nvr = builder.build(
             artifact_name=c.package, source=c.scmurl)
     except Exception as e:
-        c.state = koji.BUILD_STATES['FAILED']
+        c.state = koji.BUILD_STATES["FAILED"]
         c.state_reason = "Failed to build artifact %s: %s" % (c.package, str(e))
         log.exception(e)
-        c.module_build.transition(conf, models.BUILD_STATES['failed'], failure_type='infra')
+        c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")
         return

-    if not c.task_id and c.state == koji.BUILD_STATES['BUILDING']:
-        c.state = koji.BUILD_STATES['FAILED']
-        c.state_reason = ("Failed to build artifact %s: "
-                          "Builder did not return task ID" % (c.package))
-        c.module_build.transition(conf, models.BUILD_STATES['failed'], failure_type='infra')
+    if not c.task_id and c.state == koji.BUILD_STATES["BUILDING"]:
+        c.state = koji.BUILD_STATES["FAILED"]
+        c.state_reason = "Failed to build artifact %s: Builder did not return task ID" % (c.package)
+        c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")
         return

@@ -104,10 +104,12 @@ def continue_batch_build(config, module, session, builder, components=None):
     # successfully built yet or isn't currently being built.
     unbuilt_components = components or [
         c for c in module.component_builds
-        if (c.state != koji.BUILD_STATES['COMPLETE'] and
-            c.state != koji.BUILD_STATES['BUILDING'] and
-            c.state != koji.BUILD_STATES['FAILED'] and
-            c.batch == module.batch)
+        if (
+            c.state != koji.BUILD_STATES["COMPLETE"]
+            and c.state != koji.BUILD_STATES["BUILDING"]
+            and c.state != koji.BUILD_STATES["FAILED"]
+            and c.batch == module.batch
+        )
     ]

     if not unbuilt_components:
@@ -134,17 +136,17 @@ def continue_batch_build(config, module, session, builder, components=None):
     for c in unbuilt_components:
         # If a previous build of the component was found, then the state will be marked as
         # COMPLETE so we should skip this
-        if c.state == koji.BUILD_STATES['COMPLETE']:
+        if c.state == koji.BUILD_STATES["COMPLETE"]:
             continue
         # Check the concurrent build threshold.
         if at_concurrent_component_threshold(config, session):
-            log.info('Concurrent build threshold met')
+            log.info("Concurrent build threshold met")
             break

         # We set state to "BUILDING" here because at this point we are committed
         # to build the component and at_concurrent_component_threshold() works by
         # counting the number of components in the "BUILDING" state.
-        c.state = koji.BUILD_STATES['BUILDING']
+        c.state = koji.BUILD_STATES["BUILDING"]
         components_to_build.append(c)

     # Start build of components in this batch.
@@ -152,8 +154,9 @@ def continue_batch_build(config, module, session, builder, components=None):
     if config.num_concurrent_builds > 0:
         max_workers = config.num_concurrent_builds
     with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
-        futures = {executor.submit(start_build_component, builder, c):
-                   c for c in components_to_build}
+        futures = {
+            executor.submit(start_build_component, builder, c): c for c in components_to_build
+        }
         concurrent.futures.wait(futures)
         # In case there has been an exception generated directly in the
         # start_build_component, the future.result() will re-raise it in the
@@ -186,16 +189,15 @@ def start_next_batch_build(config, module, session, builder, components=None):
     # later on in the code
     all_reused_in_prev_batch = True
     for c in module.component_builds:
-        if c.state in [None, koji.BUILD_STATES['BUILDING']]:
+        if c.state in [None, koji.BUILD_STATES["BUILDING"]]:
             has_unbuilt_components = True

         if c.batch == module.batch:
             if not c.state:
                 has_unbuilt_components_in_batch = True
-            elif c.state == koji.BUILD_STATES['BUILDING']:
+            elif c.state == koji.BUILD_STATES["BUILDING"]:
                 has_building_components_in_batch = True
-            elif (c.state in [koji.BUILD_STATES['FAILED'],
-                              koji.BUILD_STATES['CANCELED']]):
+            elif c.state in [koji.BUILD_STATES["FAILED"], koji.BUILD_STATES["CANCELED"]]:
                 has_failed_components = True

             if c.batch == module.batch and not c.reused_component_id:
@@ -203,57 +205,60 @@ def start_next_batch_build(config, module, session, builder, components=None):
     # Do not start new batch if there are no components to build.
     if not has_unbuilt_components:
-        log.debug("Not starting new batch, there is no component to build "
-                  "for module %s" % module)
+        log.debug(
+            "Not starting new batch, there is no component to build for module %s" % module)
         return []

     # Check that there is something to build in current batch before starting
     # the new one. If there is, continue building current batch.
     if has_unbuilt_components_in_batch:
         log.info("Continuing building batch %d", module.batch)
-        return continue_batch_build(
-            config, module, session, builder, components)
+        return continue_batch_build(config, module, session, builder, components)

     # Check that there are no components in BUILDING state in current batch.
     # If there are, wait until they are built.
     if has_building_components_in_batch:
-        log.debug("Not starting new batch, there are still components in "
-                  "BUILDING state in current batch for module %s", module)
+        log.debug(
+            "Not starting new batch, there are still components in "
+            "BUILDING state in current batch for module %s",
+            module,
+        )
         return []

     # Check that there are no failed components in this batch. If there are,
     # do not start the new batch.
     if has_failed_components:
-        log.info("Not starting new batch, there are failed components for "
-                 "module %s", module)
+        log.info("Not starting new batch, there are failed components for module %s", module)
        return []

     # Identify active tasks which might contain relics of previous builds
     # and fail the module build if this happens.
-    active_tasks = builder.list_tasks_for_components(module.component_builds,
-                                                     state='active')
+    active_tasks = builder.list_tasks_for_components(module.component_builds, state="active")
     if isinstance(active_tasks, list) and active_tasks:
-        state_reason = ("Cannot start a batch, because some components are already"
-                        " in 'building' state.")
+        state_reason = \
+            "Cannot start a batch, because some components are already in 'building' state."
         state_reason += " See tasks (ID): {}".format(
-            ', '.join([str(t['id']) for t in active_tasks])
+            ", ".join([str(t["id"]) for t in active_tasks])
+        )
+        module.transition(
+            config,
+            state=models.BUILD_STATES["failed"],
+            state_reason=state_reason,
+            failure_type="infra",
         )
-        module.transition(config, state=models.BUILD_STATES['failed'],
-                          state_reason=state_reason, failure_type='infra')
         session.commit()
         return []
     else:
-        log.debug("Builder {} doesn't provide information about active tasks."
-                  .format(builder))
+        log.debug("Builder {} doesn't provide information about active tasks.".format(builder))

     # Find out if there is repo regeneration in progress for this module.
     # If there is, wait until the repo is regenerated before starting a new
     # batch.
     artifacts = [c.nvr for c in module.current_batch()]
     if not builder.buildroot_ready(artifacts):
-        log.info("Not starting new batch, not all of %r are in the buildroot. "
-                 "Waiting." % artifacts)
+        log.info(
+            "Not starting new batch, not all of %r are in the buildroot. Waiting." % artifacts)
         return []

     # Although this variable isn't necessary, it is easier to read code later on with it
@@ -265,21 +270,21 @@ def start_next_batch_build(config, module, session, builder, components=None):
     # successfully built yet or isn't currently being built.
     unbuilt_components = components or [
         c for c in module.component_builds
-        if (c.state != koji.BUILD_STATES['COMPLETE'] and
-            c.state != koji.BUILD_STATES['BUILDING'] and
-            c.state != koji.BUILD_STATES['FAILED'] and
-            c.batch == module.batch)
+        if (
+            c.state != koji.BUILD_STATES["COMPLETE"]
+            and c.state != koji.BUILD_STATES["BUILDING"]
+            and c.state != koji.BUILD_STATES["FAILED"]
+            and c.batch == module.batch
+        )
     ]

     # If there are no components to build, skip the batch and start building
     # the new one. This can happen when resubmitting the failed module build.
     if not unbuilt_components and not components:
-        log.info("Skipping build of batch %d, no component to build.",
-                 module.batch)
+        log.info("Skipping build of batch %d, no component to build.", module.batch)
         return start_next_batch_build(config, module, session, builder)

-    log.info("Starting build of next batch %d, %s" % (module.batch,
-             unbuilt_components))
+    log.info("Starting build of next batch %d, %s" % (module.batch, unbuilt_components))

     # Attempt to reuse any components possible in the batch before attempting to build any
     further_work = []
@@ -288,14 +293,13 @@ def start_next_batch_build(config, module, session, builder, components=None):
     should_try_reuse = True
     # If the rebuild strategy is "changed-and-after", try to figure out if it's worth checking if
     # the components can be reused to save on resources
-    if module.rebuild_strategy == 'changed-and-after':
+    if module.rebuild_strategy == "changed-and-after":
         # Check to see if the previous batch had all their builds reused except for when the
         # previous batch was 1 because that always has the module-build-macros component built
         should_try_reuse = all_reused_in_prev_batch or prev_batch == 1
     if should_try_reuse:
         component_names = [c.package for c in unbuilt_components]
-        reusable_components = get_reusable_components(
-            session, module, component_names)
+        reusable_components = get_reusable_components(session, module, component_names)
         for c, reusable_c in zip(unbuilt_components, reusable_components):
             if reusable_c:
                 components_reused = True
@@ -309,8 +313,10 @@ def start_next_batch_build(config, module, session, builder, components=None):
     # If all the components were reused in the batch then make a KojiRepoChange
     # message and return
     if components_reused and not unbuilt_components_after_reuse:
-        further_work.append(module_build_service.messaging.KojiRepoChange(
-            'start_build_batch: fake msg', builder.module_build_tag['name']))
+        further_work.append(
+            module_build_service.messaging.KojiRepoChange(
+                "start_build_batch: fake msg", builder.module_build_tag["name"])
+        )
         return further_work

     return further_work + continue_batch_build(
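Note: the throttling rule rewritten in the first batches.py hunk is easy to restate on its own. The sketch below mirrors `at_concurrent_component_threshold()` line for line; only the function name differs:

    import koji

    from module_build_service import models

    def over_component_threshold(config, session):
        # A num_concurrent_builds of 0/None disables throttling entirely.
        if not config.num_concurrent_builds:
            return False
        # Reused components sit in BUILDING purely as MBS-internal
        # bookkeeping, so they must not count against the limit.
        building = session.query(models.ComponentBuild).filter_by(
            state=koji.BUILD_STATES["BUILDING"], reused_component_id=None).count()
        return config.num_concurrent_builds <= building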
Retry in %rs" % ( - e, function, interval)) + log.warning( + "Exception %r raised from %r. Retry in %rs" % (e, function, interval) + ) time.sleep(interval) if (time.time() - start) >= timeout: raise # This re-raises the last exception. + return inner + return wrapper def module_build_state_from_msg(msg): state = int(msg.module_build_state) # TODO better handling - assert state in models.BUILD_STATES.values(), ( - 'state=%s(%s) is not in %s' - % (state, type(state), list(models.BUILD_STATES.values()))) + assert state in models.BUILD_STATES.values(), "state=%s(%s) is not in %s" % ( + state, + type(state), + list(models.BUILD_STATES.values()), + ) return state @@ -125,23 +137,23 @@ def generate_koji_tag(name, stream, version, context, max_length=256, scratch=Fa :rtype: str """ if scratch: - prefix = 'scrmod-' + prefix = "scrmod-" # use unique suffix so same commit can be resubmitted - suffix = '+' + str(scratch_id) + suffix = "+" + str(scratch_id) else: - prefix = 'module-' - suffix = '' + prefix = "module-" + suffix = "" nsvc_list = [name, stream, str(version), context] - nsvc_tag = prefix + '-'.join(nsvc_list) + suffix - if len(nsvc_tag) + len('-build') > max_length: + nsvc_tag = prefix + "-".join(nsvc_list) + suffix + if len(nsvc_tag) + len("-build") > max_length: # Fallback to the old format of 'module-' if the generated koji tag # name is longer than max_length - nsvc_hash = hashlib.sha1('.'.join(nsvc_list).encode('utf-8')).hexdigest()[:16] + nsvc_hash = hashlib.sha1(".".join(nsvc_list).encode("utf-8")).hexdigest()[:16] return prefix + nsvc_hash + suffix return nsvc_tag -def validate_koji_tag(tag_arg_names, pre='', post='-', dict_key='name'): +def validate_koji_tag(tag_arg_names, pre="", post="-", dict_key="name"): """ Used as a decorator validates koji tag arg(s)' value(s) against configurable list of koji tag prefixes. @@ -168,24 +180,29 @@ def validate_koji_tag(tag_arg_names, pre='', post='-', dict_key='name'): # If any of them don't appear in the function, then fail. if tag_arg_name not in call_args: raise ProgrammingError( - '{} Inspected argument {} is not within function args.' - ' The function was: {}.' - .format(err_subject, tag_arg_name, function.__name__)) + "{} Inspected argument {} is not within function args." + " The function was: {}.".format( + err_subject, tag_arg_name, function.__name__ + ) + ) tag_arg_val = call_args[tag_arg_name] # First, check that we have some value if not tag_arg_val: - raise ValidationError('{} Can not validate {}. No value provided.' - .format(err_subject, tag_arg_name)) + raise ValidationError( + "{} Can not validate {}. No value provided.".format( + err_subject, tag_arg_name) + ) # If any of them are a dict, then use the provided dict_key if isinstance(tag_arg_val, dict): if dict_key not in tag_arg_val: raise ProgrammingError( - '{} Inspected dict arg {} does not contain {} key.' - ' The function was: {}.' - .format(err_subject, tag_arg_name, dict_key, function.__name__)) + "{} Inspected dict arg {} does not contain {} key." + " The function was: {}.".format( + err_subject, tag_arg_name, dict_key, function.__name__) + ) tag_list = [tag_arg_val[dict_key]] elif isinstance(tag_arg_val, list): tag_list = tag_arg_val @@ -200,9 +217,9 @@ def validate_koji_tag(tag_arg_names, pre='', post='-', dict_key='name'): # Only raise this error if the given tags don't start with # *any* of our allowed prefixes. 
                     raise ValidationError(
-                        'Koji tag validation: {} does not satisfy any of allowed prefixes: {}'
-                        .format(tag_list,
-                                [pre + p + post for p in conf.koji_tag_prefixes]))
+                        "Koji tag validation: {} does not satisfy any of allowed prefixes: {}"
+                        .format(tag_list, [pre + p + post for p in conf.koji_tag_prefixes])
+                    )

             # Finally.. after all that validation, call the original function
             # and return its value.
@@ -223,8 +240,12 @@ def get_rpm_release(module_build):
     :param module_build: a models.ModuleBuild object
     :return: a string of the module's dist tag
     """
-    dist_str = '.'.join([module_build.name, module_build.stream, str(module_build.version),
-                         str(module_build.context)]).encode('utf-8')
+    dist_str = ".".join([
+        module_build.name,
+        module_build.stream,
+        str(module_build.version),
+        str(module_build.context),
+    ]).encode("utf-8")
     dist_hash = hashlib.sha1(dist_str).hexdigest()[:8]

     # We need to share the same auto-incrementing index in dist tag between all MSE builds.
@@ -234,14 +255,14 @@ def get_rpm_release(module_build):
     mse_build_ids.sort()
     index = mse_build_ids[0]
     try:
-        buildrequires = module_build.mmd().get_xmd()['mbs']['buildrequires']
+        buildrequires = module_build.mmd().get_xmd()["mbs"]["buildrequires"]
     except (ValueError, KeyError):
-        log.warning('Module build {0} does not have buildrequires in its xmd'
-                    .format(module_build.id))
+        log.warning(
+            "Module build {0} does not have buildrequires in its xmd".format(module_build.id))
         buildrequires = None

     # Determine which buildrequired module will influence the disttag
-    br_module_marking = ''
+    br_module_marking = ""
     # If the buildrequires are recorded in the xmd then we can try to find the base module that
     # is buildrequired
     if buildrequires:
@@ -256,13 +277,17 @@ def get_rpm_release(module_build):

             with models.make_session(conf) as session:
                 module_obj = models.ModuleBuild.get_build_from_nsvc(
-                    session, module, module_in_xmd['stream'], module_in_xmd['version'],
-                    module_in_xmd['context'])
+                    session,
+                    module,
+                    module_in_xmd["stream"],
+                    module_in_xmd["version"],
+                    module_in_xmd["context"],
+                )
                 if not module_obj:
                     continue

                 try:
-                    marking = module_obj.mmd().get_xmd()['mbs']['disttag_marking']
+                    marking = module_obj.mmd().get_xmd()["mbs"]["disttag_marking"]
                 # We must check for a KeyError because a Variant object doesn't support the `get`
                 # method
                 except KeyError:
@@ -272,20 +297,19 @@ def get_rpm_release(module_build):
                     # conf.allowed_disttag_marking_module_names, and the base module doesn't have
                     # the disttag_marking set, then default to the stream of the first base module
                     marking = module_obj.stream
-                br_module_marking = marking + '+'
+                br_module_marking = marking + "+"
                 break
         else:
-            log.warning('Module build {0} does not buildrequire a base module ({1})'
-                        .format(module_build.id, ' or '.join(conf.base_module_names)))
+            log.warning(
+                "Module build {0} does not buildrequire a base module ({1})".format(
+                    module_build.id, " or ".join(conf.base_module_names))
+            )

     # use alternate prefix for scratch module build components so they can be identified
-    prefix = ('scrmod+' if module_build.scratch else conf.default_dist_tag_prefix)
+    prefix = "scrmod+" if module_build.scratch else conf.default_dist_tag_prefix

-    return '{prefix}{base_module_marking}{index}+{dist_hash}'.format(
-        prefix=prefix,
-        base_module_marking=br_module_marking,
-        index=index,
-        dist_hash=dist_hash,
+    return "{prefix}{base_module_marking}{index}+{dist_hash}".format(
+        prefix=prefix, base_module_marking=br_module_marking, index=index, dist_hash=dist_hash
     )
@@ -302,6 +326,7 @@ def create_dogpile_key_generator_func(skip_first_n_args=0):
     when the db.session is part of cached method call, and the caching should
     work no matter what session instance is passed to cached method argument.
     """
+
     def key_generator(namespace, fn):
         fname = fn.__name__
@@ -315,6 +340,7 @@ def create_dogpile_key_generator_func(skip_first_n_args=0):
             return key_template

         return generate_key
+
     return key_generator


@@ -354,8 +380,8 @@ def import_mmd(session, mmd, check_buildrequires=True):

     # Verify that the virtual streams are the correct type
     if virtual_streams and (
-        not isinstance(virtual_streams, list) or
-        any(not isinstance(vs, string_types) for vs in virtual_streams)
+        not isinstance(virtual_streams, list)
+        or any(not isinstance(vs, string_types) for vs in virtual_streams)
     ):
         msg = "The virtual streams must be a list of strings"
         log.error(msg)
@@ -398,19 +424,19 @@ def import_mmd(session, mmd, check_buildrequires=True):
         xmd_brs = set(xmd["mbs"].get("buildrequires", {}).keys())
         if brs - xmd_brs:
             raise UnprocessableEntity(
-                'The imported module buildrequires other modules, but the metadata in the '
-                'xmd["mbs"]["buildrequires"] dictionary is missing entries')
+                "The imported module buildrequires other modules, but the metadata in the "
+                'xmd["mbs"]["buildrequires"] dictionary is missing entries'
+            )
     elif "buildrequires" not in xmd["mbs"]:
         xmd["mbs"]["buildrequires"] = {}
         mmd.set_xmd(glib.dict_values(xmd))

-    koji_tag = xmd['mbs'].get('koji_tag')
+    koji_tag = xmd["mbs"].get("koji_tag")
     if koji_tag is None:
         log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc))

     # Get the ModuleBuild from DB.
-    build = models.ModuleBuild.get_build_from_nsvc(
-        session, name, stream, version, context)
+    build = models.ModuleBuild.get_build_from_nsvc(session, name, stream, version, context)
     if build:
         msg = "Updating existing module build {}.".format(nsvc)
         log.info(msg)
@@ -422,11 +448,11 @@ def import_mmd(session, mmd, check_buildrequires=True):
     build.stream = stream
     build.version = version
     build.koji_tag = koji_tag
-    build.state = models.BUILD_STATES['ready']
+    build.state = models.BUILD_STATES["ready"]
     build.modulemd = to_text_type(mmd.dumps())
     build.context = context
     build.owner = "mbs_import"
-    build.rebuild_strategy = 'all'
+    build.rebuild_strategy = "all"
     build.time_submitted = datetime.utcnow()
     build.time_modified = datetime.utcnow()
     build.time_completed = datetime.utcnow()
@@ -492,15 +518,15 @@ def import_fake_base_module(nsvc):
         srpm_buildroot.add_rpm(rpm)
     mmd.add_profile(srpm_buildroot)

-    xmd = {'mbs': {}}
-    xmd_mbs = xmd['mbs']
-    xmd_mbs['buildrequires'] = {}
-    xmd_mbs['requires'] = {}
-    xmd_mbs['commit'] = 'ref_%s' % context
-    xmd_mbs['mse'] = 'true'
+    xmd = {"mbs": {}}
+    xmd_mbs = xmd["mbs"]
+    xmd_mbs["buildrequires"] = {}
+    xmd_mbs["requires"] = {}
+    xmd_mbs["commit"] = "ref_%s" % context
+    xmd_mbs["mse"] = "true"
     # Use empty "repofile://" URI for base module. The base module will use the
     # `conf.base_module_names` list as list of default repositories.
-    xmd_mbs['koji_tag'] = 'repofile://'
+    xmd_mbs["koji_tag"] = "repofile://"
     mmd.set_xmd(glib.dict_values(xmd))

     with models.make_session(conf) as session:
@@ -513,6 +539,7 @@ def get_local_releasever():
     """
     # Import DNF here to not force it as a hard MBS dependency.
     import dnf
+
     dnf_base = dnf.Base()
     return dnf_base.conf.releasever
@@ -578,8 +605,8 @@ def get_mmd_from_scm(url):
     """
     from module_build_service.utils.submit import _fetch_mmd

-    mmd, _ = _fetch_mmd(url, branch=None, allow_local_url=False,
-                        whitelist_url=False, mandatory_checks=False)
+    mmd, _ = _fetch_mmd(
+        url, branch=None, allow_local_url=False, whitelist_url=False, mandatory_checks=False)

     return mmd
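Note: `generate_koji_tag()` above has two output shapes: the readable `prefix + name-stream-version-context` join, and a sha1 fallback once the joined tag plus Koji's `-build` suffix would exceed `max_length`. A quick illustration with invented values:

    from module_build_service.utils.general import generate_koji_tag

    # Short NSVC: the readable join fits within the default max_length=256.
    generate_koji_tag("testmodule", "master", 20180123, "c2c572ec")
    # -> "module-testmodule-master-20180123-c2c572ec"

    # Forcing a tiny max_length switches to the hashed fallback:
    # "module-" plus the first 16 hex digits of
    # sha1("testmodule.master.20180123.c2c572ec").
    generate_koji_tag("testmodule", "master", 20180123, "c2c572ec", max_length=20)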
diff --git a/module_build_service/utils/mse.py b/module_build_service/utils/mse.py
index 33453957..6da01c19 100644
--- a/module_build_service/utils/mse.py
+++ b/module_build_service/utils/mse.py
@@ -60,11 +60,9 @@ def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream
             if name in default_streams:
                 expanded_streams = [default_streams[name]]
             elif raise_if_stream_ambigous:
-                raise StreamAmbigous(
-                    "There are multiple streams to choose from for module %s." % name)
+                raise StreamAmbigous("There are multiple streams to choose from for module %s." % name)
             else:
-                builds = models.ModuleBuild.get_last_build_in_all_streams(
-                    session, name)
+                builds = models.ModuleBuild.get_last_build_in_all_streams(session, name)
                 expanded_streams = [build.stream for build in builds]
     else:
         expanded_streams = []
@@ -79,8 +77,10 @@ def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream
         if name in default_streams:
             expanded_streams = [default_streams[name]]
         elif raise_if_stream_ambigous:
-            raise StreamAmbigous("There are multiple streams %r to choose from for module %s."
-                                 % (expanded_streams, name))
+            raise StreamAmbigous(
+                "There are multiple streams %r to choose from for module %s."
+                % (expanded_streams, name)
+            )

     return expanded_streams

@@ -102,23 +102,32 @@ def expand_mse_streams(session, mmd, default_streams=None, raise_if_stream_ambig
         expanded = {}
         for name, streams in deps.get_requires().items():
             streams_set = Modulemd.SimpleSet()
-            streams_set.set(_expand_mse_streams(
-                session, name, streams.get(), default_streams, raise_if_stream_ambigous))
+            streams_set.set(
+                _expand_mse_streams(
+                    session, name, streams.get(), default_streams, raise_if_stream_ambigous)
+            )
             expanded[name] = streams_set
         deps.set_requires(expanded)

         expanded = {}
         for name, streams in deps.get_buildrequires().items():
             streams_set = Modulemd.SimpleSet()
-            streams_set.set(_expand_mse_streams(
-                session, name, streams.get(), default_streams, raise_if_stream_ambigous))
+            streams_set.set(
+                _expand_mse_streams(
+                    session, name, streams.get(), default_streams, raise_if_stream_ambigous)
+            )
             expanded[name] = streams_set
         deps.set_buildrequires(expanded)


-def _get_mmds_from_requires(requires, mmds, recursive=False,
-                            default_streams=None, raise_if_stream_ambigous=False,
-                            base_module_mmds=None):
+def _get_mmds_from_requires(
+    requires,
+    mmds,
+    recursive=False,
+    default_streams=None,
+    raise_if_stream_ambigous=False,
+    base_module_mmds=None,
+):
     """
     Helper method for get_mmds_required_by_module_recursively returning
     the list of module metadata objects defined by `requires` dict.
@@ -153,8 +162,10 @@ def _get_mmds_from_requires(requires, mmds, recursive=False,
         if name in default_streams:
             streams_to_try = [default_streams[name]]
         elif len(streams_to_try) > 1 and raise_if_stream_ambigous:
-            raise StreamAmbigous("There are multiple streams %r to choose from for module %s."
-                                 % (streams_to_try, name))
+            raise StreamAmbigous(
+                "There are multiple streams %r to choose from for module %s."
+                % (streams_to_try, name)
+            )

         # For each valid stream, find the last build in a stream and also all
         # its contexts and add mmds of these builds to `mmds` and `added_mmds`.
@@ -170,10 +181,12 @@ def _get_mmds_from_requires(requires, mmds, recursive=False,
             if base_module_mmds:
                 for base_module_mmd in base_module_mmds:
                     base_module_nsvc = ":".join([
-                        base_module_mmd.get_name(), base_module_mmd.get_stream(),
-                        str(base_module_mmd.get_version()), base_module_mmd.get_context()])
-                    mmds[ns] += resolver.get_buildrequired_modulemds(
-                        name, stream, base_module_nsvc)
+                        base_module_mmd.get_name(),
+                        base_module_mmd.get_stream(),
+                        str(base_module_mmd.get_version()),
+                        base_module_mmd.get_context(),
+                    ])
+                    mmds[ns] += resolver.get_buildrequired_modulemds(name, stream, base_module_nsvc)
             else:
                 mmds[ns] = resolver.get_module_modulemds(name, stream, strict=True)
             added_mmds[ns] += mmds[ns]
@@ -262,7 +275,8 @@ def _get_base_module_mmds(mmd):


 def get_mmds_required_by_module_recursively(
-        mmd, default_streams=None, raise_if_stream_ambigous=False):
+    mmd, default_streams=None, raise_if_stream_ambigous=False
+):
     """
     Returns the list of Module metadata objects of all modules required while
     building the module defined by `mmd` module metadata. This presumes the
@@ -295,10 +309,11 @@ def get_mmds_required_by_module_recursively(
     # Get the MMDs of all compatible base modules based on the buildrequires.
     base_module_mmds = _get_base_module_mmds(mmd)
     if not base_module_mmds:
-        base_module_choices = ' or '.join(conf.base_module_names)
+        base_module_choices = " or ".join(conf.base_module_names)
         raise UnprocessableEntity(
             "None of the base module ({}) streams in the buildrequires section could be found"
-            .format(base_module_choices))
+            .format(base_module_choices)
+        )

     # Add base modules to `mmds`.
     for base_module in base_module_mmds:
@@ -309,23 +324,32 @@ def get_mmds_required_by_module_recursively(
     # Get all the buildrequires of the module of interest.
     for deps in mmd.get_dependencies():
         mmds = _get_mmds_from_requires(
-            deps.get_buildrequires(), mmds, False, default_streams,
-            raise_if_stream_ambigous, base_module_mmds)
+            deps.get_buildrequires(),
+            mmds,
+            False,
+            default_streams,
+            raise_if_stream_ambigous,
+            base_module_mmds,
+        )

     # Now get the requires of buildrequires recursively.
     for mmd_key in list(mmds.keys()):
         for mmd in mmds[mmd_key]:
             for deps in mmd.get_dependencies():
                 mmds = _get_mmds_from_requires(
-                    deps.get_requires(), mmds, True, default_streams,
-                    raise_if_stream_ambigous, base_module_mmds)
+                    deps.get_requires(),
+                    mmds,
+                    True,
+                    default_streams,
+                    raise_if_stream_ambigous,
+                    base_module_mmds,
+                )

     # Make single list from dict of lists.
     res = []
     for ns, mmds_list in mmds.items():
         if len(mmds_list) == 0:
-            raise UnprocessableEntity(
-                "Cannot find any module builds for %s" % (ns))
+            raise UnprocessableEntity("Cannot find any module builds for %s" % (ns))
         res += mmds_list
     return res
@@ -374,7 +398,8 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default

     # Show log.info message with the NSVCs we have added to mmd_resolver.
     nsvcs_to_solve = [
         ":".join([m.get_name(), m.get_stream(), str(m.get_version()), str(m.get_context())])
-        for m in mmds_for_resolving]
+        for m in mmds_for_resolving
+    ]
     log.info("Starting resolving with following input modules: %r", nsvcs_to_solve)

     # Resolve the dependencies between modules and get the list of all valid
@@ -407,7 +432,7 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
         # Get the values for dependencies_id, self_nsvca and req_name_stream variables.
         for nsvca in requires:
             req_name, req_stream, _, req_context, req_arch = nsvca.split(":")
-            if req_arch == 'src':
+            if req_arch == "src":
                 assert req_name == current_mmd.get_name()
                 assert req_stream == current_mmd.get_stream()
                 assert dependencies_id is None
@@ -418,8 +443,9 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
                 req_name_stream[req_name] = req_stream
         if dependencies_id is None or self_nsvca is None:
             raise RuntimeError(
-                "%s:%s not found in requires %r" % (
-                    current_mmd.get_name(), current_mmd.get_stream(), requires))
+                "%s:%s not found in requires %r"
+                % (current_mmd.get_name(), current_mmd.get_stream(), requires)
+            )

         # The name:[streams, ...] pairs do not have to be the same in both
         # buildrequires/requires. In case they are the same, we replace the streams
@@ -455,7 +481,7 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
             new_dep.add_buildrequires(req_name, [req_name_stream[req_name]])

         # Set the new dependencies.
-        mmd_copy.set_dependencies((new_dep, ))
+        mmd_copy.set_dependencies((new_dep,))

         # The Modulemd.Dependencies() stores only streams, but to really build this
         # module, we need NSVC of buildrequires, so we have to store this data in XMD.
@@ -471,11 +497,11 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
                 br_list.append(nsvc)

         # Resolve the buildrequires and store the result in XMD.
-        if 'mbs' not in xmd:
-            xmd['mbs'] = {}
+        if "mbs" not in xmd:
+            xmd["mbs"] = {}
         resolver = module_build_service.resolver.system_resolver
-        xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_list)
-        xmd['mbs']['mse'] = True
+        xmd["mbs"]["buildrequires"] = resolver.resolve_requires(br_list)
+        xmd["mbs"]["mse"] = True

         mmd_copy.set_xmd(glib.dict_values(xmd))
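Note: all three expansion helpers in mse.py resolve an ambiguous stream the same way: an entry in `default_streams` wins, otherwise multiple candidates either raise `StreamAmbigous` or all survive into stream expansion. A condensed sketch of that precedence, assuming `StreamAmbigous` is importable from `module_build_service.errors` like the other error classes (module names and streams are invented):

    from module_build_service.errors import StreamAmbigous

    def pick_streams(name, candidates, default_streams, raise_if_stream_ambigous):
        # 1. An explicit default stream for this module always wins.
        if name in default_streams:
            return [default_streams[name]]
        # 2. Several candidates are an error when the caller demands one...
        if len(candidates) > 1 and raise_if_stream_ambigous:
            raise StreamAmbigous(
                "There are multiple streams %r to choose from for module %s."
                % (candidates, name))
        # 3. ...otherwise every candidate stays in the expansion.
        return list(candidates)

    pick_streams("platform", ["f28", "f29"], {"platform": "f28"}, True)  # ["f28"]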
diff --git a/module_build_service/utils/reuse.py b/module_build_service/utils/reuse.py
index 6fa05ba5..12c35a87 100644
--- a/module_build_service/utils/reuse.py
+++ b/module_build_service/utils/reuse.py
@@ -28,8 +28,7 @@ import module_build_service.messaging
 from module_build_service import log, models, conf


-def reuse_component(component, previous_component_build,
-                    change_state_now=False):
+def reuse_component(component, previous_component_build, change_state_now=False):
     """
     Reuses component build `previous_component_build` instead of building
     component `component`
@@ -42,8 +41,8 @@ def reuse_component(component, previous_component_build, change_state_now=False)

     log.info(
         'Reusing component "{0}" from a previous module '
-        'build with the nvr "{1}"'.format(
-            component.package, previous_component_build.nvr))
+        'build with the nvr "{1}"'.format(component.package, previous_component_build.nvr)
+    )
     component.reused_component_id = previous_component_build.id
     component.task_id = previous_component_build.task_id
     if change_state_now:
@@ -54,24 +53,23 @@ def reuse_component(component, previous_component_build, change_state_now=False)
         # few lines below. If we would set it to the right state right
         # here, we would miss the code path handling the KojiBuildChange
         # which works only when switching from BUILDING to COMPLETE.
-        component.state = koji.BUILD_STATES['BUILDING']
-        component.state_reason = \
-            'Reused component from previous module build'
+        component.state = koji.BUILD_STATES["BUILDING"]
+        component.state_reason = "Reused component from previous module build"
     component.nvr = previous_component_build.nvr
     nvr_dict = kobo.rpmlib.parse_nvr(component.nvr)
     # Add this message to further_work so that the reused
     # component will be tagged properly
     return [
         module_build_service.messaging.KojiBuildChange(
-            msg_id='reuse_component: fake msg',
+            msg_id="reuse_component: fake msg",
             build_id=None,
             task_id=component.task_id,
             build_new_state=previous_component_build.state,
-            build_name=nvr_dict['name'],
-            build_version=nvr_dict['version'],
-            build_release=nvr_dict['release'],
+            build_name=nvr_dict["name"],
+            build_version=nvr_dict["version"],
+            build_release=nvr_dict["release"],
             module_build_id=component.module_id,
-            state_reason=component.state_reason
+            state_reason=component.state_reason,
         )
     ]

@@ -90,18 +88,20 @@ def get_reusable_module(session, module):
     mmd = module.mmd()

     # Find the latest module that is in the done or ready state
-    previous_module_build = session.query(models.ModuleBuild)\
-        .filter_by(name=mmd.get_name())\
-        .filter_by(stream=mmd.get_stream())\
-        .filter_by(state=models.BUILD_STATES["ready"])\
-        .filter(models.ModuleBuild.scmurl.isnot(None))\
-        .filter_by(build_context=module.build_context)\
+    previous_module_build = (
+        session.query(models.ModuleBuild)
+        .filter_by(name=mmd.get_name())
+        .filter_by(stream=mmd.get_stream())
+        .filter_by(state=models.BUILD_STATES["ready"])
+        .filter(models.ModuleBuild.scmurl.isnot(None))
+        .filter_by(build_context=module.build_context)
         .order_by(models.ModuleBuild.time_completed.desc())
+    )
     # If we are rebuilding with the "changed-and-after" option, then we can't reuse
     # components from modules that were built more liberally
-    if module.rebuild_strategy == 'changed-and-after':
+    if module.rebuild_strategy == "changed-and-after":
         previous_module_build = previous_module_build.filter(
-            models.ModuleBuild.rebuild_strategy.in_(['all', 'changed-and-after']))
+            models.ModuleBuild.rebuild_strategy.in_(["all", "changed-and-after"]))
         previous_module_build = previous_module_build.filter_by(
             ref_build_context=module.ref_build_context)
     previous_module_build = previous_module_build.first()
@@ -139,9 +139,13 @@ def attempt_to_reuse_all_components(builder, session, module):
         if c.package == "module-build-macros":
             continue
         component_to_reuse = get_reusable_component(
-            session, module, c.package,
-            previous_module_build=previous_module_build, mmd=mmd,
-            old_mmd=old_mmd)
+            session,
+            module,
+            c.package,
+            previous_module_build=previous_module_build,
+            mmd=mmd,
+            old_mmd=old_mmd,
+        )
         if not component_to_reuse:
             return False

@@ -188,7 +192,7 @@ def get_reusable_components(session, module, component_names, previous_module_bu
         order as `component_names`
     """
     # We support component reuse only for the koji and test backends.
-    if conf.system not in ['koji', 'test']:
+    if conf.system not in ["koji", "test"]:
         return [None] * len(component_names)

     if not previous_module_build:
@@ -201,15 +205,17 @@ def get_reusable_components(session, module, component_names, previous_module_bu

     ret = []
     for component_name in component_names:
-        ret.append(get_reusable_component(
-            session, module, component_name, previous_module_build, mmd,
-            old_mmd))
+        ret.append(
+            get_reusable_component(
+                session, module, component_name, previous_module_build, mmd, old_mmd)
+        )

     return ret


-def get_reusable_component(session, module, component_name,
-                           previous_module_build=None, mmd=None, old_mmd=None):
+def get_reusable_component(
+    session, module, component_name, previous_module_build=None, mmd=None, old_mmd=None
+):
     """
     Returns the component (RPM) build of a module that can be reused
     instead of needing to rebuild it
@@ -235,11 +241,11 @@ def get_reusable_component(
     """

     # We support component reuse only for the koji and test backends.
-    if conf.system not in ['koji', 'test']:
+    if conf.system not in ["koji", "test"]:
         return None

     # If the rebuild strategy is "all", that means that nothing can be reused
-    if module.rebuild_strategy == 'all':
+    if module.rebuild_strategy == "all":
         log.info('Cannot re-use the component because the rebuild strategy is "all".')
         return None

@@ -258,39 +264,46 @@ def get_reusable_component(
     # be reused
     new_module_build_component = models.ComponentBuild.from_component_name(
         session, component_name, module.id)
-    if not new_module_build_component or not new_module_build_component.batch \
-            or not new_module_build_component.ref:
-        log.info('Cannot re-use. New component not found in the db.')
+    if (
+        not new_module_build_component
+        or not new_module_build_component.batch
+        or not new_module_build_component.ref
+    ):
+        log.info("Cannot re-use. New component not found in the db.")
         return None

     prev_module_build_component = models.ComponentBuild.from_component_name(
-        session, component_name, previous_module_build.id)
+        session, component_name, previous_module_build.id
+    )
     # If the component to reuse for some reason was not found in the database,
     # or the ref is missing, something has gone wrong and the component cannot
     # be reused
-    if not prev_module_build_component or not prev_module_build_component.batch\
-            or not prev_module_build_component.ref:
-        log.info('Cannot re-use. Previous component not found in the db.')
+    if (
+        not prev_module_build_component
+        or not prev_module_build_component.batch
+        or not prev_module_build_component.ref
+    ):
+        log.info("Cannot re-use. Previous component not found in the db.")
         return None

     # Make sure the ref for the component that is trying to be reused
     # hasn't changed since the last build
     if prev_module_build_component.ref != new_module_build_component.ref:
-        log.info('Cannot re-use. Component commit hashes do not match.')
+        log.info("Cannot re-use. Component commit hashes do not match.")
         return None

     # At this point we've determined that both module builds contain the component
     # and the components share the same commit hash
-    if module.rebuild_strategy == 'changed-and-after':
+    if module.rebuild_strategy == "changed-and-after":
         # Make sure the batch number for the component that is trying to be reused
         # hasn't changed since the last build
         if prev_module_build_component.batch != new_module_build_component.batch:
-            log.info('Cannot re-use. Batch numbers do not match.')
+            log.info("Cannot re-use. Batch numbers do not match.")
             return None
Batch numbers do not match.") return None # If the mmd.buildopts.macros.rpms changed, we cannot reuse - if mmd.get_rpm_buildopts().get('macros') != old_mmd.get_rpm_buildopts().get('macros'): - log.info('Cannot re-use. Old modulemd macros do not match the new.') + if mmd.get_rpm_buildopts().get("macros") != old_mmd.get_rpm_buildopts().get("macros"): + log.info("Cannot re-use. Old modulemd macros do not match the new.") return None # At this point we've determined that both module builds contain the component @@ -318,32 +331,34 @@ def get_reusable_component(session, module, component_name, continue new_module_build_components.append(set([ - (value.package, value.ref) for value in - new_component_builds if value.batch == i + 1 + (value.package, value.ref) + for value in new_component_builds + if value.batch == i + 1 ])) previous_module_build_components.append(set([ - (value.package, value.ref) for value in - prev_component_builds if value.batch == i + 1 + (value.package, value.ref) + for value in prev_component_builds + if value.batch == i + 1 ])) # If the previous batches don't have the same ordering and hashes, then the # component can't be reused if previous_module_build_components != new_module_build_components: - log.info('Cannot re-use. Ordering or commit hashes of ' - 'previous batches differ.') + log.info("Cannot re-use. Ordering or commit hashes of previous batches differ.") return None for pkg_name, pkg in mmd.get_rpm_components().items(): if pkg_name not in old_mmd.get_rpm_components(): - log.info('Cannot re-use. Package lists are different.') + log.info("Cannot re-use. Package lists are different.") return None - if set(pkg.get_arches().get()) != \ - set(old_mmd.get_rpm_components()[pkg_name].get_arches().get()): - log.info('Cannot re-use. Architectures are different for package: %s.' % pkg_name) + if set(pkg.get_arches().get()) != set( + old_mmd.get_rpm_components()[pkg_name].get_arches().get() + ): + log.info("Cannot re-use. Architectures are different for package: %s." % pkg_name) return None reusable_component = models.ComponentBuild.query.filter_by( package=component_name, module_id=previous_module_build.id).one() - log.debug('Found reusable component!') + log.debug("Found reusable component!") return reusable_component diff --git a/module_build_service/utils/submit.py b/module_build_service/utils/submit.py index 76175206..79bec145 100644 --- a/module_build_service/utils/submit.py +++ b/module_build_service/utils/submit.py @@ -40,8 +40,7 @@ from gi.repository import GLib import module_build_service.scm from module_build_service import conf, db, log, models, Modulemd -from module_build_service.errors import ( - ValidationError, UnprocessableEntity, Forbidden, Conflict) +from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden, Conflict from module_build_service import glib from module_build_service.utils import to_text_type @@ -106,18 +105,15 @@ def _scm_get_latest(pkg): # we want to pull from, we need to resolve that f25 branch # to the specific commit available at the time of # submission (now). 
- pkgref = module_build_service.scm.SCM( - pkg.get_repository()).get_latest(pkg.get_ref()) + pkgref = module_build_service.scm.SCM(pkg.get_repository()).get_latest(pkg.get_ref()) except Exception as e: log.exception(e) - return {'error': "Failed to get the latest commit for %s#%s" % ( - pkg.get_repository(), pkg.get_ref())} + return { + "error": "Failed to get the latest commit for %s#%s" + % (pkg.get_repository(), pkg.get_ref()) + } - return { - 'pkg_name': pkg.get_name(), - 'pkg_ref': pkgref, - 'error': None - } + return {"pkg_name": pkg.get_name(), "pkg_ref": pkgref, "error": None} def format_mmd(mmd, scmurl, module=None, session=None): @@ -136,12 +132,12 @@ def format_mmd(mmd, scmurl, module=None, session=None): from module_build_service.scm import SCM xmd = glib.from_variant_dict(mmd.get_xmd()) - if 'mbs' not in xmd: - xmd['mbs'] = {} - if 'scmurl' not in xmd['mbs']: - xmd['mbs']['scmurl'] = scmurl or '' - if 'commit' not in xmd['mbs']: - xmd['mbs']['commit'] = '' + if "mbs" not in xmd: + xmd["mbs"] = {} + if "scmurl" not in xmd["mbs"]: + xmd["mbs"]["scmurl"] = scmurl or "" + if "commit" not in xmd["mbs"]: + xmd["mbs"]["commit"] = "" # If module build was submitted via yaml file, there is no scmurl if scmurl: @@ -154,35 +150,37 @@ def format_mmd(mmd, scmurl, module=None, session=None): else: full_scm_hash = scm.get_full_commit_hash() - xmd['mbs']['commit'] = full_scm_hash + xmd["mbs"]["commit"] = full_scm_hash # If a commit hash wasn't provided then just get the latest from master else: - xmd['mbs']['commit'] = scm.get_latest() + xmd["mbs"]["commit"] = scm.get_latest() if mmd.get_rpm_components() or mmd.get_module_components(): - if 'rpms' not in xmd['mbs']: - xmd['mbs']['rpms'] = {} + if "rpms" not in xmd["mbs"]: + xmd["mbs"]["rpms"] = {} # Add missing data in RPM components for pkgname, pkg in mmd.get_rpm_components().items(): # In case of resubmit of existing module which have been # cancelled/failed during the init state, the package # was maybe already handled by MBS, so skip it in this case. - if pkgname in xmd['mbs']['rpms']: + if pkgname in xmd["mbs"]["rpms"]: continue if pkg.get_repository() and not conf.rpms_allow_repository: raise Forbidden( "Custom component repositories aren't allowed. " - "%r bears repository %r" % (pkgname, pkg.get_repository())) + "%r bears repository %r" % (pkgname, pkg.get_repository()) + ) if pkg.get_cache() and not conf.rpms_allow_cache: raise Forbidden( "Custom component caches aren't allowed. " - "%r bears cache %r" % (pkgname, pkg.cache)) + "%r bears cache %r" % (pkgname, pkg.cache) + ) if not pkg.get_repository(): pkg.set_repository(conf.rpms_default_repository + pkgname) if not pkg.get_cache(): pkg.set_cache(conf.rpms_default_cache + pkgname) if not pkg.get_ref(): - pkg.set_ref('master') + pkg.set_ref("master") if pkg.get_arches().size() == 0: arches = Modulemd.SimpleSet() arches.set(conf.arches) @@ -193,11 +191,12 @@ def format_mmd(mmd, scmurl, module=None, session=None): if mod.get_repository() and not conf.modules_allow_repository: raise Forbidden( "Custom module repositories aren't allowed. " - "%r bears repository %r" % (modname, mod.get_repository())) + "%r bears repository %r" % (modname, mod.get_repository()) + ) if not mod.get_repository(): mod.set_repository(conf.modules_default_repository + modname) if not mod.get_ref(): - mod.set_ref('master') + mod.set_ref("master") # Check that SCM URL is valid and replace potential branches in pkg refs # by real SCM hash and store the result to our private xmd place in modulemd. 
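The hunk below reflows MBS's thread-pool based ref resolution. As a rough standalone sketch of the same pattern — the component list and the `get_latest` callable are illustrative stand-ins, not the MBS API:

```python
# Minimal sketch: resolve each component's branch ref to a commit hash in
# parallel, mirroring the pool.map_async(_scm_get_latest, ...) idea below.
from multiprocessing.dummy import Pool as ThreadPool  # thread-backed Pool


def resolve_refs(components, get_latest, num_threads=10):
    # components: iterable of {"name": ..., "repo": ..., "ref": ...} dicts
    # get_latest(repo, ref) -> commit hash; stands in for
    # module_build_service.scm.SCM(repo).get_latest(ref)
    pool = ThreadPool(num_threads)
    try:
        return pool.map(
            lambda c: (c["name"], get_latest(c["repo"], c["ref"])), components)
    finally:
        pool.close()
```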
@@ -205,8 +204,10 @@ def format_mmd(mmd, scmurl, module=None, session=None):
     try:
         # Filter out the packages which we have already resolved in possible
         # previous runs of this method (can be caused by module build resubmition).
-        pkgs_to_resolve = [pkg for pkg in mmd.get_rpm_components().values()
-                           if pkg.get_name() not in xmd['mbs']['rpms']]
+        pkgs_to_resolve = [
+            pkg for pkg in mmd.get_rpm_components().values()
+            if pkg.get_name() not in xmd["mbs"]["rpms"]
+        ]
         async_result = pool.map_async(_scm_get_latest, pkgs_to_resolve)
 
         # For modules with lot of components, the _scm_get_latest can take a lot of time.
@@ -228,7 +229,7 @@ def format_mmd(mmd, scmurl, module=None, session=None):
             else:
                 pkg_name = pkg_dict["pkg_name"]
                 pkg_ref = pkg_dict["pkg_ref"]
-                xmd['mbs']['rpms'][pkg_name] = {'ref': pkg_ref}
+                xmd["mbs"]["rpms"][pkg_name] = {"ref": pkg_ref}
         if err_msg:
             raise UnprocessableEntity(err_msg)
@@ -251,32 +252,38 @@ def get_prefixed_version(mmd):
     for base_module in conf.base_module_names:
         # xmd is a GLib Variant and doesn't support .get() syntax
         try:
-            base_module_stream = xmd['mbs']['buildrequires'].get(
-                base_module, {}).get('stream')
+            base_module_stream = xmd["mbs"]["buildrequires"].get(base_module, {}).get("stream")
             if base_module_stream:
                 # Break after finding the first base module that is buildrequired
                 break
         except KeyError:
-            log.warning('The module\'s mmd is missing information in the xmd section')
+            log.warning("The module's mmd is missing information in the xmd section")
             return version
     else:
-        log.warning('This module does not buildrequire a base module ({0})'
-                    .format(' or '.join(conf.base_module_names)))
+        log.warning(
+            "This module does not buildrequire a base module ({0})".format(
+                " or ".join(conf.base_module_names)
+            )
+        )
         return version
 
     # The platform version (e.g. prefix1.2.0 => 010200)
     version_prefix = models.ModuleBuild.get_stream_version(base_module_stream, right_pad=False)
     if version_prefix is None:
-        log.warning('The "{0}" stream "{1}" couldn\'t be used to prefix the module\'s '
-                    'version'.format(base_module, base_module_stream))
+        log.warning(
+            'The "{0}" stream "{1}" couldn\'t be used to prefix the module\'s '
+            "version".format(base_module, base_module_stream)
+        )
         return version
 
     # Strip the stream suffix because Modulemd requires version to be an integer
     new_version = int(str(int(math.floor(version_prefix))) + str(version))
     if new_version > GLib.MAXUINT64:
-        log.warning('The "{0}" stream "{1}" caused the module\'s version prefix to be '
-                    'too long'.format(base_module, base_module_stream))
+        log.warning(
+            'The "{0}" stream "{1}" caused the module\'s version prefix to be '
+            "too long".format(base_module, base_module_stream)
+        )
         return version
 
     return new_version
@@ -297,13 +304,14 @@ def validate_mmd(mmd):
         if mod.get_repository() and not conf.modules_allow_repository:
             raise Forbidden(
                 "Custom module repositories aren't allowed. "
-                "%r bears repository %r" % (modname, mod.get_repository()))
+                "%r bears repository %r" % (modname, mod.get_repository())
+            )
 
     name = mmd.get_name()
     xmd = mmd.get_xmd()
-    if 'mbs' in xmd:
+    if "mbs" in xmd:
         allowed_to_mark_disttag = name in conf.allowed_disttag_marking_module_names
-        if not (xmd['mbs'].keys() == ['disttag_marking'] and allowed_to_mark_disttag):
+        if not (xmd["mbs"].keys() == ["disttag_marking"] and allowed_to_mark_disttag):
             raise ValidationError('The "mbs" xmd field is reserved for MBS')
 
     if name in conf.base_module_names:
@@ -317,12 +325,12 @@ def merge_included_mmd(mmd, included_mmd):
     the `main` when it includes another module defined by `included_mmd`
     """
     included_xmd = glib.from_variant_dict(included_mmd.get_xmd())
-    if 'rpms' in included_xmd['mbs']:
+    if "rpms" in included_xmd["mbs"]:
         xmd = glib.from_variant_dict(mmd.get_xmd())
-        if 'rpms' not in xmd['mbs']:
-            xmd['mbs']['rpms'] = included_xmd['mbs']['rpms']
+        if "rpms" not in xmd["mbs"]:
+            xmd["mbs"]["rpms"] = included_xmd["mbs"]["rpms"]
         else:
-            xmd['mbs']['rpms'].update(included_xmd['mbs']['rpms'])
+            xmd["mbs"]["rpms"].update(included_xmd["mbs"]["rpms"])
     # Set the modified xmd back to the modulemd
     mmd.set_xmd(glib.dict_values(xmd))
 
@@ -351,10 +359,10 @@ def get_module_srpm_overrides(module):
         raise ValueError("Invalid srpms list encountered: {}".format(module.srpms))
 
     for source in srpms:
-        if source.startswith('cli-build/') and source.endswith('.src.rpm'):
+        if source.startswith("cli-build/") and source.endswith(".src.rpm"):
             # This is a custom srpm that has been uploaded to koji by rpkg
             # using the package name as the basename suffixed with .src.rpm
-            rpm_name = os.path.basename(source)[:-len('.src.rpm')]
+            rpm_name = os.path.basename(source)[: -len(".src.rpm")]
         else:
             # This should be a local custom srpm path
             if not os.path.exists(source):
@@ -362,13 +370,15 @@ def get_module_srpm_overrides(module):
             # Get package name from rpm headers
             try:
                 rpm_hdr = kobo.rpmlib.get_rpm_header(source)
-                rpm_name = kobo.rpmlib.get_header_field(rpm_hdr, 'name').decode('utf-8')
+                rpm_name = kobo.rpmlib.get_header_field(rpm_hdr, "name").decode("utf-8")
             except Exception:
                 raise ValueError("Provided srpm is invalid: {}".format(source))
 
         if rpm_name in overrides:
-            log.warning('Encountered duplicate custom SRPM "{0}"'
-                        ' for package {1}'.format(source, rpm_name))
+            log.warning(
+                'Encountered duplicate custom SRPM "{0}" for package {1}'
+                .format(source, rpm_name)
+            )
             continue
 
         log.debug('Using custom SRPM "{0}" for package {1}'.format(source, rpm_name))
@@ -377,8 +387,9 @@ def get_module_srpm_overrides(module):
     return overrides
 
 
-def record_component_builds(mmd, module, initial_batch=1,
-                            previous_buildorder=None, main_mmd=None, session=None):
+def record_component_builds(
+    mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None, session=None
+):
     # Imported here to allow import of utils in GenericBuilder.
     import module_build_service.builder
 
@@ -394,13 +405,15 @@ def record_component_builds(mmd, module, initial_batch=1,
     if main_mmd:
         # Check for components that are in both MMDs before merging since MBS
         # currently can't handle that situation.
-        duplicate_components = [rpm for rpm in main_mmd.get_rpm_components().keys()
-                                if rpm in mmd.get_rpm_components()]
+        duplicate_components = [
+            rpm for rpm in main_mmd.get_rpm_components().keys() if rpm in mmd.get_rpm_components()
+        ]
         if duplicate_components:
             error_msg = (
                 'The included module "{0}" in "{1}" have the following '
-                'conflicting components: {2}'.format(
-                    mmd.get_name(), main_mmd.get_name(), ', '.join(duplicate_components)))
+                "conflicting components: {2}".format(
+                    mmd.get_name(), main_mmd.get_name(), ", ".join(duplicate_components))
+            )
             raise UnprocessableEntity(error_msg)
         merge_included_mmd(main_mmd, mmd)
     else:
@@ -417,7 +430,8 @@ def record_component_builds(mmd, module, initial_batch=1,
         srpm_overrides = get_module_srpm_overrides(module)
 
         rpm_weights = module_build_service.builder.GenericBuilder.get_build_weights(
-            [c.get_name() for c in rpm_components])
+            [c.get_name() for c in rpm_components]
+        )
         all_components.sort(key=lambda x: x.get_buildorder())
         # We do not start with batch = 0 here, because the first batch is
         # reserved for module-build-macros. First real components must be
@@ -439,33 +453,35 @@ def record_component_builds(mmd, module, initial_batch=1,
             # It is OK to whitelist all URLs here, because the validity
             # of every URL have been already checked in format_mmd(...).
             included_mmd = _fetch_mmd(full_url, whitelist_url=True)[0]
-            batch = record_component_builds(included_mmd, module, batch,
-                                            previous_buildorder, main_mmd, session=session)
+            batch = record_component_builds(
+                included_mmd, module, batch, previous_buildorder, main_mmd, session=session)
             continue
 
         package = component.get_name()
         if package in srpm_overrides:
             component_ref = None
             full_url = srpm_overrides[package]
-            log.info('Building custom SRPM "{0}"'
-                     ' for package {1}'.format(full_url, package))
+            log.info('Building custom SRPM "{0}"' " for package {1}".format(full_url, package))
         else:
-            component_ref = mmd.get_xmd()['mbs']['rpms'][package]['ref']
+            component_ref = mmd.get_xmd()["mbs"]["rpms"][package]["ref"]
             full_url = component.get_repository() + "?#" + component_ref
 
         # Skip the ComponentBuild if it already exists in database. This can happen
         # in case of module build resubmition.
-        existing_build = models.ComponentBuild.from_component_name(
-            db.session, package, module.id)
+        existing_build = models.ComponentBuild.from_component_name(db.session, package, module.id)
        if existing_build:
             # Check that the existing build has the same most important attributes.
             # This should never be a problem, but it's good to be defensive here so
             # we do not mess things during resubmition.
-            if (existing_build.batch != batch or existing_build.scmurl != full_url or
-                    existing_build.ref != component_ref):
+            if (
+                existing_build.batch != batch
+                or existing_build.scmurl != full_url
+                or existing_build.ref != component_ref
+            ):
                 raise ValidationError(
                     "Module build %s already exists in database, but its attributes "
                     " are different from resubmitted one." % component.get_name()
+                )
             continue
 
         build = models.ComponentBuild(
@@ -475,7 +491,7 @@ def record_component_builds(mmd, module, initial_batch=1,
             scmurl=full_url,
             batch=batch,
             ref=component_ref,
-            weight=rpm_weights[package]
+            weight=rpm_weights[package],
         )
         session.add(build)
 
@@ -486,12 +502,13 @@ def submit_module_build_from_yaml(username, handle, params, stream=None, skiptes
     yaml_file = to_text_type(handle.read())
     mmd = load_mmd(yaml_file)
     dt = datetime.utcfromtimestamp(int(time.time()))
-    if hasattr(handle, 'filename'):
+    if hasattr(handle, "filename"):
         def_name = str(os.path.splitext(os.path.basename(handle.filename))[0])
     elif not mmd.get_name():
         raise ValidationError(
             "The module's name was not present in the modulemd file. Please use the "
-            "\"module_name\" parameter")
+            '"module_name" parameter'
+        )
     def_version = int(dt.strftime("%Y%m%d%H%M%S"))
     mmd.set_name(mmd.get_name() or def_name)
     mmd.set_stream(stream or mmd.get_stream() or "master")
@@ -507,12 +524,11 @@ _url_check_re = re.compile(r"^[^:/]+:.*$")
 
 
 def submit_module_build_from_scm(username, params, allow_local_url=False):
-    url = params['scmurl']
-    branch = params['branch']
+    url = params["scmurl"]
+    branch = params["branch"]
     # Translate local paths into file:// URL
     if allow_local_url and not _url_check_re.match(url):
-        log.info(
-            "'{}' is not a valid URL, assuming local path".format(url))
+        log.info("'{}' is not a valid URL, assuming local path".format(url))
         url = os.path.abspath(url)
         url = "file://" + url
     mmd, scm = _fetch_mmd(url, branch, allow_local_url)
@@ -529,27 +545,29 @@ def _apply_dep_overrides(mmd, params):
     :raises ValidationError: if one of the overrides doesn't apply
     """
     dep_overrides = {
-        'buildrequires': copy.copy(params.get('buildrequire_overrides', {})),
-        'requires': copy.copy(params.get('require_overrides', {}))
+        "buildrequires": copy.copy(params.get("buildrequire_overrides", {})),
+        "requires": copy.copy(params.get("require_overrides", {})),
     }
 
     # Parse the module's branch to determine if it should override the stream of the buildrequired
     # module defined in conf.br_stream_override_module
     branch_search = None
-    if params.get('branch') and conf.br_stream_override_module and conf.br_stream_override_regexes:
+    if params.get("branch") and conf.br_stream_override_module and conf.br_stream_override_regexes:
         # Only parse the branch for a buildrequire override if the user didn't manually specify an
         # override for the module specified in conf.br_stream_override_module
-        if not dep_overrides['buildrequires'].get(conf.br_stream_override_module):
+        if not dep_overrides["buildrequires"].get(conf.br_stream_override_module):
            branch_search = None
            for regex in conf.br_stream_override_regexes:
-                branch_search = re.search(regex, params['branch'])
+                branch_search = re.search(regex, params["branch"])
                if branch_search:
                    log.debug(
-                        'The stream override regex `%s` matched the branch %s',
-                        regex, params['branch'])
+                        "The stream override regex `%s` matched the branch %s",
+                        regex,
+                        params["branch"],
+                    )
                    break
            else:
-                log.debug('No stream override regexes matched the branch "%s"', params['branch'])
+                log.debug('No stream override regexes matched the branch "%s"', params["branch"])
 
     # If a stream was parsed from the branch, then add it as a stream override for the module
     # specified in conf.br_stream_override_module
@@ -557,21 +575,23 @@ def _apply_dep_overrides(mmd, params):
        # Concatenate all the groups that are not None together to get the desired stream.
        # This approach is taken in case there are sections to ignore.
        # For instance, if we need to parse `el8.0.0` from `rhel-8.0.0`.
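Concretely, for the `rhel-8.0.0` example mentioned in the comment above, the group concatenation plays out as follows — the regex here is invented for illustration, since the real patterns come from `conf.br_stream_override_regexes`:

```python
import re

# Hypothetical override regex; real patterns come from
# conf.br_stream_override_regexes.
branch_search = re.search(r"^rh(el)-(\d+\.\d+\.\d+)$", "rhel-8.0.0")
if branch_search:
    # Join the non-empty capture groups: "el" + "8.0.0" -> "el8.0.0"
    parsed_stream = "".join(group for group in branch_search.groups() if group)
    assert parsed_stream == "el8.0.0"
```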
# For instance, if we need to parse `el8.0.0` from `rhel-8.0.0`. - parsed_stream = ''.join(group for group in branch_search.groups() if group) + parsed_stream = "".join(group for group in branch_search.groups() if group) if parsed_stream: - dep_overrides['buildrequires'][conf.br_stream_override_module] = [parsed_stream] + dep_overrides["buildrequires"][conf.br_stream_override_module] = [parsed_stream] log.info( 'The buildrequired stream of "%s" was overriden with "%s" based on the branch "%s"', - conf.br_stream_override_module, parsed_stream, params['branch']) + conf.br_stream_override_module, parsed_stream, params["branch"], + ) else: log.warning( - ('The regex `%s` only matched empty capture groups on the branch "%s". The regex ' - 'is invalid and should be rewritten.'), - regex, params['branch']) + 'The regex `%s` only matched empty capture groups on the branch "%s". The regex is ' + " invalid and should be rewritten.", + regex, params["branch"], + ) unused_dep_overrides = { - 'buildrequires': set(dep_overrides['buildrequires'].keys()), - 'requires': set(dep_overrides['requires'].keys()) + "buildrequires": set(dep_overrides["buildrequires"].keys()), + "requires": set(dep_overrides["requires"].keys()), } deps = mmd.get_dependencies() @@ -579,7 +599,7 @@ def _apply_dep_overrides(mmd, params): for dep_type, overrides in dep_overrides.items(): overridden = False # Get the existing streams (e.g. dep.get_buildrequires()) - reqs = getattr(dep, 'get_' + dep_type)() + reqs = getattr(dep, "get_" + dep_type)() for name, streams in dep_overrides[dep_type].items(): if name in reqs: reqs[name].set(streams) @@ -587,7 +607,7 @@ def _apply_dep_overrides(mmd, params): overridden = True if overridden: # Set the overridden streams (e.g. dep.set_buildrequires(reqs)) - getattr(dep, 'set_' + dep_type)(reqs) + getattr(dep, "set_" + dep_type)(reqs) for dep_type in unused_dep_overrides.keys(): # If a stream override was applied from parsing the branch and it wasn't applicable, @@ -596,8 +616,9 @@ def _apply_dep_overrides(mmd, params): unused_dep_overrides[dep_type].remove(conf.br_stream_override_module) if unused_dep_overrides[dep_type]: raise ValidationError( - 'The {} overrides for the following modules aren\'t applicable: {}' - .format(dep_type[:-1], ', '.join(sorted(unused_dep_overrides[dep_type])))) + "The {} overrides for the following modules aren't applicable: {}".format( + dep_type[:-1], ", ".join(sorted(unused_dep_overrides[dep_type]))) + ) mmd.set_dependencies(deps) @@ -624,7 +645,7 @@ def _handle_base_module_virtual_stream_br(mmd): for i, stream in enumerate(streams): # Ignore streams that start with a minus sign, since those are handled in the # MSE code - if stream.startswith('-'): + if stream.startswith("-"): continue # Check if the base module stream is available @@ -636,30 +657,25 @@ def _handle_base_module_virtual_stream_br(mmd): # If the base module stream is not available, check if there's a virtual stream log.debug( 'Checking to see if there is a base module "%s" with the virtual stream "%s"', - base_module, - stream + base_module, stream, ) base_module_mmd = system_resolver.get_latest_with_virtual_stream( - name=base_module, virtual_stream=stream) + name=base_module, virtual_stream=stream + ) if not base_module_mmd: # If there isn't this base module stream or virtual stream available, skip it, # and let the dep solving code deal with it like it normally would log.warning( 'There is no base module "%s" with stream/virtual stream "%s"', - base_module, - stream + base_module, stream, ) 
continue latest_stream = base_module_mmd.get_stream() log.info( - ('Replacing the buildrequire "%s:%s" with "%s:%s", since "%s" is a virtual ' - 'stream'), - base_module, - stream, - base_module, - latest_stream, - stream + 'Replacing the buildrequire "%s:%s" with "%s:%s", since "%s" is a virtual ' + "stream", + base_module, stream, base_module, latest_stream, stream ) new_streams[i] = latest_stream overridden = True @@ -687,9 +703,13 @@ def submit_module_build(username, mmd, params): import koji # Placed here to avoid py2/py3 conflicts... from .mse import generate_expanded_mmds - log.debug('Submitted %s module build for %s:%s:%s', - ("scratch" if params.get('scratch', False) else "normal"), - mmd.get_name(), mmd.get_stream(), mmd.get_version()) + log.debug( + "Submitted %s module build for %s:%s:%s", + ("scratch" if params.get("scratch", False) else "normal"), + mmd.get_name(), + mmd.get_stream(), + mmd.get_version(), + ) validate_mmd(mmd) raise_if_stream_ambigous = False @@ -706,8 +726,10 @@ def submit_module_build(username, mmd, params): mmds = generate_expanded_mmds(db.session, mmd, raise_if_stream_ambigous, default_streams) if not mmds: - raise ValidationError('No dependency combination was satisfied. Please verify the ' - 'buildrequires in your modulemd have previously been built.') + raise ValidationError( + "No dependency combination was satisfied. Please verify the " + "buildrequires in your modulemd have previously been built." + ) modules = [] # True if all module builds are skipped so MBS will actually not rebuild @@ -722,52 +744,57 @@ def submit_module_build(username, mmd, params): version_str = str(version) nsvc = ":".join([mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context()]) - log.debug('Checking whether module build already exists: %s.', nsvc) + log.debug("Checking whether module build already exists: %s.", nsvc) module = models.ModuleBuild.get_build_from_nsvc( db.session, mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context()) - if module and not params.get('scratch', False): - if module.state != models.BUILD_STATES['failed']: - log.info("Skipping rebuild of %s, only rebuild of modules in failed state " - "is allowed.", nsvc) + if module and not params.get("scratch", False): + if module.state != models.BUILD_STATES["failed"]: + log.info( + "Skipping rebuild of %s, only rebuild of modules in failed state is allowed.", + nsvc, + ) modules.append(module) continue - rebuild_strategy = params.get('rebuild_strategy') + rebuild_strategy = params.get("rebuild_strategy") if rebuild_strategy and module.rebuild_strategy != rebuild_strategy: raise ValidationError( 'You cannot change the module\'s "rebuild_strategy" when ' - 'resuming a module build') + "resuming a module build" + ) - log.debug('Resuming existing module build %r' % module) + log.debug("Resuming existing module build %r" % module) # Reset all component builds that didn't complete for component in module.component_builds: - if component.state and component.state != koji.BUILD_STATES['COMPLETE']: + if component.state and component.state != koji.BUILD_STATES["COMPLETE"]: component.state = None component.state_reason = None db.session.add(component) module.username = username prev_state = module.previous_non_failed_state - if prev_state == models.BUILD_STATES['init']: - transition_to = models.BUILD_STATES['init'] + if prev_state == models.BUILD_STATES["init"]: + transition_to = models.BUILD_STATES["init"] else: - transition_to = models.BUILD_STATES['wait'] + transition_to = 
models.BUILD_STATES["wait"] module.batch = 0 module.transition(conf, transition_to, "Resubmitted by %s" % username) log.info("Resumed existing module build in previous state %s" % module.state) else: # make NSVC unique for every scratch build - context_suffix = '' - if params.get('scratch', False): - log.debug('Checking for existing scratch module builds by NSVC') + context_suffix = "" + if params.get("scratch", False): + log.debug("Checking for existing scratch module builds by NSVC") scrmods = models.ModuleBuild.get_scratch_builds_from_nsvc( db.session, mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context()) scrmod_contexts = [scrmod.context for scrmod in scrmods] - log.debug('Found %d previous scratch module build context(s): %s', - len(scrmods), ",".join(scrmod_contexts)) + log.debug( + "Found %d previous scratch module build context(s): %s", + len(scrmods), ",".join(scrmod_contexts), + ) # append incrementing counter to context - context_suffix = '_' + str(len(scrmods) + 1) + context_suffix = "_" + str(len(scrmods) + 1) mmd.set_context(mmd.get_context() + context_suffix) - log.debug('Creating new module build') + log.debug("Creating new module build") module = models.ModuleBuild.create( db.session, conf, @@ -775,27 +802,35 @@ def submit_module_build(username, mmd, params): stream=mmd.get_stream(), version=version_str, modulemd=to_text_type(mmd.dumps()), - scmurl=params.get('scmurl'), + scmurl=params.get("scmurl"), username=username, - rebuild_strategy=params.get('rebuild_strategy'), - scratch=params.get('scratch'), - srpms=params.get('srpms') + rebuild_strategy=params.get("rebuild_strategy"), + scratch=params.get("scratch"), + srpms=params.get("srpms"), ) - (module.ref_build_context, module.build_context, module.runtime_context, - module.context) = module.contexts_from_mmd(module.modulemd) + ( + module.ref_build_context, + module.build_context, + module.runtime_context, + module.context, + ) = module.contexts_from_mmd(module.modulemd) module.context += context_suffix all_modules_skipped = False db.session.add(module) db.session.commit() modules.append(module) - log.info("%s submitted build of %s, stream=%s, version=%s, context=%s", username, - mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context()) + log.info( + "%s submitted build of %s, stream=%s, version=%s, context=%s", + username, mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context() + ) if all_modules_skipped: - err_msg = ('Module (state=%s) already exists. Only a new build, resubmission of ' - 'a failed build or build against new buildrequirements is ' - 'allowed.' % module.state) + err_msg = ( + "Module (state=%s) already exists. Only a new build, resubmission of " + "a failed build or build against new buildrequirements is " + "allowed." % module.state + ) log.error(err_msg) raise Conflict(err_msg) @@ -805,25 +840,24 @@ def submit_module_build(username, mmd, params): def _is_eol_in_pdc(name, stream): """ Check PDC if the module name:stream is no longer active. 
""" - params = {'type': 'module', 'global_component': name, 'name': stream} - url = conf.pdc_url + '/component-branches/' + params = {"type": "module", "global_component": name, "name": stream} + url = conf.pdc_url + "/component-branches/" response = requests.get(url, params=params) if not response: raise ValidationError("Failed to talk to PDC {}{}".format(response, response.text)) data = response.json() - results = data['results'] + results = data["results"] if not results: - raise ValidationError("No such module {}:{} found at {}".format( - name, stream, response.request.url)) + raise ValidationError( + "No such module {}:{} found at {}".format(name, stream, response.request.url)) # If the module is active, then it is not EOL and vice versa. - return not results[0]['active'] + return not results[0]["active"] -def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, - mandatory_checks=True): +def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, mandatory_checks=True): # Import it here, because SCM uses utils methods # and fails to import them because of dep-chain. import module_build_service.scm @@ -831,7 +865,7 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, td = None scm = None try: - log.debug('Verifying modulemd') + log.debug("Verifying modulemd") td = tempfile.mkdtemp() if whitelist_url: scm = module_build_service.scm.SCM(url, branch, [url], allow_local_url) @@ -847,14 +881,12 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, if td is not None: shutil.rmtree(td) except Exception as e: - log.warning( - "Failed to remove temporary directory {!r}: {}".format( - td, str(e))) + log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e))) if conf.check_for_eol: if _is_eol_in_pdc(scm.name, scm.branch): raise ValidationError( - 'Module {}:{} is marked as EOL in PDC.'.format(scm.name, scm.branch)) + "Module {}:{} is marked as EOL in PDC.".format(scm.name, scm.branch)) if not mandatory_checks: return mmd, scm @@ -863,8 +895,8 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, # says it should be if mmd.get_name() and mmd.get_name() != scm.name: if not conf.allow_name_override_from_scm: - raise ValidationError('The name "{0}" that is stored in the modulemd ' - 'is not valid'.format(mmd.get_name())) + raise ValidationError( + 'The name "{0}" that is stored in the modulemd is not valid'.format(mmd.get_name())) else: mmd.set_name(scm.name) @@ -872,19 +904,20 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, # branch is if mmd.get_stream() and mmd.get_stream() != scm.branch: if not conf.allow_stream_override_from_scm: - raise ValidationError('The stream "{0}" that is stored in the modulemd ' - 'does not match the branch "{1}"'.format( - mmd.get_stream(), scm.branch)) + raise ValidationError( + 'The stream "{0}" that is stored in the modulemd does not match the branch "{1}"' + .format(mmd.get_stream(), scm.branch) + ) else: mmd.set_stream(scm.branch) # If the version is in the modulemd, throw an exception since the version # since the version is generated by MBS if mmd.get_version(): - raise ValidationError('The version "{0}" is already defined in the ' - 'modulemd but it shouldn\'t be since the version ' - 'is generated based on the commit time'.format( - mmd.get_version())) + raise ValidationError( + 'The version "{0}" is already defined in the modulemd but it shouldn\'t be since the ' + "version is generated based 
+        )
     else:
         mmd.set_version(int(scm.version))
@@ -901,18 +934,17 @@ def load_mmd(yaml, is_file=False):
         mmd.upgrade()
     except Exception:
         if is_file:
-            error = 'The modulemd {} is invalid. Please verify the syntax is correct'.format(
-                os.path.basename(yaml)
-            )
+            error = "The modulemd {} is invalid. Please verify the syntax is correct".format(
+                os.path.basename(yaml))
             if os.path.exists(yaml):
-                with open(yaml, 'rt') as yaml_hdl:
-                    log.debug('Modulemd content:\n%s', yaml_hdl.read())
+                with open(yaml, "rt") as yaml_hdl:
+                    log.debug("Modulemd content:\n%s", yaml_hdl.read())
             else:
-                error = 'The modulemd file {} not found!'.format(os.path.basename(yaml))
-                log.error('The modulemd file %s not found!', yaml)
+                error = "The modulemd file {} not found!".format(os.path.basename(yaml))
+                log.error("The modulemd file %s not found!", yaml)
         else:
-            error = 'The modulemd is invalid. Please verify the syntax is correct.'
-            log.debug('Modulemd content:\n%s', yaml)
+            error = "The modulemd is invalid. Please verify the syntax is correct."
+            log.debug("Modulemd content:\n%s", yaml)
         log.exception(error)
         raise UnprocessableEntity(error)
@@ -943,7 +975,7 @@ def load_local_builds(local_build_nsvs, session=None):
     builds = []
     try:
         for d in os.listdir(conf.mock_resultsdir):
-            m = re.match('^module-(.*)-([^-]*)-([0-9]+)$', d)
+            m = re.match("^module-(.*)-([^-]*)-([0-9]+)$", d)
             if m:
                 builds.append((m.group(1), m.group(2), int(m.group(3)), d))
     except OSError:
@@ -958,11 +990,12 @@ def load_local_builds(local_build_nsvs, session=None):
     builds.sort(key=lambda a: a[2], reverse=True)
 
     for nsv in local_build_nsvs:
-        parts = nsv.split(':')
+        parts = nsv.split(":")
         if len(parts) < 1 or len(parts) > 3:
             raise RuntimeError(
-                'The local build "{0}" couldn\'t be be parsed into '
-                'NAME[:STREAM[:VERSION]]'.format(nsv))
+                'The local build "{0}" couldn\'t be be parsed into NAME[:STREAM[:VERSION]]'
+                .format(nsv)
+            )
 
         name = parts[0]
         stream = parts[1] if len(parts) > 1 else None
@@ -983,11 +1016,12 @@ def load_local_builds(local_build_nsvs, session=None):
         if not found_build:
             raise RuntimeError(
                 'The local build "{0}" couldn\'t be found in "{1}"'.format(
-                    nsv, conf.mock_resultsdir))
+                    nsv, conf.mock_resultsdir)
+            )
 
         # Load the modulemd metadata.
-        path = os.path.join(conf.mock_resultsdir, found_build[3], 'results')
-        mmd = load_mmd(os.path.join(path, 'modules.yaml'), is_file=True)
+        path = os.path.join(conf.mock_resultsdir, found_build[3], "results")
+        mmd = load_mmd(os.path.join(path, "modules.yaml"), is_file=True)
 
         # Create ModuleBuild in database.
         module = models.ModuleBuild.create(
@@ -1000,14 +1034,19 @@ def load_local_builds(local_build_nsvs, session=None):
             modulemd=to_text_type(mmd.dumps()),
             scmurl="",
             username="mbs",
-            publish_msg=False)
+            publish_msg=False,
+        )
         module.koji_tag = path
-        module.state = models.BUILD_STATES['ready']
+        module.state = models.BUILD_STATES["ready"]
         session.commit()
 
-        if (found_build[0] != module.name or found_build[1] != module.stream or
-                str(found_build[2]) != module.version):
+        if (
+            found_build[0] != module.name
+            or found_build[1] != module.stream
+            or str(found_build[2]) != module.version
+        ):
             raise RuntimeError(
-                'Parsed metadata results for "{0}" don\'t match the directory name'
-                .format(found_build[3]))
+                'Parsed metadata results for "{0}" don\'t match the directory name'.format(
+                    found_build[3])
+            )
         log.info("Loaded local module build %r", module)
diff --git a/module_build_service/utils/ursine.py b/module_build_service/utils/ursine.py
index d9a60157..da23131a 100644
--- a/module_build_service/utils/ursine.py
+++ b/module_build_service/utils/ursine.py
@@ -63,22 +63,25 @@ def find_build_tags_from_external_repos(koji_session, repo_infos):
     :return: a list of tag names.
     :rtype: list[str]
     """
-    re_external_repo_url = r'^{}/repos/(.+-build)/latest/\$arch/?$'.format(
-        conf.koji_external_repo_url_prefix.rstrip('/'))
+    re_external_repo_url = r"^{}/repos/(.+-build)/latest/\$arch/?$".format(
+        conf.koji_external_repo_url_prefix.rstrip("/"))
     tag_names = []
     for info in repo_infos:
-        match = re.match(re_external_repo_url, info['url'])
+        match = re.match(re_external_repo_url, info["url"])
         if match:
             name = match.groups()[0]
             if koji_session.getTag(name) is None:
-                log.warning('Ignoring the found tag %s because no tag info was found '
-                            'with this name.', name)
+                log.warning(
+                    "Ignoring the found tag %s because no tag info was found with this name.",
+                    name,
+                )
             else:
                 tag_names.append(name)
         else:
-            log.warning('The build tag could not be parsed from external repo '
-                        '%s whose url is %s.',
-                        info['external_repo_name'], info['url'])
+            log.warning(
+                "The build tag could not be parsed from external repo %s whose url is %s.",
+                info["external_repo_name"], info["url"],
+            )
 
     return tag_names
@@ -98,8 +101,9 @@ def find_module_koji_tags(koji_session, build_tag):
     :rtype: list[str]
     """
     return [
-        data['name'] for data in koji_session.getFullInheritance(build_tag)
-        if any(data['name'].startswith(prefix) for prefix in conf.koji_tag_prefixes)
+        data["name"]
+        for data in koji_session.getFullInheritance(build_tag)
+        if any(data["name"].startswith(prefix) for prefix in conf.koji_tag_prefixes)
     ]
@@ -126,11 +130,12 @@ def get_modulemds_from_ursine_content(tag):
     :rtype: list[Modulemd.Module]
     """
     from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
+
     koji_session = KojiModuleBuilder.get_session(conf, login=False)
     repos = koji_session.getExternalRepoList(tag)
     build_tags = find_build_tags_from_external_repos(koji_session, repos)
     if not build_tags:
-        log.debug('No external repo containing ursine content is found.')
+        log.debug("No external repo containing ursine content is found.")
         return []
     modulemds = []
     for tag in build_tags:
@@ -140,7 +145,7 @@ def get_modulemds_from_ursine_content(tag):
             if md:
                 modulemds.append(md)
             else:
-                log.warning('No module is found by koji_tag \'%s\'', koji_tag)
+                log.warning("No module is found by koji_tag '%s'", koji_tag)
     return modulemds
@@ -161,7 +166,7 @@ def find_stream_collision_modules(buildrequired_modules, koji_tag):
     """
     ursine_modulemds = get_modulemds_from_ursine_content(koji_tag)
     if not ursine_modulemds:
-        log.debug('No module metadata is found from ursine content.')
+        log.debug("No module metadata is found from ursine content.")
         return []
 
     collision_modules = [
@@ -171,15 +176,19 @@ def find_stream_collision_modules(buildrequired_modules, koji_tag):
        # different stream, that is what we want to record here, whose RPMs will be
        # excluded from buildroot by adding them into SRPM module-build-macros as
        # Conflicts.
-        if (item.get_name() in buildrequired_modules and
-                item.get_stream() != buildrequired_modules[item.get_name()]['stream'])
+        if (
+            item.get_name() in buildrequired_modules
+            and item.get_stream() != buildrequired_modules[item.get_name()]["stream"]
+        )
     ]
 
     for item in collision_modules:
-        name, stream, _ = item.split(':', 2)
-        log.info('Buildrequired module %s exists in ursine content with '
-                 'different stream %s, whose RPMs will be excluded.',
-                 name, stream)
+        name, stream, _ = item.split(":", 2)
+        log.info(
+            "Buildrequired module %s exists in ursine content with "
+            "different stream %s, whose RPMs will be excluded.",
+            name, stream,
+        )
     return collision_modules
@@ -206,17 +215,18 @@ def handle_stream_collision_modules(mmd):
     :param mmd: a module's metadata which will be built.
     :type mmd: Modulemd.Module
     """
-    log.info('Start to find out stream collision modules.')
+    log.info("Start to find out stream collision modules.")
 
     unpacked_xmd = glib.from_variant_dict(mmd.get_xmd())
-    buildrequires = unpacked_xmd['mbs']['buildrequires']
+    buildrequires = unpacked_xmd["mbs"]["buildrequires"]
 
     for module_name in conf.base_module_names:
         base_module_info = buildrequires.get(module_name)
         if base_module_info is None:
             log.info(
-                'Base module %s is not a buildrequire of module %s. '
-                'Skip handling module stream collision for this base module.',
-                module_name, mmd.get_name())
+                "Base module %s is not a buildrequire of module %s. "
+                "Skip handling module stream collision for this base module.",
+                module_name, mmd.get_name(),
+            )
             continue
 
        # Module stream collision is handled only for newly created module
@@ -225,27 +235,26 @@ def handle_stream_collision_modules(mmd):
        # base module.
        # Just check the existence, and following code ensures this key exists
        # even if no stream collision module is found.
-        if ('stream_collision_modules' in base_module_info and
-                'ursine_rpms' in base_module_info):
-            log.debug('Base module %s has stream collision modules and ursine '
-                      'rpms. Skip to handle stream collision again for it.',
-                      module_name)
+        if "stream_collision_modules" in base_module_info and "ursine_rpms" in base_module_info:
+            log.debug(
+                "Base module %s has stream collision modules and ursine "
+                "rpms. Skip to handle stream collision again for it.",
+                module_name,
+            )
            continue
 
-        modules_nsvc = find_stream_collision_modules(
-            buildrequires, base_module_info['koji_tag'])
+        modules_nsvc = find_stream_collision_modules(buildrequires, base_module_info["koji_tag"])
        if modules_nsvc:
            # Save modules NSVC for later use in subsequent event handlers to
            # log readable messages.
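As a toy illustration of the collision rule being recorded here — the data shapes loosely mirror `xmd["mbs"]["buildrequires"]`, with all values invented for the example:

```python
# A module found in ursine content "collides" when the same module is
# buildrequired at a different stream; its RPMs then get excluded.
buildrequired = {"platform": {"stream": "f30"}}          # invented data
ursine = [("platform", "f29"), ("httpd", "2.4")]         # invented data

collisions = [
    (name, stream)
    for name, stream in ursine
    if name in buildrequired and stream != buildrequired[name]["stream"]
]
assert collisions == [("platform", "f29")]
```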
-            base_module_info['stream_collision_modules'] = modules_nsvc
-            base_module_info['ursine_rpms'] = find_module_built_rpms(modules_nsvc)
+            base_module_info["stream_collision_modules"] = modules_nsvc
+            base_module_info["ursine_rpms"] = find_module_built_rpms(modules_nsvc)
         else:
-            log.info('No stream collision module is found against base module %s.',
-                     module_name)
+            log.info("No stream collision module is found against base module %s.", module_name)
             # Always set in order to mark it as handled already.
-            base_module_info['stream_collision_modules'] = None
-            base_module_info['ursine_rpms'] = None
+            base_module_info["stream_collision_modules"] = None
+            base_module_info["ursine_rpms"] = None
 
     mmd.set_xmd(glib.dict_values(unpacked_xmd))
@@ -262,18 +271,17 @@ def find_module_built_rpms(modules_nsvc):
     import kobo.rpmlib
     from module_build_service.resolver import GenericResolver
     from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
+
     resolver = GenericResolver.create(conf)
 
     built_rpms = []
     koji_session = KojiModuleBuilder.get_session(conf, login=False)
 
     for nsvc in modules_nsvc:
-        name, stream, version, context = nsvc.split(':')
+        name, stream, version, context = nsvc.split(":")
         module = resolver._get_module(name, stream, version, context, strict=True)
-        rpms = koji_session.listTaggedRPMS(module['koji_tag'], latest=True)[0]
-        built_rpms.extend(
-            kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms
-        )
+        rpms = koji_session.listTaggedRPMS(module["koji_tag"], latest=True)[0]
+        built_rpms.extend(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)
 
     # In case there is duplicate NEVRs, ensure every NEVR is unique in the final list.
     # And, sometimes, sorted list of RPMs would be easier to read.
diff --git a/module_build_service/utils/views.py b/module_build_service/utils/views.py
index 86f7452e..8c8faf89 100644
--- a/module_build_service/utils/views.py
+++ b/module_build_service/utils/views.py
@@ -41,10 +41,11 @@ def get_scm_url_re():
     """ Returns a regular expression for SCM URL extraction and validation. """
-    schemes_re = '|'.join(map(re.escape, scm_url_schemes(terse=True)))
+    schemes_re = "|".join(map(re.escape, scm_url_schemes(terse=True)))
     regex = (
         r"(?P(?P(?:" + schemes_re + r"))://(?P[^/]+)?"
-        r"(?P/[^\?]+))(?:\?(?P[^#]+)?)?#(?P.+)")
+        r"(?P/[^\?]+))(?:\?(?P[^#]+)?)?#(?P.+)"
+    )
     return re.compile(regex)
 
 
@@ -62,35 +63,56 @@ def pagination_metadata(p_query, api_version, request_args):
     # Remove pagination related args because those are handled elsewhere
     # Also, remove any args that url_for accepts in case the user entered
     # those in
-    for key in ['page', 'per_page', 'endpoint']:
+    for key in ["page", "per_page", "endpoint"]:
         if key in request_args_wo_page:
             request_args_wo_page.pop(key)
     for key in request_args:
-        if key.startswith('_'):
+        if key.startswith("_"):
             request_args_wo_page.pop(key)
 
     pagination_data = {
-        'page': p_query.page,
-        'pages': p_query.pages,
-        'per_page': p_query.per_page,
-        'prev': None,
-        'next': None,
-        'total': p_query.total,
-        'first': url_for(request.endpoint, api_version=api_version, page=1,
-                         per_page=p_query.per_page, _external=True, **request_args_wo_page),
-        'last': url_for(request.endpoint, api_version=api_version, page=p_query.pages,
-                        per_page=p_query.per_page, _external=True,
-                        **request_args_wo_page)
+        "page": p_query.page,
+        "pages": p_query.pages,
+        "per_page": p_query.per_page,
+        "prev": None,
+        "next": None,
+        "total": p_query.total,
+        "first": url_for(
+            request.endpoint,
+            api_version=api_version,
+            page=1,
+            per_page=p_query.per_page,
+            _external=True,
+            **request_args_wo_page
+        ),
+        "last": url_for(
+            request.endpoint,
+            api_version=api_version,
+            page=p_query.pages,
+            per_page=p_query.per_page,
+            _external=True,
+            **request_args_wo_page
+        ),
     }
 
     if p_query.has_prev:
-        pagination_data['prev'] = url_for(request.endpoint, api_version=api_version,
-                                          page=p_query.prev_num, per_page=p_query.per_page,
-                                          _external=True, **request_args_wo_page)
+        pagination_data["prev"] = url_for(
+            request.endpoint,
+            api_version=api_version,
+            page=p_query.prev_num,
+            per_page=p_query.per_page,
+            _external=True,
+            **request_args_wo_page
+        )
     if p_query.has_next:
-        pagination_data['next'] = url_for(request.endpoint, api_version=api_version,
-                                          page=p_query.next_num, per_page=p_query.per_page,
-                                          _external=True, **request_args_wo_page)
+        pagination_data["next"] = url_for(
+            request.endpoint,
+            api_version=api_version,
+            page=p_query.next_num,
+            per_page=p_query.per_page,
+            _external=True,
+            **request_args_wo_page
+        )
 
     return pagination_data
@@ -104,14 +126,14 @@ def _add_order_by_clause(flask_request, query, column_source):
     :param column_source: a SQLAlchemy database model
     :return: a SQLAlchemy query object
     """
-    order_by = flask_request.args.getlist('order_by')
-    order_desc_by = flask_request.args.getlist('order_desc_by')
+    order_by = flask_request.args.getlist("order_by")
+    order_desc_by = flask_request.args.getlist("order_desc_by")
     # Default to ordering by ID in descending order
     descending = True
-    requested_order = ['id']
+    requested_order = ["id"]
 
     if order_by and order_desc_by:
-        raise ValidationError('You may not specify both order_by and order_desc_by')
+        raise ValidationError("You may not specify both order_by and order_desc_by")
     elif order_by:
         descending = False
         requested_order = order_by
@@ -127,7 +149,7 @@ def _add_order_by_clause(flask_request, query, column_source):
                 'An invalid ordering key of "{}" was supplied'.format(column_name))
         column = column_dict[column_name]
         # If the version column is provided, cast it as an integer so the sorting is correct
-        if column_name == 'version':
+        if column_name == "version":
             column = sqlalchemy.cast(column, sqlalchemy.BigInteger)
         if descending:
             column = column.desc()
@@ -155,7 +177,7 @@ def filter_component_builds(flask_request):
     search_query = dict()
     for key in request.args.keys():
         # Search by state will be handled separately
-        if key == 'state':
+        if key == "state":
             continue
         # Only filter on valid database columns
         if key in models.ComponentBuild.__table__.columns.keys():
@@ -165,7 +187,7 @@ def filter_component_builds(flask_request):
             search_query[key] = flask_request.args[key]
 
     # Multiple states can be supplied => or-ing will take place
-    states = flask_request.args.getlist('state')
+    states = flask_request.args.getlist("state")
     search_states = []
     for state in states:
         if state.isdigit():
@@ -174,16 +196,16 @@ def filter_component_builds(flask_request):
             try:
                 import koji
             except ImportError:
-                raise ValidationError('Cannot filter by state names because koji isn\'t installed')
+                raise ValidationError("Cannot filter by state names because koji isn't installed")
 
             if state.upper() in koji.BUILD_STATES:
                 search_states.append(koji.BUILD_STATES[state.upper()])
             else:
-                raise ValidationError('Invalid state was supplied: %s' % state)
+                raise ValidationError("Invalid state was supplied: %s" % state)
 
     # Allow the user to specify the module build ID with a more intuitive key name
-    if 'module_build' in flask_request.args:
-        search_query['module_id'] = flask_request.args['module_build']
+    if "module_build" in flask_request.args:
+        search_query["module_id"] = flask_request.args["module_build"]
 
     query = models.ComponentBuild.query
@@ -194,8 +216,8 @@ def filter_component_builds(flask_request):
     query = _add_order_by_clause(flask_request, query, models.ComponentBuild)
 
-    page = flask_request.args.get('page', 1, type=int)
-    per_page = flask_request.args.get('per_page', 10, type=int)
+    page = flask_request.args.get("page", 1, type=int)
+    per_page = flask_request.args.get("per_page", 10, type=int)
     return query.paginate(page, per_page, False)
@@ -207,8 +229,13 @@ def filter_module_builds(flask_request):
     """
     search_query = dict()
     special_columns = set((
-        'time_submitted', 'time_modified', 'time_completed', 'state', 'stream_version_lte',
-        'virtual_stream',))
+        "time_submitted",
+        "time_modified",
+        "time_completed",
+        "state",
+        "stream_version_lte",
+        "virtual_stream",
+    ))
     columns = models.ModuleBuild.__table__.columns.keys()
     for key in set(request.args.keys()) - special_columns:
         # Only filter on valid database columns but skip columns that are treated specially or
@@ -217,7 +244,7 @@ def filter_module_builds(flask_request):
             search_query[key] = flask_request.args[key]
 
     # Multiple states can be supplied => or-ing will take place
-    states = flask_request.args.getlist('state')
+    states = flask_request.args.getlist("state")
     search_states = []
     for state in states:
         if state.isdigit():
@@ -226,22 +253,23 @@ def filter_module_builds(flask_request):
             if state in models.BUILD_STATES:
                 search_states.append(models.BUILD_STATES[state])
             else:
-                raise ValidationError('Invalid state was supplied: %s' % state)
+                raise ValidationError("Invalid state was supplied: %s" % state)
 
-    nsvc = flask_request.args.get('nsvc', None)
+    nsvc = flask_request.args.get("nsvc", None)
     if nsvc:
         nsvc_parts = nsvc.split(":")
         query_keys = ["name", "stream", "version", "context"]
         for key, part in zip(query_keys, nsvc_parts):
             search_query[key] = part
 
-    rpm = flask_request.args.get('rpm', None)
+    rpm = flask_request.args.get("rpm", None)
     koji_tags = []
     if rpm:
         if conf.system == "koji":
             # we are importing the koji builder here so we can search for the rpm metadata
             # from koji. If we imported this regularly we would have gotten a circular import error.
-            from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder  # noqa
+            from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder  # noqa
+
             koji_tags = KojiModuleBuilder.get_rpm_module_tag(rpm)
         else:
             raise ValidationError("Configured builder does not allow to search by rpm binary name!")
@@ -257,41 +285,43 @@ def filter_module_builds(flask_request):
     # This is used when filtering the date request parameters, but it is here to avoid recompiling
     utc_iso_datetime_regex = re.compile(
-        r'^(?P<datetime>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(?:\.\d+)?'
-        r'(?:Z|[-+]00(?::00)?)?$')
+        r"^(?P<datetime>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(?:\.\d+)?(?:Z|[-+]00(?::00)?)?$")
 
     # Filter the query based on date request parameters
-    for item in ('submitted', 'modified', 'completed'):
-        for context in ('before', 'after'):
-            request_arg = '%s_%s' % (item, context)  # i.e. submitted_before
+    for item in ("submitted", "modified", "completed"):
+        for context in ("before", "after"):
+            request_arg = "%s_%s" % (item, context)  # i.e. submitted_before
             iso_datetime_arg = request.args.get(request_arg, None)
 
             if iso_datetime_arg:
                 iso_datetime_matches = re.match(utc_iso_datetime_regex, iso_datetime_arg)
-                if not iso_datetime_matches or not iso_datetime_matches.group('datetime'):
-                    raise ValidationError(('An invalid Zulu ISO 8601 timestamp was provided'
-                                           ' for the "%s" parameter')
-                                          % request_arg)
+                if not iso_datetime_matches or not iso_datetime_matches.group("datetime"):
+                    raise ValidationError(
+                        'An invalid Zulu ISO 8601 timestamp was provided for the "%s" parameter'
+                        % request_arg
+                    )
                 # Converts the ISO 8601 string to a datetime object for SQLAlchemy to use to filter
-                item_datetime = datetime.strptime(iso_datetime_matches.group('datetime'),
-                                                  '%Y-%m-%dT%H:%M:%S')
+                item_datetime = datetime.strptime(
+                    iso_datetime_matches.group("datetime"), "%Y-%m-%dT%H:%M:%S")
                 # Get the database column to filter against
-                column = getattr(models.ModuleBuild, 'time_' + item)
+                column = getattr(models.ModuleBuild, "time_" + item)
 
-                if context == 'after':
+                if context == "after":
                     query = query.filter(column >= item_datetime)
-                elif context == 'before':
+                elif context == "before":
                     query = query.filter(column <= item_datetime)
 
     # Multiple virtual_streams can be supplied for "or" logic filtering
-    virtual_streams = flask_request.args.getlist('virtual_stream')
+    virtual_streams = flask_request.args.getlist("virtual_stream")
     query = models.ModuleBuild._add_virtual_streams_filter(db.session, query, virtual_streams)
 
-    stream_version_lte = flask_request.args.get('stream_version_lte')
+    stream_version_lte = flask_request.args.get("stream_version_lte")
     if stream_version_lte is not None:
-        invalid_error = ('An invalid value of stream_version_lte was provided. It must be an '
-                         'integer greater than or equal to 10000.')
+        invalid_error = (
+            "An invalid value of stream_version_lte was provided. It must be an "
+            "integer greater than or equal to 10000."
+        )
         try:
             stream_version_lte = int(stream_version_lte)
         except (TypeError, ValueError):
@@ -305,19 +335,27 @@ def filter_module_builds(flask_request):
     br_joined = False
     module_br_alias = None
-    for item in ('base_module_br', 'name', 'stream', 'version', 'context', 'stream_version',
-                 'stream_version_lte', 'stream_version_gte'):
-        if item == 'base_module_br':
+    for item in (
+        "base_module_br",
+        "name",
+        "stream",
+        "version",
+        "context",
+        "stream_version",
+        "stream_version_lte",
+        "stream_version_gte",
+    ):
+        if item == "base_module_br":
             request_arg_name = item
         else:
-            request_arg_name = 'base_module_br_{}'.format(item)
+            request_arg_name = "base_module_br_{}".format(item)
         request_arg = flask_request.args.get(request_arg_name)
 
         if not request_arg:
             continue
 
         if not br_joined:
-            module_br_alias = aliased(models.ModuleBuild, name='module_br')
+            module_br_alias = aliased(models.ModuleBuild, name="module_br")
             # Shorten this table name for clarity in the query below
             mb_to_br = models.module_builds_to_module_buildrequires
             # The following joins get added:
@@ -325,14 +363,13 @@ def filter_module_builds(flask_request):
             #   ON module_builds_to_module_buildrequires.module_id = module_builds.id
             # JOIN module_builds AS module_br
             #   ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id
-            query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id)\
-                .join(module_br_alias,
-                      mb_to_br.c.module_buildrequire_id == module_br_alias.id)
+            query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(
+                module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)
             br_joined = True
 
-        if item == 'base_module_br':
+        if item == "base_module_br":
             try:
-                name, stream, version, context = flask_request.args['base_module_br'].split(':')
+                name, stream, version, context = flask_request.args["base_module_br"].split(":")
             except ValueError:
                 raise ValidationError(
                     'The filter argument for "base_module_br" must be in the format of N:S:V:C')
             query = query.filter(
                 module_br_alias.name == name,
                 module_br_alias.stream == stream,
                 module_br_alias.version == version,
-                module_br_alias.context == context
+                module_br_alias.context == context,
             )
-        elif item.endswith('_lte'):
+        elif item.endswith("_lte"):
             column = getattr(module_br_alias, item[:-4])
             query = query.filter(column <= request_arg)
-        elif item.endswith('_gte'):
+        elif item.endswith("_gte"):
             column = getattr(module_br_alias, item[:-4])
             query = query.filter(column >= request_arg)
         else:
@@ -354,17 +391,18 @@ def filter_module_builds(flask_request):
     query = _add_order_by_clause(flask_request, query, models.ModuleBuild)
 
-    page = flask_request.args.get('page', 1, type=int)
-    per_page = flask_request.args.get('per_page', 10, type=int)
+    page = flask_request.args.get("page", 1, type=int)
+    per_page = flask_request.args.get("per_page", 10, type=int)
     return query.paginate(page, per_page, False)
 
 
-def cors_header(allow='*'):
+def cors_header(allow="*"):
     """
     A decorator that sets the Access-Control-Allow-Origin header to the desired value on a Flask
     route
     :param allow: a string of the domain to allow. This defaults to '*'.
""" + def decorator(func): @wraps(func) def wrapper(*args, **kwargs): @@ -377,9 +415,11 @@ def cors_header(allow='*'): response = rv # Make sure we are dealing with a Flask Response object if isinstance(response, Response): - response.headers.add('Access-Control-Allow-Origin', allow) + response.headers.add("Access-Control-Allow-Origin", allow) return rv + return wrapper + return decorator @@ -387,12 +427,15 @@ def validate_api_version(): """ A decorator that validates the requested API version on a route """ + def decorator(func): @wraps(func) def wrapper(*args, **kwargs): - req_api_version = kwargs.get('api_version', 1) + req_api_version = kwargs.get("api_version", 1) if req_api_version > api_version or req_api_version < 1: - raise NotFound('The requested API version is not available') + raise NotFound("The requested API version is not available") return func(*args, **kwargs) + return wrapper + return decorator diff --git a/module_build_service/views.py b/module_build_service/views.py index ac82908b..a40f7d26 100644 --- a/module_build_service/views.py +++ b/module_build_service/views.py @@ -35,67 +35,56 @@ from io import BytesIO from module_build_service import app, conf, log, models, db, version, api_version as max_api_version from module_build_service.utils import ( - pagination_metadata, filter_module_builds, filter_component_builds, - submit_module_build_from_scm, submit_module_build_from_yaml, - get_scm_url_re, cors_header, validate_api_version, import_mmd, - get_mmd_from_scm, str_to_bool) -from module_build_service.errors import ( - ValidationError, Forbidden, NotFound, ProgrammingError) + pagination_metadata, + filter_module_builds, + filter_component_builds, + submit_module_build_from_scm, + submit_module_build_from_yaml, + get_scm_url_re, + cors_header, + validate_api_version, + import_mmd, + get_mmd_from_scm, + str_to_bool, +) +from module_build_service.errors import ValidationError, Forbidden, NotFound, ProgrammingError from module_build_service.backports import jsonify from module_build_service.monitor import monitor_api api_routes = { - 'module_builds': { - 'url': '/module-build-service//module-builds/', - 'options': { - 'methods': ['POST'], - } + "module_builds": { + "url": "/module-build-service//module-builds/", + "options": {"methods": ["POST"]}, }, - 'module_builds_list': { - 'url': '/module-build-service//module-builds/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "module_builds_list": { + "url": "/module-build-service//module-builds/", + "options": {"defaults": {"id": None}, "methods": ["GET"]}, }, - 'module_build': { - 'url': '/module-build-service//module-builds/', - 'options': { - 'methods': ['GET', 'PATCH'], - } + "module_build": { + "url": "/module-build-service//module-builds/", + "options": {"methods": ["GET", "PATCH"]}, }, - 'component_builds_list': { - 'url': '/module-build-service//component-builds/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "component_builds_list": { + "url": "/module-build-service//component-builds/", + "options": {"defaults": {"id": None}, "methods": ["GET"]}, }, - 'component_build': { - 'url': '/module-build-service//component-builds/', - 'options': { - 'methods': ['GET'], - } + "component_build": { + "url": "/module-build-service//component-builds/", + "options": {"methods": ["GET"]}, }, - 'about': { - 'url': '/module-build-service//about/', - 'options': { - 'methods': ['GET'] - } + "about": { + "url": "/module-build-service//about/", + "options": {"methods": ["GET"]}, }, - 
'rebuild_strategies_list': { - 'url': '/module-build-service//rebuild-strategies/', - 'options': { - 'methods': ['GET'] - } + "rebuild_strategies_list": { + "url": "/module-build-service//rebuild-strategies/", + "options": {"methods": ["GET"]}, + }, + "import_module": { + "url": "/module-build-service//import-module/", + "options": {"methods": ["POST"]}, }, - 'import_module': { - 'url': '/module-build-service//import-module/', - 'options': { - 'methods': ['POST'], - } - } } @@ -105,59 +94,60 @@ class AbstractQueryableBuildAPI(MethodView): @cors_header() @validate_api_version() def get(self, api_version, id): - id_flag = request.args.get('id') + id_flag = request.args.get("id") if id_flag: - endpoint = request.endpoint.split('s_list')[0] + endpoint = request.endpoint.split("s_list")[0] raise ValidationError( 'The "id" query option is invalid. Did you mean to go to "{0}"?'.format( - url_for(endpoint, api_version=api_version, id=id_flag))) - verbose_flag = request.args.get('verbose', 'false').lower() - short_flag = request.args.get('short', 'false').lower() + url_for(endpoint, api_version=api_version, id=id_flag) + ) + ) + verbose_flag = request.args.get("verbose", "false").lower() + short_flag = request.args.get("short", "false").lower() json_func_kwargs = {} - json_func_name = 'json' + json_func_name = "json" if id is None: # Lists all tracked builds p_query = self.query_filter(request) - json_data = { - 'meta': pagination_metadata(p_query, api_version, request.args) - } + json_data = {"meta": pagination_metadata(p_query, api_version, request.args)} - if verbose_flag == 'true' or verbose_flag == '1': - json_func_name = 'extended_json' - json_func_kwargs['show_state_url'] = True - json_func_kwargs['api_version'] = api_version - elif short_flag == 'true' or short_flag == '1': - if hasattr(p_query.items[0], 'short_json'): - json_func_name = 'short_json' - json_data['items'] = [getattr(item, json_func_name)(**json_func_kwargs) - for item in p_query.items] + if verbose_flag == "true" or verbose_flag == "1": + json_func_name = "extended_json" + json_func_kwargs["show_state_url"] = True + json_func_kwargs["api_version"] = api_version + elif short_flag == "true" or short_flag == "1": + if hasattr(p_query.items[0], "short_json"): + json_func_name = "short_json" + json_data["items"] = [ + getattr(item, json_func_name)(**json_func_kwargs) for item in p_query.items + ] return jsonify(json_data), 200 else: # Lists details for the specified build instance = self.model.query.filter_by(id=id).first() if instance: - if verbose_flag == 'true' or verbose_flag == '1': - json_func_name = 'extended_json' - json_func_kwargs['show_state_url'] = True - json_func_kwargs['api_version'] = api_version - elif short_flag == 'true' or short_flag == '1': - if getattr(instance, 'short_json', None): - json_func_name = 'short_json' + if verbose_flag == "true" or verbose_flag == "1": + json_func_name = "extended_json" + json_func_kwargs["show_state_url"] = True + json_func_kwargs["api_version"] = api_version + elif short_flag == "true" or short_flag == "1": + if getattr(instance, "short_json", None): + json_func_name = "short_json" return jsonify(getattr(instance, json_func_name)(**json_func_kwargs)), 200 else: - raise NotFound('No such %s found.' % self.kind) + raise NotFound("No such %s found." 
% self.kind) class ComponentBuildAPI(AbstractQueryableBuildAPI): - kind = 'component' + kind = "component" query_filter = staticmethod(filter_component_builds) model = models.ComponentBuild class ModuleBuildAPI(AbstractQueryableBuildAPI): - kind = 'module' + kind = "module" query_filter = staticmethod(filter_module_builds) model = models.ModuleBuild @@ -167,8 +157,7 @@ class ModuleBuildAPI(AbstractQueryableBuildAPI): if username in conf.allowed_users: return if allowed_groups and not (allowed_groups & groups): - raise Forbidden("%s is not in any of %r, only %r" % ( - username, allowed_groups, groups)) + raise Forbidden("%s is not in any of %r, only %r" % (username, allowed_groups, groups)) # Additional POST and DELETE handlers for modules follow. @validate_api_version() @@ -200,13 +189,14 @@ class ModuleBuildAPI(AbstractQueryableBuildAPI): try: r = json.loads(request.get_data().decode("utf-8")) except Exception: - log.exception('Invalid JSON submitted') - raise ValidationError('Invalid JSON submitted') + log.exception("Invalid JSON submitted") + raise ValidationError("Invalid JSON submitted") if "owner" in r: if conf.no_auth is not True: - raise ValidationError(("The request contains 'owner' parameter," - " however NO_AUTH is not allowed")) + raise ValidationError( + "The request contains 'owner' parameter, however NO_AUTH is not allowed" + ) elif username == "anonymous": username = r["owner"] @@ -214,27 +204,23 @@ class ModuleBuildAPI(AbstractQueryableBuildAPI): module = models.ModuleBuild.query.filter_by(id=id).first() if not module: - raise NotFound('No such module found.') + raise NotFound("No such module found.") if module.owner != username and not (conf.admin_groups & groups): - raise Forbidden('You are not owner of this build and ' - 'therefore cannot modify it.') + raise Forbidden("You are not owner of this build and therefore cannot modify it.") - if not r.get('state'): - log.error('Invalid JSON submitted') - raise ValidationError('Invalid JSON submitted') + if not r.get("state"): + log.error("Invalid JSON submitted") + raise ValidationError("Invalid JSON submitted") - if module.state == models.BUILD_STATES['failed']: - raise Forbidden('You can\'t cancel a failed module') + if module.state == models.BUILD_STATES["failed"]: + raise Forbidden("You can't cancel a failed module") - if r['state'] == 'failed' \ - or r['state'] == str(models.BUILD_STATES['failed']): - module.transition(conf, models.BUILD_STATES["failed"], - "Canceled by %s." % username) + if r["state"] == "failed" or r["state"] == str(models.BUILD_STATES["failed"]): + module.transition(conf, models.BUILD_STATES["failed"], "Canceled by %s." 
% username) else: - log.error('The provided state change of "{}" is not supported' - .format(r['state'])) - raise ValidationError('The provided state change is not supported') + log.error('The provided state change of "{}" is not supported'.format(r["state"])) + raise ValidationError("The provided state change is not supported") db.session.add(module) db.session.commit() @@ -245,14 +231,13 @@ class AboutAPI(MethodView): @cors_header() @validate_api_version() def get(self, api_version): - json = {'version': version, 'api_version': max_api_version} - config_items = ['auth_method'] + json = {"version": version, "api_version": max_api_version} + config_items = ["auth_method"] for item in config_items: config_item = getattr(conf, item) # All config items have a default, so if doesn't exist it is a programming error if not config_item: - raise ProgrammingError( - 'An invalid config item of "{0}" was specified'.format(item)) + raise ProgrammingError('An invalid config item of "{0}" was specified'.format(item)) json[item] = config_item return jsonify(json), 200 @@ -268,37 +253,37 @@ class RebuildStrategies(MethodView): if strategy == conf.rebuild_strategy: default = True allowed = True - elif conf.rebuild_strategy_allow_override and \ - strategy in conf.rebuild_strategies_allowed: + elif ( + conf.rebuild_strategy_allow_override and strategy in conf.rebuild_strategies_allowed + ): allowed = True else: allowed = False items.append({ - 'name': strategy, - 'description': models.ModuleBuild.rebuild_strategies[strategy], - 'allowed': allowed, - 'default': default + "name": strategy, + "description": models.ModuleBuild.rebuild_strategies[strategy], + "allowed": allowed, + "default": default, }) - return jsonify({'items': items}), 200 + return jsonify({"items": items}), 200 class ImportModuleAPI(MethodView): - @validate_api_version() def post(self, api_version): # disable this API endpoint if no groups are defined if not conf.allowed_groups_to_import_module: log.error( "Import module API is disabled. Set 'ALLOWED_GROUPS_TO_IMPORT_MODULE'" - " configuration value first.") - raise Forbidden( - "Import module API is disabled.") + " configuration value first." 
+ ) + raise Forbidden("Import module API is disabled.") # auth checks username, groups = module_build_service.auth.get_user(request) - ModuleBuildAPI.check_groups(username, groups, - allowed_groups=conf.allowed_groups_to_import_module) + ModuleBuildAPI.check_groups( + username, groups, allowed_groups=conf.allowed_groups_to_import_module) # process request using SCM handler handler = SCMHandler(request) @@ -306,8 +291,7 @@ class ImportModuleAPI(MethodView): mmd = get_mmd_from_scm(handler.data["scmurl"]) build, messages = import_mmd(db.session, mmd) - json_data = {"module": build.json(show_tasks=False), - "messages": messages} + json_data = {"module": build.json(show_tasks=False), "messages": messages} # return 201 Created if we reach this point return jsonify(json_data), 201 @@ -315,16 +299,16 @@ class ImportModuleAPI(MethodView): class BaseHandler(object): valid_params = set([ - 'branch', - 'buildrequire_overrides', - 'modulemd', - 'module_name', - 'owner', - 'rebuild_strategy', - 'require_overrides', - 'scmurl', - 'scratch', - 'srpms' + "branch", + "buildrequire_overrides", + "modulemd", + "module_name", + "owner", + "rebuild_strategy", + "require_overrides", + "scmurl", + "scratch", + "srpms", ]) def __init__(self, request, data=None): @@ -332,21 +316,21 @@ class BaseHandler(object): self.data = data or _dict_from_request(request) # canonicalize and validate scratch option - if 'scratch' in self.data and str_to_bool(str(self.data['scratch'])): - self.data['scratch'] = True + if "scratch" in self.data and str_to_bool(str(self.data["scratch"])): + self.data["scratch"] = True if conf.modules_allow_scratch is not True: - raise Forbidden('Scratch builds are not enabled') + raise Forbidden("Scratch builds are not enabled") else: - self.data['scratch'] = False + self.data["scratch"] = False # canonicalize and validate srpms list - if 'srpms' in self.data and self.data['srpms']: - if not self.data['scratch']: - raise Forbidden('srpms may only be specified for scratch builds') - if not isinstance(self.data['srpms'], list): - raise ValidationError('srpms must be specified as a list') + if "srpms" in self.data and self.data["srpms"]: + if not self.data["scratch"]: + raise Forbidden("srpms may only be specified for scratch builds") + if not isinstance(self.data["srpms"], list): + raise ValidationError("srpms must be specified as a list") else: - self.data['srpms'] = [] + self.data["srpms"] = [] def _validate_dep_overrides_format(self, key): """ @@ -357,8 +341,10 @@ class BaseHandler(object): """ if not self.data.get(key): return - invalid_override_msg = ('The "{}" parameter must be an object with the keys as module ' - 'names and the values as arrays of streams'.format(key)) + invalid_override_msg = ( + 'The "{}" parameter must be an object with the keys as module ' + "names and the values as arrays of streams".format(key) + ) if not isinstance(self.data[key], dict): raise ValidationError(invalid_override_msg) for streams in self.data[key].values(): @@ -371,33 +357,37 @@ class BaseHandler(object): def validate_optional_params(self): forbidden_params = [k for k in self.data if k not in self.valid_params] if forbidden_params: - raise ValidationError('The request contains unspecified parameters: {}' - .format(", ".join(forbidden_params))) + raise ValidationError( + "The request contains unspecified parameters: {}".format( + ", ".join(forbidden_params)) + ) if not conf.no_auth and "owner" in self.data: - raise ValidationError(("The request contains 'owner' parameter," - " however NO_AUTH is not 
allowed")) + raise ValidationError( + "The request contains 'owner' parameter, however NO_AUTH is not allowed") - if not conf.rebuild_strategy_allow_override and 'rebuild_strategy' in self.data: - raise ValidationError('The request contains the "rebuild_strategy" parameter but ' - 'overriding the default isn\'t allowed') + if not conf.rebuild_strategy_allow_override and "rebuild_strategy" in self.data: + raise ValidationError( + 'The request contains the "rebuild_strategy" parameter but ' + "overriding the default isn't allowed" + ) - if 'rebuild_strategy' in self.data: - if self.data['rebuild_strategy'] not in conf.rebuild_strategies_allowed: + if "rebuild_strategy" in self.data: + if self.data["rebuild_strategy"] not in conf.rebuild_strategies_allowed: raise ValidationError( - 'The rebuild method of "{0}" is not allowed. Choose from: {1}.' - .format(self.data['rebuild_strategy'], - ', '.join(conf.rebuild_strategies_allowed))) + 'The rebuild method of "{0}" is not allowed. Choose from: {1}.'.format( + self.data["rebuild_strategy"], ", ".join(conf.rebuild_strategies_allowed)) + ) - self._validate_dep_overrides_format('buildrequire_overrides') - self._validate_dep_overrides_format('require_overrides') + self._validate_dep_overrides_format("buildrequire_overrides") + self._validate_dep_overrides_format("require_overrides") class SCMHandler(BaseHandler): def validate(self, skip_branch=False, skip_optional_params=False): if "scmurl" not in self.data: - log.error('Missing scmurl') - raise ValidationError('Missing scmurl') + log.error("Missing scmurl") + raise ValidationError("Missing scmurl") url = self.data["scmurl"] allowed_prefix = any(url.startswith(prefix) for prefix in conf.scmurls) @@ -410,8 +400,8 @@ class SCMHandler(BaseHandler): raise Forbidden("The submitted scmurl %s is not valid" % url) if not skip_branch and "branch" not in self.data: - log.error('Missing branch') - raise ValidationError('Missing branch') + log.error("Missing branch") + raise ValidationError("Missing branch") if not skip_optional_params: self.validate_optional_params() @@ -423,14 +413,16 @@ class SCMHandler(BaseHandler): class YAMLFileHandler(BaseHandler): def __init__(self, request, data=None): super(YAMLFileHandler, self).__init__(request, data) - if not self.data['scratch'] and not conf.yaml_submit_allowed: + if not self.data["scratch"] and not conf.yaml_submit_allowed: raise Forbidden("YAML submission is not enabled") def validate(self): - if ("modulemd" not in self.data and - (not hasattr(request, "files") or "yaml" not in request.files)): - log.error('Invalid file submitted') - raise ValidationError('Invalid file submitted') + if ( + "modulemd" not in self.data + and (not hasattr(request, "files") or "yaml" not in request.files) + ): + log.error("Invalid file submitted") + raise ValidationError("Invalid file submitted") self.validate_optional_params() def post(self): @@ -450,44 +442,31 @@ def _dict_from_request(request): try: data = json.loads(request.get_data().decode("utf-8")) except Exception: - log.exception('Invalid JSON submitted') - raise ValidationError('Invalid JSON submitted') + log.exception("Invalid JSON submitted") + raise ValidationError("Invalid JSON submitted") return data def register_api(): """ Registers the MBS API. 
""" - module_view = ModuleBuildAPI.as_view('module_builds') - component_view = ComponentBuildAPI.as_view('component_builds') - about_view = AboutAPI.as_view('about') - rebuild_strategies_view = RebuildStrategies.as_view('rebuild_strategies') - import_module = ImportModuleAPI.as_view('import_module') + module_view = ModuleBuildAPI.as_view("module_builds") + component_view = ComponentBuildAPI.as_view("component_builds") + about_view = AboutAPI.as_view("about") + rebuild_strategies_view = RebuildStrategies.as_view("rebuild_strategies") + import_module = ImportModuleAPI.as_view("import_module") for key, val in api_routes.items(): - if key.startswith('component_build'): - app.add_url_rule(val['url'], - endpoint=key, - view_func=component_view, - **val['options']) - elif key.startswith('module_build'): - app.add_url_rule(val['url'], - endpoint=key, - view_func=module_view, - **val['options']) - elif key.startswith('about'): - app.add_url_rule(val['url'], - endpoint=key, - view_func=about_view, - **val['options']) - elif key == 'rebuild_strategies_list': - app.add_url_rule(val['url'], - endpoint=key, - view_func=rebuild_strategies_view, - **val['options']) - elif key == 'import_module': - app.add_url_rule(val['url'], - endpoint=key, - view_func=import_module, - **val['options']) + if key.startswith("component_build"): + app.add_url_rule(val["url"], endpoint=key, view_func=component_view, **val["options"]) + elif key.startswith("module_build"): + app.add_url_rule(val["url"], endpoint=key, view_func=module_view, **val["options"]) + elif key.startswith("about"): + app.add_url_rule(val["url"], endpoint=key, view_func=about_view, **val["options"]) + elif key == "rebuild_strategies_list": + app.add_url_rule( + val["url"], endpoint=key, view_func=rebuild_strategies_view, **val["options"] + ) + elif key == "import_module": + app.add_url_rule(val["url"], endpoint=key, view_func=import_module, **val["options"]) else: raise NotImplementedError("Unhandled api key.") diff --git a/setup.py b/setup.py index 9f72b485..736b2ff1 100644 --- a/setup.py +++ b/setup.py @@ -6,11 +6,11 @@ from setuptools import setup, find_packages def read_requirements(filename): specifiers = [] dep_links = [] - with open(filename, 'r') as f: + with open(filename, "r") as f: for line in f: - if line.startswith('-r') or line.strip() == '': + if line.startswith("-r") or line.strip() == "": continue - if line.startswith('git+'): + if line.startswith("git+"): dep_links.append(line.strip()) else: specifiers.append(line.strip()) @@ -18,56 +18,67 @@ def read_requirements(filename): setup_py_path = path.dirname(path.realpath(__file__)) -install_requires, deps_links = read_requirements(path.join(setup_py_path, 'requirements.txt')) -tests_require, _ = read_requirements(path.join(setup_py_path, 'test-requirements.txt')) +install_requires, deps_links = read_requirements(path.join(setup_py_path, "requirements.txt")) +tests_require, _ = read_requirements(path.join(setup_py_path, "test-requirements.txt")) -setup(name='module-build-service', - description='The Module Build Service for Modularity', - version='2.19.1', - classifiers=[ - "Programming Language :: Python", - "Topic :: Software Development :: Build Tools" - ], - keywords='module build service fedora modularity koji mock rpm', - author='The Factory 2.0 Team', - author_email='module-build-service-owner@fedoraproject.org', - url='https://pagure.io/fm-orchestrator/', - license='MIT', - packages=find_packages(), - include_package_data=True, - zip_safe=False, - install_requires=install_requires, 
- tests_require=tests_require, - dependency_links=deps_links, - entry_points={ - 'console_scripts': ['mbs-upgradedb = module_build_service.manage:upgradedb', - 'mbs-frontend = module_build_service.manage:run', - 'mbs-manager = module_build_service.manage:manager_wrapper'], - 'moksha.consumer': 'mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer', - 'moksha.producer': 'mbspoller = module_build_service.scheduler.producer:MBSProducer', - 'mbs.messaging_backends': [ - 'fedmsg = module_build_service.messaging:_fedmsg_backend', - 'in_memory = module_build_service.messaging:_in_memory_backend', - # 'custom = your_organization:_custom_backend', - ], - 'mbs.builder_backends': [ - 'koji = module_build_service.builder.KojiModuleBuilder:KojiModuleBuilder', - 'mock = module_build_service.builder.MockModuleBuilder:MockModuleBuilder', - ], - 'mbs.resolver_backends': [ - 'mbs = module_build_service.resolver.MBSResolver:MBSResolver', - 'db = module_build_service.resolver.DBResolver:DBResolver', - 'local = module_build_service.resolver.LocalResolver:LocalResolver', - ], - }, - scripts=['client/mbs-cli'], - data_files=[('/etc/module-build-service/', ['conf/cacert.pem', - 'conf/config.py', - 'conf/koji.conf', - 'conf/mock.cfg', - 'conf/yum.conf']), - ('/etc/fedmsg.d/', ['fedmsg.d/mbs-logging.py', - 'fedmsg.d/mbs-scheduler.py', - 'fedmsg.d/module_build_service.py']), - ], - ) +setup( + name="module-build-service", + description="The Module Build Service for Modularity", + version="2.19.1", + classifiers=["Programming Language :: Python", "Topic :: Software Development :: Build Tools"], + keywords="module build service fedora modularity koji mock rpm", + author="The Factory 2.0 Team", + author_email="module-build-service-owner@fedoraproject.org", + url="https://pagure.io/fm-orchestrator/", + license="MIT", + packages=find_packages(), + include_package_data=True, + zip_safe=False, + install_requires=install_requires, + tests_require=tests_require, + dependency_links=deps_links, + entry_points={ + "console_scripts": [ + "mbs-upgradedb = module_build_service.manage:upgradedb", + "mbs-frontend = module_build_service.manage:run", + "mbs-manager = module_build_service.manage:manager_wrapper", + ], + "moksha.consumer": "mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer", + "moksha.producer": "mbspoller = module_build_service.scheduler.producer:MBSProducer", + "mbs.messaging_backends": [ + "fedmsg = module_build_service.messaging:_fedmsg_backend", + "in_memory = module_build_service.messaging:_in_memory_backend", + # 'custom = your_organization:_custom_backend', + ], + "mbs.builder_backends": [ + "koji = module_build_service.builder.KojiModuleBuilder:KojiModuleBuilder", + "mock = module_build_service.builder.MockModuleBuilder:MockModuleBuilder", + ], + "mbs.resolver_backends": [ + "mbs = module_build_service.resolver.MBSResolver:MBSResolver", + "db = module_build_service.resolver.DBResolver:DBResolver", + "local = module_build_service.resolver.LocalResolver:LocalResolver", + ], + }, + scripts=["client/mbs-cli"], + data_files=[ + ( + "/etc/module-build-service/", + [ + "conf/cacert.pem", + "conf/config.py", + "conf/koji.conf", + "conf/mock.cfg", + "conf/yum.conf", + ], + ), + ( + "/etc/fedmsg.d/", + [ + "fedmsg.d/mbs-logging.py", + "fedmsg.d/mbs-scheduler.py", + "fedmsg.d/module_build_service.py", + ], + ), + ], +) diff --git a/tests/__init__.py b/tests/__init__.py index a6a21d83..5eee705e 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -55,8 +55,8 @@ def 
read_staged_data(yaml_name): """ filename = os.path.join(base_dir, "staged_data", "{}.yaml".format(yaml_name)) if not os.path.exists(filename): - raise ValueError('Staged data {}.yaml does not exist.'.format(yaml_name)) - with open(filename, 'r') as mmd: + raise ValueError("Staged data {}.yaml does not exist.".format(yaml_name)) + with open(filename, "r") as mmd: return to_text_type(mmd.read()) @@ -64,7 +64,7 @@ def patch_config(): # add test builders for all resolvers with_test_builders = dict() for k, v in module_build_service.config.SUPPORTED_RESOLVERS.items(): - v['builders'].extend(['test', 'testlocal']) + v["builders"].extend(["test", "testlocal"]) with_test_builders[k] = v patch("module_build_service.config.SUPPORTED_RESOLVERS", with_test_builders) @@ -108,7 +108,7 @@ def clean_database(add_platform_module=True): db.drop_all() db.create_all() if add_platform_module: - mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml')) + mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml")) import_mmd(db.session, mmd) @@ -124,7 +124,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=False, scra """ clean_database() if multiple_stream_versions: - mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml')) + mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml")) for stream in ["f28.0.0", "f29.0.0", "f29.1.0", "f29.2.0"]: mmd.set_name("platform") mmd.set_stream(stream) @@ -132,7 +132,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=False, scra # Set the virtual_streams based on "fXY" to mark the platform streams # with the same major stream_version compatible. xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['virtual_streams'] = [stream[:3]] + xmd["mbs"]["virtual_streams"] = [stream[:3]] mmd.set_xmd(glib.dict_values(xmd)) import_mmd(db.session, mmd) @@ -149,33 +149,37 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False): for index in range(data_size): for context in range(num_contexts): build_one = ModuleBuild( - name='nginx', - stream='1', + name="nginx", + stream="1", version=2 + index, - state=BUILD_STATES['ready'], + state=BUILD_STATES["ready"], scratch=scratch, - modulemd=read_staged_data('nginx_mmd'), - koji_tag='scrmod-nginx-1.2' if scratch else 'module-nginx-1.2', - scmurl='git://pkgs.domain.local/modules/nginx' - '?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9', + modulemd=read_staged_data("nginx_mmd"), + koji_tag="scrmod-nginx-1.2" if scratch else "module-nginx-1.2", + scmurl="git://pkgs.domain.local/modules/nginx" + "?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9", batch=2, # https://www.youtube.com/watch?v=iQGwrK_yDEg, - owner='Moe Szyslak', + owner="Moe Szyslak", time_submitted=datetime(2016, 9, 3, 11, 23, 20) + timedelta(minutes=(index * 10)), time_modified=datetime(2016, 9, 3, 11, 25, 32) + timedelta(minutes=(index * 10)), time_completed=datetime(2016, 9, 3, 11, 25, 32) + timedelta(minutes=(index * 10)), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) if contexts: build_one.stream = str(index) - unique_hash = hashlib.sha1(("%s:%s:%d:%d" % ( - build_one.name, build_one.stream, build_one.version, - context)).encode("utf-8")).hexdigest() + nsvc = "{}:{}:{}:{}".format( + build_one.name, + build_one.stream, + build_one.version, + context + ) + unique_hash = hashlib.sha1(nsvc.encode('utf-8')).hexdigest() build_one.build_context = unique_hash build_one.runtime_context = unique_hash build_one.ref_build_context 
= unique_hash - combined_hashes = '{0}:{1}'.format(unique_hash, unique_hash) + combined_hashes = "{0}:{1}".format(unique_hash, unique_hash) build_one.context = hashlib.sha1(combined_hashes.encode("utf-8")).hexdigest()[:8] session.add(build_one) @@ -183,13 +187,13 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False): build_one_component_release = get_rpm_release(build_one) component_one_build_one = ComponentBuild( - package='nginx', - scmurl='git://pkgs.domain.local/rpms/nginx?' - '#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3', - format='rpms', + package="nginx", + scmurl="git://pkgs.domain.local/rpms/nginx?" + "#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3", + format="rpms", task_id=12312345 + index, - state=koji.BUILD_STATES['COMPLETE'], - nvr='nginx-1.10.1-2.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="nginx-1.10.1-2.{0}".format(build_one_component_release), batch=1, module_id=2 + index * 3, tagged=True, @@ -197,13 +201,13 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False): ) component_two_build_one = ComponentBuild( - package='module-build-macros', - scmurl='/tmp/module_build_service-build-macrosWZUPeK/SRPMS/' - 'module-build-macros-0.1-1.module_nginx_1_2.src.rpm', - format='rpms', + package="module-build-macros", + scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/" + "module-build-macros-0.1-1.module_nginx_1_2.src.rpm", + format="rpms", task_id=12312321 + index, - state=koji.BUILD_STATES['COMPLETE'], - nvr='module-build-macros-01-1.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="module-build-macros-01-1.{0}".format(build_one_component_release), batch=2, module_id=2 + index * 3, tagged=True, @@ -211,21 +215,21 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False): ) build_two = ModuleBuild( - name='postgressql', - stream='1', + name="postgressql", + stream="1", version=2 + index, - state=BUILD_STATES['done'], + state=BUILD_STATES["done"], scratch=scratch, - modulemd=read_staged_data('testmodule'), - koji_tag='scrmod-postgressql-1.2' if scratch else 'module-postgressql-1.2', - scmurl='git://pkgs.domain.local/modules/postgressql' - '?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9', + modulemd=read_staged_data("testmodule"), + koji_tag="scrmod-postgressql-1.2" if scratch else "module-postgressql-1.2", + scmurl="git://pkgs.domain.local/modules/postgressql" + "?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9", batch=2, - owner='some_user', + owner="some_user", time_submitted=datetime(2016, 9, 3, 12, 25, 33) + timedelta(minutes=(index * 10)), time_modified=datetime(2016, 9, 3, 12, 27, 19) + timedelta(minutes=(index * 10)), time_completed=datetime(2016, 9, 3, 11, 27, 19) + timedelta(minutes=(index * 10)), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) session.add(build_two) @@ -233,13 +237,13 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False): build_two_component_release = get_rpm_release(build_two) component_one_build_two = ComponentBuild( - package='postgresql', - scmurl='git://pkgs.domain.local/rpms/postgresql' - '?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3', - format='rpms', + package="postgresql", + scmurl="git://pkgs.domain.local/rpms/postgresql" + "?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3", + format="rpms", task_id=2433433 + index, - state=koji.BUILD_STATES['COMPLETE'], - nvr='postgresql-9.5.3-4.{0}'.format(build_two_component_release), + state=koji.BUILD_STATES["COMPLETE"], + 
nvr="postgresql-9.5.3-4.{0}".format(build_two_component_release), batch=2, module_id=3 + index * 3, tagged=True, @@ -247,58 +251,58 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False): ) component_two_build_two = ComponentBuild( - package='module-build-macros', - scmurl='/tmp/module_build_service-build-macrosWZUPeK/SRPMS/' - 'module-build-macros-0.1-1.module_postgresql_1_2.src.rpm', - format='rpms', + package="module-build-macros", + scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/" + "module-build-macros-0.1-1.module_postgresql_1_2.src.rpm", + format="rpms", task_id=47383993 + index, - state=koji.BUILD_STATES['COMPLETE'], - nvr='module-build-macros-01-1.{0}'.format(build_two_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="module-build-macros-01-1.{0}".format(build_two_component_release), batch=1, module_id=3 + index * 3, ) build_three = ModuleBuild( - name='testmodule', - stream='4.3.43', + name="testmodule", + stream="4.3.43", version=6 + index, - state=BUILD_STATES['wait'], + state=BUILD_STATES["wait"], scratch=scratch, - modulemd=read_staged_data('testmodule'), + modulemd=read_staged_data("testmodule"), koji_tag=None, - scmurl='git://pkgs.domain.local/modules/testmodule' - '?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9', + scmurl="git://pkgs.domain.local/modules/testmodule" + "?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9", batch=0, - owner='some_other_user', + owner="some_other_user", time_submitted=datetime(2016, 9, 3, 12, 28, 33) + timedelta(minutes=(index * 10)), time_modified=datetime(2016, 9, 3, 12, 28, 40) + timedelta(minutes=(index * 10)), time_completed=None, - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) session.add(build_three) session.commit() build_three_component_release = get_rpm_release(build_three) component_one_build_three = ComponentBuild( - package='rubygem-rails', - scmurl='git://pkgs.domain.local/rpms/rubygem-rails' - '?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3', - format='rpms', + package="rubygem-rails", + scmurl="git://pkgs.domain.local/rpms/rubygem-rails" + "?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3", + format="rpms", task_id=2433433 + index, - state=koji.BUILD_STATES['FAILED'], - nvr='postgresql-9.5.3-4.{0}'.format(build_three_component_release), + state=koji.BUILD_STATES["FAILED"], + nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release), batch=2, module_id=4 + index * 3, ) component_two_build_three = ComponentBuild( - package='module-build-macros', - scmurl='/tmp/module_build_service-build-macrosWZUPeK/SRPMS/' - 'module-build-macros-0.1-1.module_testmodule_1_2.src.rpm', - format='rpms', + package="module-build-macros", + scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/" + "module-build-macros-0.1-1.module_testmodule_1_2.src.rpm", + format="rpms", task_id=47383993 + index, - state=koji.BUILD_STATES['COMPLETE'], - nvr='module-build-macros-01-1.{0}'.format(build_three_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="module-build-macros-01-1.{0}".format(build_three_component_release), batch=1, module_id=4 + index * 3, tagged=True, @@ -321,31 +325,31 @@ def scheduler_init_data(tangerine_state=None, scratch=False): current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( - current_dir, 'staged_data', 'formatted_testmodule.yaml') + current_dir, "staged_data", "formatted_testmodule.yaml") mmd = load_mmd_file(formatted_testmodule_yml_path) - mmd.get_rpm_components()['tangerine'].set_buildorder(0) + 
mmd.get_rpm_components()["tangerine"].set_buildorder(0) platform_br = module_build_service.models.ModuleBuild.query.get(1) module_build = module_build_service.models.ModuleBuild( - name='testmodule', - stream='master', + name="testmodule", + stream="master", version=20170109091357, - state=BUILD_STATES['build'], + state=BUILD_STATES["build"], scratch=scratch, - build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0', - runtime_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0', - context='7c29193d', - koji_tag='scrmod-testmodule-master-20170109091357-7c29193d' - if scratch else - 'module-testmodule-master-20170109091357-7c29193d', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79', + build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", + runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", + context="7c29193d", + koji_tag="scrmod-testmodule-master-20170109091357-7c29193d" + if scratch + else "module-testmodule-master-20170109091357-7c29193d", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79", batch=3 if tangerine_state else 2, # https://www.youtube.com/watch?v=iOKymYVSaJE - owner='Buzz Lightyear', + owner="Buzz Lightyear", time_submitted=datetime(2017, 2, 15, 16, 8, 18), time_modified=datetime(2017, 2, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", modulemd=to_text_type(mmd.dumps()), ) @@ -354,53 +358,54 @@ def scheduler_init_data(tangerine_state=None, scratch=False): module_build.component_builds.extend([ module_build_service.models.ComponentBuild( - package='perl-Tangerine', - scmurl='https://src.fedoraproject.org/rpms/perl-Tangerine' - '?#4ceea43add2366d8b8c5a622a2fb563b625b9abf', - format='rpms', + package="perl-Tangerine", + scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine" + "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf", + format="rpms", task_id=90276227, - state=koji.BUILD_STATES['COMPLETE'], - nvr='perl-Tangerine-0.23-1.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release), batch=2, - ref='4ceea43add2366d8b8c5a622a2fb563b625b9abf', + ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf", tagged=True, tagged_in_final=True, ), module_build_service.models.ComponentBuild( - package='perl-List-Compare', - scmurl='https://src.fedoraproject.org/rpms/perl-List-Compare' - '?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb', - format='rpms', + package="perl-List-Compare", + scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare" + "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb", + format="rpms", task_id=90276228, - state=koji.BUILD_STATES['COMPLETE'], - nvr='perl-List-Compare-0.53-5.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release), batch=2, - ref='76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb', + ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb", tagged=True, tagged_in_final=True, ), module_build_service.models.ComponentBuild( - package='tangerine', - scmurl='https://src.fedoraproject.org/rpms/tangerine' - '?#fbed359411a1baa08d4a88e0d12d426fbf8f602c', - format='rpms', + package="tangerine", + scmurl="https://src.fedoraproject.org/rpms/tangerine" + "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c", + format="rpms", batch=3, - ref='fbed359411a1baa08d4a88e0d12d426fbf8f602c', + ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c", state=tangerine_state, task_id=90276315 if tangerine_state else None, - 
nvr='tangerine-0.22-3.{}'.format(build_one_component_release) - if tangerine_state else None, - tagged=tangerine_state == koji.BUILD_STATES['COMPLETE'], - tagged_in_final=tangerine_state == koji.BUILD_STATES['COMPLETE'], + nvr="tangerine-0.22-3.{}".format(build_one_component_release) + if tangerine_state + else None, + tagged=tangerine_state == koji.BUILD_STATES["COMPLETE"], + tagged_in_final=tangerine_state == koji.BUILD_STATES["COMPLETE"], ), module_build_service.models.ComponentBuild( - package='module-build-macros', - scmurl='/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-' - 'macros-0.1-1.module_testmodule_master_20170109091357.src.rpm', - format='rpms', + package="module-build-macros", + scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-" + "macros-0.1-1.module_testmodule_master_20170109091357.src.rpm", + format="rpms", task_id=90276181, - state=koji.BUILD_STATES['COMPLETE'], - nvr='module-build-macros-0.1-1.{}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="module-build-macros-0.1-1.{}".format(build_one_component_release), batch=1, tagged=True, build_time_only=True, @@ -418,88 +423,88 @@ def reuse_component_init_data(): current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( - current_dir, 'staged_data', 'formatted_testmodule.yaml') + current_dir, "staged_data", "formatted_testmodule.yaml") mmd = load_mmd_file(formatted_testmodule_yml_path) platform_br = module_build_service.models.ModuleBuild.query.get(1) build_one = module_build_service.models.ModuleBuild( - name='testmodule', - stream='master', + name="testmodule", + stream="master", version=20170109091357, - state=BUILD_STATES['ready'], - ref_build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0', - runtime_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0', - build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1', - context='78e4a6fd', - koji_tag='module-testmodule-master-20170109091357-78e4a6fd', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79', + state=BUILD_STATES["ready"], + ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", + runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", + build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1", + context="78e4a6fd", + koji_tag="module-testmodule-master-20170109091357-78e4a6fd", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79", batch=3, - owner='Tom Brady', + owner="Tom Brady", time_submitted=datetime(2017, 2, 15, 16, 8, 18), time_modified=datetime(2017, 2, 15, 16, 19, 35), time_completed=datetime(2017, 2, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) build_one_component_release = get_rpm_release(build_one) mmd.set_version(int(build_one.version)) xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['scmurl'] = build_one.scmurl - xmd['mbs']['commit'] = 'ff1ea79fc952143efeed1851aa0aa006559239ba' + xmd["mbs"]["scmurl"] = build_one.scmurl + xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba" mmd.set_xmd(glib.dict_values(xmd)) build_one.modulemd = to_text_type(mmd.dumps()) build_one.buildrequires.append(platform_br) build_one.component_builds.extend([ module_build_service.models.ComponentBuild( - package='perl-Tangerine', - scmurl='https://src.fedoraproject.org/rpms/perl-Tangerine' - '?#4ceea43add2366d8b8c5a622a2fb563b625b9abf', - format='rpms', + package="perl-Tangerine", + scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine" + 
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf", + format="rpms", task_id=90276227, - state=koji.BUILD_STATES['COMPLETE'], - nvr='perl-Tangerine-0.23-1.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release), batch=2, - ref='4ceea43add2366d8b8c5a622a2fb563b625b9abf', + ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf", tagged=True, tagged_in_final=True, ), module_build_service.models.ComponentBuild( - package='perl-List-Compare', - scmurl='https://src.fedoraproject.org/rpms/perl-List-Compare' - '?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb', - format='rpms', + package="perl-List-Compare", + scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare" + "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb", + format="rpms", task_id=90276228, - state=koji.BUILD_STATES['COMPLETE'], - nvr='perl-List-Compare-0.53-5.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release), batch=2, - ref='76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb', + ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb", tagged=True, tagged_in_final=True, ), module_build_service.models.ComponentBuild( - package='tangerine', - scmurl='https://src.fedoraproject.org/rpms/tangerine' - '?#fbed359411a1baa08d4a88e0d12d426fbf8f602c', - format='rpms', + package="tangerine", + scmurl="https://src.fedoraproject.org/rpms/tangerine" + "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c", + format="rpms", task_id=90276315, - state=koji.BUILD_STATES['COMPLETE'], - nvr='tangerine-0.22-3.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="tangerine-0.22-3.{0}".format(build_one_component_release), batch=3, - ref='fbed359411a1baa08d4a88e0d12d426fbf8f602c', + ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c", tagged=True, tagged_in_final=True, ), module_build_service.models.ComponentBuild( - package='module-build-macros', - scmurl='/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-' - 'macros-0.1-1.module_testmodule_master_20170109091357.src.rpm', - format='rpms', + package="module-build-macros", + scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-" + "macros-0.1-1.module_testmodule_master_20170109091357.src.rpm", + format="rpms", task_id=90276181, - state=koji.BUILD_STATES['COMPLETE'], - nvr='module-build-macros-0.1-1.{0}'.format(build_one_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="module-build-macros-0.1-1.{0}".format(build_one_component_release), batch=1, tagged=True, build_time_only=True, @@ -507,70 +512,70 @@ def reuse_component_init_data(): ]) build_two = module_build_service.models.ModuleBuild( - name='testmodule', - stream='master', + name="testmodule", + stream="master", version=20170219191323, - state=BUILD_STATES['build'], - ref_build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0', - runtime_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0', - build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1', - context='c40c156c', - koji_tag='module-testmodule-master-20170219191323-c40c156c', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a', + state=BUILD_STATES["build"], + ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", + runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0", + build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1", + context="c40c156c", + koji_tag="module-testmodule-master-20170219191323-c40c156c", + 
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a", batch=1, - owner='Tom Brady', + owner="Tom Brady", time_submitted=datetime(2017, 2, 19, 16, 8, 18), time_modified=datetime(2017, 2, 19, 16, 8, 18), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) build_two_component_release = get_rpm_release(build_two) mmd.set_version(int(build_one.version)) xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['scmurl'] = build_one.scmurl - xmd['mbs']['commit'] = '55f4a0a2e6cc255c88712a905157ab39315b8fd8' + xmd["mbs"]["scmurl"] = build_one.scmurl + xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8" mmd.set_xmd(glib.dict_values(xmd)) build_two.modulemd = to_text_type(mmd.dumps()) build_two.buildrequires.append(platform_br) build_two.component_builds.extend([ module_build_service.models.ComponentBuild( - package='perl-Tangerine', - scmurl='https://src.fedoraproject.org/rpms/perl-Tangerine' - '?#4ceea43add2366d8b8c5a622a2fb563b625b9abf', - format='rpms', + package="perl-Tangerine", + scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine" + "?#4ceea43add2366d8b8c5a622a2fb563b625b9abf", + format="rpms", batch=2, - ref='4ceea43add2366d8b8c5a622a2fb563b625b9abf', + ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf", ), module_build_service.models.ComponentBuild( - package='perl-List-Compare', - scmurl='https://src.fedoraproject.org/rpms/perl-List-Compare' - '?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb', - format='rpms', + package="perl-List-Compare", + scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare" + "?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb", + format="rpms", batch=2, - ref='76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb', + ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb", ), module_build_service.models.ComponentBuild( - package='tangerine', - scmurl='https://src.fedoraproject.org/rpms/tangerine' - '?#fbed359411a1baa08d4a88e0d12d426fbf8f602c', - format='rpms', + package="tangerine", + scmurl="https://src.fedoraproject.org/rpms/tangerine" + "?#fbed359411a1baa08d4a88e0d12d426fbf8f602c", + format="rpms", batch=3, - ref='fbed359411a1baa08d4a88e0d12d426fbf8f602c', + ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c", ), module_build_service.models.ComponentBuild( - package='module-build-macros', - scmurl='/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-' - 'macros-0.1-1.module_testmodule_master_20170219191323.src.rpm', - format='rpms', + package="module-build-macros", + scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-" + "macros-0.1-1.module_testmodule_master_20170219191323.src.rpm", + format="rpms", task_id=90276186, - state=koji.BUILD_STATES['COMPLETE'], - nvr='module-build-macros-0.1-1.{0}'.format(build_two_component_release), + state=koji.BUILD_STATES["COMPLETE"], + nvr="module-build-macros-0.1-1.{0}".format(build_two_component_release), batch=1, tagged=True, build_time_only=True, - ) + ), ]) with make_session(conf) as session: @@ -588,25 +593,25 @@ def reuse_shared_userspace_init_data(): # are properly built. 
current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( - current_dir, 'staged_data', 'shared-userspace-570.yaml') + current_dir, "staged_data", "shared-userspace-570.yaml") mmd = load_mmd_file(formatted_testmodule_yml_path) module_build = module_build_service.models.ModuleBuild( name=mmd.get_name(), stream=mmd.get_stream(), version=mmd.get_version(), - build_context='e046b867a400a06a3571f3c71142d497895fefbe', - runtime_context='50dd3eb5dde600d072e45d4120e1548ce66bc94a', - state=BUILD_STATES['ready'], + build_context="e046b867a400a06a3571f3c71142d497895fefbe", + runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a", + state=BUILD_STATES["ready"], modulemd=to_text_type(mmd.dumps()), - koji_tag='module-shared-userspace-f26-20170601141014-75f92abb', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453', + koji_tag="module-shared-userspace-f26-20170601141014-75f92abb", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453", batch=16, - owner='Tom Brady', + owner="Tom Brady", time_submitted=datetime(2017, 2, 15, 16, 8, 18), time_modified=datetime(2017, 2, 15, 16, 19, 35), time_completed=datetime(2017, 2, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) components = list(mmd.get_rpm_components().values()) @@ -619,7 +624,7 @@ def reuse_shared_userspace_init_data(): previous_buildorder = pkg.get_buildorder() batch += 1 - pkgref = mmd.get_xmd()['mbs']['rpms'][pkg.get_name()]['ref'] + pkgref = mmd.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"] full_url = pkg.get_repository() + "?#" + pkgref module_build.component_builds.append( @@ -631,33 +636,35 @@ def reuse_shared_userspace_init_data(): ref=pkgref, state=1, tagged=True, - tagged_in_final=True - )) + tagged_in_final=True, + ) + ) session.add(module_build) session.commit() # Create shared-userspace-577, state is WAIT, no component built formatted_testmodule_yml_path = os.path.join( - current_dir, 'staged_data', 'shared-userspace-577.yaml') + current_dir, "staged_data", "shared-userspace-577.yaml" + ) mmd2 = load_mmd_file(formatted_testmodule_yml_path) module_build = module_build_service.models.ModuleBuild( name=mmd2.get_name(), stream=mmd2.get_stream(), version=mmd2.get_version(), - build_context='e046b867a400a06a3571f3c71142d497895fefbe', - runtime_context='50dd3eb5dde600d072e45d4120e1548ce66bc94a', - state=BUILD_STATES['done'], + build_context="e046b867a400a06a3571f3c71142d497895fefbe", + runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a", + state=BUILD_STATES["done"], modulemd=to_text_type(mmd2.dumps()), - koji_tag='module-shared-userspace-f26-20170605091544-75f92abb', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453', + koji_tag="module-shared-userspace-f26-20170605091544-75f92abb", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453", batch=0, - owner='Tom Brady', + owner="Tom Brady", time_submitted=datetime(2017, 2, 15, 16, 8, 18), time_modified=datetime(2017, 2, 15, 16, 19, 35), time_completed=datetime(2017, 2, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", ) components2 = list(mmd2.get_rpm_components().values()) @@ -673,24 +680,28 @@ def reuse_shared_userspace_init_data(): previous_buildorder = pkg.get_buildorder() batch += 1 - pkgref = mmd2.get_xmd()['mbs']['rpms'][pkg.get_name()]['ref'] + pkgref = mmd2.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"] full_url = pkg.get_repository() + "?#" + pkgref 
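The batch bookkeeping in the loop above, repeated for shared-userspace-577 below, groups components by their modulemd buildorder: components with the same buildorder share a batch, and the counter only advances when the buildorder value changes. A self-contained reformulation with itertools.groupby, assuming components arrive as (name, buildorder) pairs and an illustrative starting batch; the fixture itself does this imperatively via previous_buildorder:

from itertools import groupby

def assign_batches(components, first_batch=1):
    # Equal buildorder values land in the same batch; every new buildorder
    # value bumps the counter, mirroring the previous_buildorder check in
    # the fixture loops.
    ordered = sorted(components, key=lambda c: c[1])
    batches = {}
    batch = first_batch
    for _, group in groupby(ordered, key=lambda c: c[1]):
        batch += 1
        for name, _ in group:
            batches[name] = batch
    return batches

print(assign_batches([("pkg-a", 0), ("pkg-b", 0), ("pkg-c", 10)]))
# {'pkg-a': 2, 'pkg-b': 2, 'pkg-c': 3}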
module_build.component_builds.append( module_build_service.models.ComponentBuild( - package=pkg.get_name(), - format="rpms", - scmurl=full_url, - batch=batch, - ref=pkgref - )) + package=pkg.get_name(), format="rpms", scmurl=full_url, batch=batch, ref=pkgref) + ) session.add(module_build) session.commit() -def make_module(nsvc, requires_list=None, build_requires_list=None, base_module=None, - filtered_rpms=None, xmd=None, store_to_db=True, virtual_streams=None): +def make_module( + nsvc, + requires_list=None, + build_requires_list=None, + base_module=None, + filtered_rpms=None, + xmd=None, + store_to_db=True, + virtual_streams=None, +): """ Creates new models.ModuleBuild defined by `nsvc` string with requires and buildrequires set according to ``requires_list`` and ``build_requires_list``. @@ -739,8 +750,7 @@ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module= build_requires_list = [build_requires_list] deps_list = [] - for requires, build_requires in zip(requires_list, - build_requires_list): + for requires, build_requires in zip(requires_list, build_requires_list): deps = Modulemd.Dependencies() for req_name, req_streams in requires.items(): deps.add_requires(req_name, req_streams) @@ -751,19 +761,19 @@ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module= # Caller could pass whole xmd including mbs, but if something is missing, # default values are given here. - xmd = xmd or {'mbs': {}} - xmd_mbs = xmd['mbs'] - if 'buildrequires' not in xmd_mbs: - xmd_mbs['buildrequires'] = {} - if 'requires' not in xmd_mbs: - xmd_mbs['requires'] = {} - if 'commit' not in xmd_mbs: - xmd_mbs['commit'] = 'ref_%s' % context - if 'mse' not in xmd_mbs: - xmd_mbs['mse'] = 'true' + xmd = xmd or {"mbs": {}} + xmd_mbs = xmd["mbs"] + if "buildrequires" not in xmd_mbs: + xmd_mbs["buildrequires"] = {} + if "requires" not in xmd_mbs: + xmd_mbs["requires"] = {} + if "commit" not in xmd_mbs: + xmd_mbs["commit"] = "ref_%s" % context + if "mse" not in xmd_mbs: + xmd_mbs["mse"] = "true" if virtual_streams: - xmd_mbs['virtual_streams'] = virtual_streams + xmd_mbs["virtual_streams"] = virtual_streams mmd.set_xmd(glib.dict_values(xmd)) @@ -776,17 +786,17 @@ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module= stream_version=ModuleBuild.get_stream_version(stream), version=version, context=context, - state=BUILD_STATES['ready'], - scmurl='https://src.stg.fedoraproject.org/modules/unused.git?#ff1ea79', + state=BUILD_STATES["ready"], + scmurl="https://src.stg.fedoraproject.org/modules/unused.git?#ff1ea79", batch=1, - owner='Tom Brady', + owner="Tom Brady", time_submitted=datetime(2017, 2, 15, 16, 8, 18), time_modified=datetime(2017, 2, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', + rebuild_strategy="changed-and-after", build_context=context, runtime_context=context, modulemd=to_text_type(mmd.dumps()), - koji_tag=xmd['mbs']['koji_tag'] if 'koji_tag' in xmd['mbs'] else None + koji_tag=xmd["mbs"]["koji_tag"] if "koji_tag" in xmd["mbs"] else None, ) if base_module: module_build.buildrequires.append(base_module) diff --git a/tests/conftest.py b/tests/conftest.py index a80363cb..829f5446 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,20 +27,17 @@ from module_build_service import Modulemd BASE_DIR = os.path.dirname(__file__) -STAGED_DATA_DIR = os.path.join(BASE_DIR, 'staged_data') +STAGED_DATA_DIR = os.path.join(BASE_DIR, "staged_data") -_mmd = Modulemd.Module().new_from_file( - os.path.join(STAGED_DATA_DIR, 'platform.yaml')) 
+_mmd = Modulemd.Module().new_from_file(os.path.join(STAGED_DATA_DIR, "platform.yaml")) _mmd.upgrade() PLATFORM_MODULEMD = _mmd.dumps() -_mmd2 = Modulemd.Module().new_from_file( - os.path.join(STAGED_DATA_DIR, 'formatted_testmodule.yaml')) +_mmd2 = Modulemd.Module().new_from_file(os.path.join(STAGED_DATA_DIR, "formatted_testmodule.yaml")) _mmd2.upgrade() TESTMODULE_MODULEMD = _mmd2.dumps() -_mmd3 = Modulemd.Module().new_from_file( - os.path.join(STAGED_DATA_DIR, 'formatted_testmodule.yaml')) +_mmd3 = Modulemd.Module().new_from_file(os.path.join(STAGED_DATA_DIR, "formatted_testmodule.yaml")) _mmd3.upgrade() _mmd3.set_context("c2c572ed") TESTMODULE_MODULEMD_SECOND_CONTEXT = _mmd3.dumps() diff --git a/tests/test_auth.py b/tests/test_auth.py index c2270c82..7689f877 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -39,8 +39,10 @@ class TestAuthModule: def test_get_user_no_token(self): base_dir = path.abspath(path.dirname(__file__)) client_secrets = path.join(base_dir, "client_secrets.json") - with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets, - 'OIDC_REQUIRED_SCOPE': 'mbs-scope'}): + with patch.dict( + "module_build_service.app.config", + {"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"}, + ): request = mock.MagicMock() request.cookies.return_value = {} @@ -49,18 +51,22 @@ class TestAuthModule: module_build_service.auth.get_user(request) assert str(cm.value) == "No 'authorization' header found." - @patch('module_build_service.auth._get_token_info') - @patch('module_build_service.auth._get_user_info') + @patch("module_build_service.auth._get_token_info") + @patch("module_build_service.auth._get_user_info") def test_get_user_failure(self, get_user_info, get_token_info): base_dir = path.abspath(path.dirname(__file__)) client_secrets = path.join(base_dir, "client_secrets.json") - with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets, - 'OIDC_REQUIRED_SCOPE': 'mbs-scope'}): + with patch.dict( + "module_build_service.app.config", + {"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"}, + ): # https://www.youtube.com/watch?v=G-LtddOgUCE name = "Joey Jo Jo Junior Shabadoo" - mocked_get_token_info = {"active": False, "username": name, - "scope": ("openid https://id.fedoraproject.org/scope/groups" - " mbs-scope")} + mocked_get_token_info = { + "active": False, + "username": name, + "scope": ("openid https://id.fedoraproject.org/scope/groups mbs-scope"), + } get_token_info.return_value = mocked_get_token_info get_user_info.return_value = {"groups": ["group"]} @@ -77,21 +83,25 @@ class TestAuthModule: module_build_service.auth.get_user(request) assert str(cm.value) == "OIDC token invalid or expired." 
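The auth tests below repeatedly combine patch.dict on module_build_service.app.config with patch.object(..., new_callable=PropertyMock) on mbs_config.Config. For readers less familiar with the second idiom, a minimal generic illustration; the Config class and is_allowed function are illustrative stand-ins, not the MBS ones, and unittest.mock provides the same names the tests import from mock:

from unittest.mock import patch, PropertyMock

class Config(object):
    @property
    def allowed_users(self):
        return set()

def is_allowed(conf, username):
    return username in conf.allowed_users

# Properties live on the class, so they are patched on the class and with
# PropertyMock, which makes plain attribute access return the mocked value.
with patch.object(Config, "allowed_users", new_callable=PropertyMock) as mocked:
    mocked.return_value = {"mprahl"}
    assert is_allowed(Config(), "mprahl")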
- @pytest.mark.parametrize('allowed_users', (set(), set(['Joey Jo Jo Junior Shabadoo']))) - @patch.object(mbs_config.Config, 'allowed_users', new_callable=PropertyMock) - @patch('module_build_service.auth._get_token_info') - @patch('module_build_service.auth._get_user_info') + @pytest.mark.parametrize("allowed_users", (set(), set(["Joey Jo Jo Junior Shabadoo"]))) + @patch.object(mbs_config.Config, "allowed_users", new_callable=PropertyMock) + @patch("module_build_service.auth._get_token_info") + @patch("module_build_service.auth._get_user_info") def test_get_user_good(self, get_user_info, get_token_info, m_allowed_users, allowed_users): m_allowed_users.return_value = allowed_users base_dir = path.abspath(path.dirname(__file__)) client_secrets = path.join(base_dir, "client_secrets.json") - with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets, - 'OIDC_REQUIRED_SCOPE': 'mbs-scope'}): + with patch.dict( + "module_build_service.app.config", + {"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"}, + ): # https://www.youtube.com/watch?v=G-LtddOgUCE name = "Joey Jo Jo Junior Shabadoo" - mocked_get_token_info = {"active": True, "username": name, - "scope": ("openid https://id.fedoraproject.org/scope/groups" - " mbs-scope")} + mocked_get_token_info = { + "active": True, + "username": name, + "scope": ("openid https://id.fedoraproject.org/scope/groups mbs-scope"), + } get_token_info.return_value = mocked_get_token_info get_user_info.return_value = {"groups": ["group"]} @@ -118,14 +128,14 @@ class TestAuthModule: assert username_second_call == username assert groups_second_call == groups - @patch.object(mbs_config.Config, 'no_auth', new_callable=PropertyMock, return_value=True) + @patch.object(mbs_config.Config, "no_auth", new_callable=PropertyMock, return_value=True) def test_disable_authentication(self, conf_no_auth): request = mock.MagicMock() username, groups = module_build_service.auth.get_user(request) assert username == "anonymous" assert groups == {"packager"} - @patch('module_build_service.auth.client_secrets', None) + @patch("module_build_service.auth.client_secrets", None) def test_misconfiguring_oidc_client_secrets_should_be_failed(self): request = mock.MagicMock() with pytest.raises(module_build_service.errors.Forbidden) as cm: @@ -133,18 +143,22 @@ class TestAuthModule: module_build_service.auth.get_user(request) assert str(cm.value) == "OIDC_CLIENT_SECRETS must be set in server config." 
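The two scope tests that follow pin down one contract: the token's space-separated scope string must contain the configured OIDC_REQUIRED_SCOPE, or authentication fails with the "Required OIDC scope ... not present" message. A simplified sketch of that check, with a function name of my own choosing; the real implementation lives in module_build_service.auth and raises Unauthorized rather than ValueError:

def check_required_scope(token_info, required_scope):
    # The assertions below compare against exactly this shape: the scope
    # string is split on spaces and the required scope must be an entry.
    presented = token_info.get("scope", "").split(" ")
    if required_scope and required_scope not in presented:
        raise ValueError(
            "Required OIDC scope %r not present: %r" % (required_scope, presented))

token = {"active": True, "scope": "openid https://id.fedoraproject.org/scope/groups"}
try:
    check_required_scope(token, "mbs-scope")
except ValueError as exc:
    print(exc)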
-    @patch('module_build_service.auth._get_token_info')
-    @patch('module_build_service.auth._get_user_info')
+    @patch("module_build_service.auth._get_token_info")
+    @patch("module_build_service.auth._get_user_info")
     def test_get_required_scope_not_present(self, get_user_info, get_token_info):
         base_dir = path.abspath(path.dirname(__file__))
         client_secrets = path.join(base_dir, "client_secrets.json")
-        with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets,
-                                                            'OIDC_REQUIRED_SCOPE': 'mbs-scope'}):
+        with patch.dict(
+            "module_build_service.app.config",
+            {"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"},
+        ):
             # https://www.youtube.com/watch?v=G-LtddOgUCE
             name = "Joey Jo Jo Junior Shabadoo"
-            mocked_get_token_info = {"active": True,
-                                     "username": name,
-                                     "scope": "openid https://id.fedoraproject.org/scope/groups"}
+            mocked_get_token_info = {
+                "active": True,
+                "username": name,
+                "scope": "openid https://id.fedoraproject.org/scope/groups",
+            }
             get_token_info.return_value = mocked_get_token_info
             get_user_info.return_value = {"groups": ["group"]}

@@ -159,20 +173,24 @@ class TestAuthModule:
             with pytest.raises(module_build_service.errors.Unauthorized) as cm:
                 with app.app_context():
                     module_build_service.auth.get_user(request)
-            assert str(cm.value) == ("Required OIDC scope 'mbs-scope' not present: "
-                                     "['openid', 'https://id.fedoraproject.org/scope/groups']")
+            assert str(cm.value) == (
+                "Required OIDC scope 'mbs-scope' not present: "
+                "['openid', 'https://id.fedoraproject.org/scope/groups']"
+            )

-    @patch('module_build_service.auth._get_token_info')
-    @patch('module_build_service.auth._get_user_info')
+    @patch("module_build_service.auth._get_token_info")
+    @patch("module_build_service.auth._get_user_info")
     def test_get_required_scope_not_set_in_cfg(self, get_user_info, get_token_info):
         base_dir = path.abspath(path.dirname(__file__))
         client_secrets = path.join(base_dir, "client_secrets.json")
-        with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets}):
+        with patch.dict("module_build_service.app.config", {"OIDC_CLIENT_SECRETS": client_secrets}):
             # https://www.youtube.com/watch?v=G-LtddOgUCE
             name = "Joey Jo Jo Junior Shabadoo"
-            mocked_get_token_info = {"active": True,
-                                     "username": name,
-                                     "scope": "openid https://id.fedoraproject.org/scope/groups"}
+            mocked_get_token_info = {
+                "active": True,
+                "username": name,
+                "scope": "openid https://id.fedoraproject.org/scope/groups",
+            }
             get_token_info.return_value = mocked_get_token_info
             get_user_info.return_value = {"groups": ["group"]}

@@ -191,8 +209,13 @@ class TestAuthModule:

 class KerberosMockConfig(object):
-    def __init__(self, uri='ldaps://test.example.local:636', dn='ou=groups,dc=domain,dc=local',
-                 kt='/path/to/keytab', host='mbs.domain.local'):
+    def __init__(
+        self,
+        uri="ldaps://test.example.local:636",
+        dn="ou=groups,dc=domain,dc=local",
+        kt="/path/to/keytab",
+        host="mbs.domain.local",
+    ):
         """
         :param uri: a string overriding config.ldap_uri
         :param dn: a string overriding config.ldap_groups_dn
@@ -206,27 +229,26 @@ class KerberosMockConfig(object):
     def __enter__(self):
         self.auth_method_p = patch.object(
-            mbs_config.Config, 'auth_method', new_callable=PropertyMock)
+            mbs_config.Config, "auth_method", new_callable=PropertyMock)
         mocked_auth_method = self.auth_method_p.start()
-        mocked_auth_method.return_value = 'kerberos'
+        mocked_auth_method.return_value = "kerberos"

-        self.ldap_uri_p = patch.object(
-            mbs_config.Config, 'ldap_uri', new_callable=PropertyMock)
+        self.ldap_uri_p = patch.object(mbs_config.Config, "ldap_uri", new_callable=PropertyMock)
         mocked_ldap_uri = self.ldap_uri_p.start()
         mocked_ldap_uri.return_value = self.uri

         self.ldap_dn_p = patch.object(
-            mbs_config.Config, 'ldap_groups_dn', new_callable=PropertyMock)
+            mbs_config.Config, "ldap_groups_dn", new_callable=PropertyMock)
         mocked_ldap_dn = self.ldap_dn_p.start()
         mocked_ldap_dn.return_value = self.dn

         self.kerberos_keytab_p = patch.object(
-            mbs_config.Config, 'kerberos_keytab', new_callable=PropertyMock)
+            mbs_config.Config, "kerberos_keytab", new_callable=PropertyMock)
         mocked_kerberos_keytab = self.kerberos_keytab_p.start()
         mocked_kerberos_keytab.return_value = self.kt

         self.kerberos_http_host_p = patch.object(
-            mbs_config.Config, 'kerberos_http_host', new_callable=PropertyMock)
+            mbs_config.Config, "kerberos_http_host", new_callable=PropertyMock)
         mocked_kerberos_http_host = self.kerberos_http_host_p.start()
         mocked_kerberos_http_host.return_value = self.host

@@ -239,18 +261,19 @@ class KerberosMockConfig(object):

 class TestAuthModuleKerberos:
-    @pytest.mark.parametrize('allowed_users', (set(), set(['mprahl'])))
-    @patch('kerberos.authGSSServerInit', return_value=(kerberos.AUTH_GSS_COMPLETE, object()))
-    @patch('kerberos.authGSSServerStep', return_value=kerberos.AUTH_GSS_COMPLETE)
-    @patch('kerberos.authGSSServerResponse', return_value='STOKEN')
-    @patch('kerberos.authGSSServerUserName', return_value='mprahl@EXAMPLE.ORG')
-    @patch('kerberos.authGSSServerClean')
-    @patch('kerberos.getServerPrincipalDetails')
-    @patch.dict('os.environ')
-    @patch('module_build_service.auth.stack')
-    @patch.object(mbs_config.Config, 'allowed_users', new_callable=PropertyMock)
-    def test_get_user_kerberos(self, m_allowed_users, stack, principal, clean, name, response,
-                               step, init, allowed_users):
+    @pytest.mark.parametrize("allowed_users", (set(), set(["mprahl"])))
+    @patch("kerberos.authGSSServerInit", return_value=(kerberos.AUTH_GSS_COMPLETE, object()))
+    @patch("kerberos.authGSSServerStep", return_value=kerberos.AUTH_GSS_COMPLETE)
+    @patch("kerberos.authGSSServerResponse", return_value="STOKEN")
+    @patch("kerberos.authGSSServerUserName", return_value="mprahl@EXAMPLE.ORG")
+    @patch("kerberos.authGSSServerClean")
+    @patch("kerberos.getServerPrincipalDetails")
+    @patch.dict("os.environ")
+    @patch("module_build_service.auth.stack")
+    @patch.object(mbs_config.Config, "allowed_users", new_callable=PropertyMock)
+    def test_get_user_kerberos(
+        self, m_allowed_users, stack, principal, clean, name, response, step, init, allowed_users
+    ):
         """
         Test that authentication works with Kerberos and LDAP
         """
@@ -258,7 +281,7 @@ class TestAuthModuleKerberos:
         mock_top = Mock()
         stack.return_value = mock_top

-        headers = {'Authorization': 'foobar'}
+        headers = {"Authorization": "foobar"}
         request = mock.MagicMock()
         request.headers.return_value = mock.MagicMock(spec_set=dict)
         request.headers.__getitem__.side_effect = headers.__getitem__
@@ -266,51 +289,53 @@ class TestAuthModuleKerberos:
         request.headers.__contains__.side_effect = headers.__contains__

         # Create the mock LDAP instance
-        server = ldap3.Server('ldaps://test.domain.local')
+        server = ldap3.Server("ldaps://test.domain.local")
         connection = ldap3.Connection(server, client_strategy=ldap3.MOCK_SYNC)
-        base_dn = 'dc=domain,dc=local'
+        base_dn = "dc=domain,dc=local"
         factory_group_attrs = {
-            'objectClass': ['top', 'posixGroup'],
-            'memberUid': ['mprahl', 'tbrady'],
-            'gidNumber': 1234,
-            'cn': ['factory2-devs']
+            "objectClass": ["top", "posixGroup"],
+            "memberUid": ["mprahl", "tbrady"],
+            "gidNumber": 1234,
+            "cn": ["factory2-devs"],
         }
         devs_group_attrs = {
-            'objectClass': ['top', 'posixGroup'],
-            'memberUid': ['mprahl', 'mikeb'],
-            'gidNumber': 1235,
-            'cn': ['devs']
+            "objectClass": ["top", "posixGroup"],
+            "memberUid": ["mprahl", "mikeb"],
+            "gidNumber": 1235,
+            "cn": ["devs"],
         }
         athletes_group_attrs = {
-            'objectClass': ['top', 'posixGroup'],
-            'memberUid': ['tbrady', 'rgronkowski'],
-            'gidNumber': 1236,
-            'cn': ['athletes']
+            "objectClass": ["top", "posixGroup"],
+            "memberUid": ["tbrady", "rgronkowski"],
+            "gidNumber": 1236,
+            "cn": ["athletes"],
         }
         mprahl_attrs = {
-            'memberOf': ['cn=Employee,ou=groups,{0}'.format(base_dn)],
-            'uid': ['mprahl'],
-            'cn': ['mprahl'],
-            'objectClass': ['top', 'person']
+            "memberOf": ["cn=Employee,ou=groups,{0}".format(base_dn)],
+            "uid": ["mprahl"],
+            "cn": ["mprahl"],
+            "objectClass": ["top", "person"],
         }
-        connection.strategy.add_entry('cn=factory2-devs,ou=groups,{0}'.format(base_dn),
-                                      factory_group_attrs)
-        connection.strategy.add_entry('cn=athletes,ou=groups,{0}'.format(base_dn),
-                                      athletes_group_attrs)
-        connection.strategy.add_entry('cn=devs,ou=groups,{0}'.format(base_dn), devs_group_attrs)
-        connection.strategy.add_entry('cn=mprahl,ou=users,{0}'.format(base_dn), mprahl_attrs)
+        connection.strategy.add_entry(
+            "cn=factory2-devs,ou=groups,{0}".format(base_dn), factory_group_attrs
+        )
+        connection.strategy.add_entry(
+            "cn=athletes,ou=groups,{0}".format(base_dn), athletes_group_attrs
+        )
+        connection.strategy.add_entry("cn=devs,ou=groups,{0}".format(base_dn), devs_group_attrs)
+        connection.strategy.add_entry("cn=mprahl,ou=users,{0}".format(base_dn), mprahl_attrs)

         # If the user is in allowed_users, then group membership is not checked, and an empty set
         # is just returned for the groups
         if allowed_users:
             expected_groups = set()
         else:
-            expected_groups = {'devs', 'factory2-devs'}
+            expected_groups = {"devs", "factory2-devs"}

-        with patch('ldap3.Connection') as mock_ldap_con, KerberosMockConfig():
+        with patch("ldap3.Connection") as mock_ldap_con, KerberosMockConfig():
             mock_ldap_con.return_value = connection
-            assert module_build_service.auth.get_user_kerberos(request) == \
-                ('mprahl', expected_groups)
+            assert module_build_service.auth.get_user_kerberos(request) == (
+                "mprahl", expected_groups)

     def test_auth_header_not_set(self):
         """
@@ -327,53 +352,55 @@ class TestAuthModuleKerberos:
         with KerberosMockConfig():
             try:
                 module_build_service.auth.get_user_kerberos(request)
-                assert False, 'Unauthorized error not raised'
+                assert False, "Unauthorized error not raised"
             except FlaskUnauthorized as error:
-                assert error.response.www_authenticate.to_header().strip() == 'Negotiate'
-                assert error.response.status == '401 UNAUTHORIZED'
+                assert error.response.www_authenticate.to_header().strip() == "Negotiate"
+                assert error.response.status == "401 UNAUTHORIZED"

     @patch.dict(environ)
     def test_keytab_not_set(self):
         """
         Test that authentication fails when the keytab is not set
         """
-        if 'KRB5_KTNAME' in environ:
-            del environ['KRB5_KTNAME']
+        if "KRB5_KTNAME" in environ:
+            del environ["KRB5_KTNAME"]

-        headers = {'Authorization': 'foobar'}
+        headers = {"Authorization": "foobar"}
         request = mock.MagicMock()
         request.headers.return_value = mock.MagicMock(spec_set=dict)
         request.headers.__getitem__.side_effect = headers.__getitem__
         request.headers.__setitem__.side_effect = headers.__setitem__
         request.headers.__contains__.side_effect = headers.__contains__

-        with KerberosMockConfig(kt=''):
+        with KerberosMockConfig(kt=""):
             try:
                 module_build_service.auth.get_user_kerberos(request)
-                assert False, 'Unauthorized error not raised'
+                assert False, "Unauthorized error not raised"
             except module_build_service.errors.Unauthorized as error:
-                assert str(error) == ('Kerberos: set the config value of "KERBEROS_KEYTAB" '
-                                      'or the environment variable "KRB5_KTNAME" to your '
-                                      'keytab file')
+                assert str(error) == (
+                    'Kerberos: set the config value of "KERBEROS_KEYTAB" '
+                    'or the environment variable "KRB5_KTNAME" to your keytab file'
+                )

     # Set the return value to something not 0 (continue) or 1 (complete)
-    @patch('kerberos.authGSSServerInit', return_value=(100, object()))
-    @patch('kerberos.authGSSServerStep', return_value=kerberos.AUTH_GSS_COMPLETE)
-    @patch('kerberos.authGSSServerResponse', return_value='STOKEN')
-    @patch('kerberos.authGSSServerUserName', return_value='mprahl@EXAMPLE.ORG')
-    @patch('kerberos.authGSSServerClean')
-    @patch('kerberos.getServerPrincipalDetails')
-    @patch.dict('os.environ')
-    @patch('module_build_service.auth.stack')
-    def test_get_user_kerberos_invalid_ticket(self, stack, principal, clean, name, response,
-                                              step, init):
+    @patch("kerberos.authGSSServerInit", return_value=(100, object()))
+    @patch("kerberos.authGSSServerStep", return_value=kerberos.AUTH_GSS_COMPLETE)
+    @patch("kerberos.authGSSServerResponse", return_value="STOKEN")
+    @patch("kerberos.authGSSServerUserName", return_value="mprahl@EXAMPLE.ORG")
+    @patch("kerberos.authGSSServerClean")
+    @patch("kerberos.getServerPrincipalDetails")
+    @patch.dict("os.environ")
+    @patch("module_build_service.auth.stack")
+    def test_get_user_kerberos_invalid_ticket(
+        self, stack, principal, clean, name, response, step, init
+    ):
         """
         Test that authentication fails with an invalid Kerberos ticket
         """
         mock_top = Mock()
         stack.return_value = mock_top

-        headers = {'Authorization': 'foobar'}
+        headers = {"Authorization": "foobar"}
         request = mock.MagicMock()
         request.headers.return_value = mock.MagicMock(spec_set=dict)
         request.headers.__getitem__.side_effect = headers.__getitem__
@@ -383,6 +410,6 @@ class TestAuthModuleKerberos:
         with KerberosMockConfig():
             try:
                 module_build_service.auth.get_user_kerberos(request)
-                assert False, 'Forbidden error not raised'
+                assert False, "Forbidden error not raised"
             except module_build_service.errors.Forbidden as error:
-                assert str(error) == ('Invalid Kerberos ticket')
+                assert str(error) == ("Invalid Kerberos ticket")
diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py
index 27b3cae5..2d10b0d9 100644
--- a/tests/test_build/test_build.py
+++ b/tests/test_build/test_build.py
@@ -52,7 +52,7 @@
 from module_build_service.messaging import MBSModule

 base_dir = dirname(dirname(__file__))

-user = ('Homer J. Simpson', set(['packager']))
+user = ("Homer J. Simpson", set(["packager"]))

 class FakeSCM(object):
@@ -66,7 +66,7 @@ class FakeSCM(object):
         self.mocked_scm.return_value.checkout = self.checkout
         self.mocked_scm.return_value.name = self.name
-        self.mocked_scm.return_value.branch = 'master'
+        self.mocked_scm.return_value.branch = "master"
         self.mocked_scm.return_value.get_latest = self.get_latest
         self.mocked_scm.return_value.commit = self.commit
         self.mocked_scm.return_value.version = self.version
@@ -78,12 +78,12 @@ class FakeSCM(object):
         self.sourcedir = path.join(temp_dir, self.name)
         mkdir(self.sourcedir)
         base_dir = path.abspath(path.dirname(__file__))
-        copyfile(path.join(base_dir, '..', 'staged_data', self.mmd_filename),
-                 self.get_module_yaml())
+        copyfile(
+            path.join(base_dir, "..", "staged_data", self.mmd_filename), self.get_module_yaml())

         return self.sourcedir

-    def get_latest(self, ref='master'):
+    def get_latest(self, ref="master"):
         return ref

     def get_module_yaml(self):
@@ -108,7 +108,7 @@ class FakeModuleBuilder(GenericBuilder):
     on_tag_artifacts_cb = None
     on_buildroot_add_repos_cb = None

-    @module_build_service.utils.validate_koji_tag('tag_name')
+    @module_build_service.utils.validate_koji_tag("tag_name")
     def __init__(self, owner, module, config, tag_name, components):
         self.module_str = module
         self.tag_name = tag_name
@@ -125,19 +125,46 @@ class FakeModuleBuilder(GenericBuilder):
         FakeModuleBuilder.on_tag_artifacts_cb = None
         FakeModuleBuilder.on_buildroot_add_repos_cb = None
         FakeModuleBuilder.DEFAULT_GROUPS = None
-        FakeModuleBuilder.backend = 'test'
+        FakeModuleBuilder.backend = "test"

     def buildroot_connect(self, groups):
         default_groups = FakeModuleBuilder.DEFAULT_GROUPS or {
-            'srpm-build':
-                set(['shadow-utils', 'fedora-release', 'redhat-rpm-config',
-                     'rpm-build', 'fedpkg-minimal', 'gnupg2', 'bash']),
-            'build':
-                set(['unzip', 'fedora-release', 'tar', 'cpio', 'gawk',
-                     'gcc', 'xz', 'sed', 'findutils', 'util-linux', 'bash',
-                     'info', 'bzip2', 'grep', 'redhat-rpm-config',
-                     'diffutils', 'make', 'patch', 'shadow-utils',
-                     'coreutils', 'which', 'rpm-build', 'gzip', 'gcc-c++'])}
+            "srpm-build": set([
+                "shadow-utils",
+                "fedora-release",
+                "redhat-rpm-config",
+                "rpm-build",
+                "fedpkg-minimal",
+                "gnupg2",
+                "bash",
+            ]),
+            "build": set([
+                "unzip",
+                "fedora-release",
+                "tar",
+                "cpio",
+                "gawk",
+                "gcc",
+                "xz",
+                "sed",
+                "findutils",
+                "util-linux",
+                "bash",
+                "info",
+                "bzip2",
+                "grep",
+                "redhat-rpm-config",
+                "diffutils",
+                "make",
+                "patch",
+                "shadow-utils",
+                "coreutils",
+                "which",
+                "rpm-build",
+                "gzip",
+                "gcc-c++",
+            ]),
+        }
         if groups != default_groups:
             raise ValueError("Wrong groups in FakeModuleBuilder.buildroot_connect()")
@@ -159,18 +186,18 @@ class FakeModuleBuilder(GenericBuilder):
     def buildroot_add_artifacts(self, artifacts, install=False):
         if FakeModuleBuilder.on_buildroot_add_artifacts_cb:
             FakeModuleBuilder.on_buildroot_add_artifacts_cb(self, artifacts, install)
-        if self.backend == 'test':
+        if self.backend == "test":
             for nvr in artifacts:
                 # buildroot_add_artifacts received a list of NVRs, but the tag message expects the
                 # component name. At this point, the NVR may not be set if we are trying to reuse
                 # all components, so we can't search the database. We must parse the package name
                 # from the nvr and then tag it in the build tag. Kobo doesn't work when parsing
                 # the NVR of a component with a module dist-tag, so we must manually do it.
-                package_name = nvr.split('.module')[0].rsplit('-', 2)[0]
+                package_name = nvr.split(".module")[0].rsplit("-", 2)[0]
                 # When INSTANT_COMPLETE is on, the components are already in the build tag
                 if self.INSTANT_COMPLETE is False:
                     self._send_tag(package_name, nvr, dest_tag=False)
-        elif self.backend == 'testlocal':
+        elif self.backend == "testlocal":
             self._send_repo_done()

     def buildroot_add_repos(self, dependencies):
@@ -181,7 +208,7 @@ class FakeModuleBuilder(GenericBuilder):
         if FakeModuleBuilder.on_tag_artifacts_cb:
             FakeModuleBuilder.on_tag_artifacts_cb(self, artifacts, dest_tag=dest_tag)

-        if self.backend == 'test':
+        if self.backend == "test":
             for nvr in artifacts:
                 # tag_artifacts received a list of NVRs, but the tag message expects the
                 # component name
@@ -195,6 +222,7 @@ class FakeModuleBuilder(GenericBuilder):
         def _newRepo(tag):
             session.newRepo = self._send_repo_done()
             return 123
+
         session.newRepo = _newRepo
         return session
@@ -204,9 +232,7 @@ class FakeModuleBuilder(GenericBuilder):
     def _send_repo_done(self):
         msg = module_build_service.messaging.KojiRepoChange(
-            msg_id='a faked internal message',
-            repo_tag=self.tag_name + "-build",
-        )
+            msg_id="a faked internal message", repo_tag=self.tag_name + "-build")
         module_build_service.scheduler.consumer.work_queue_put(msg)

     def _send_tag(self, artifact, nvr, dest_tag=True):
@@ -215,24 +241,20 @@ class FakeModuleBuilder(GenericBuilder):
         else:
             tag = self.tag_name + "-build"
         msg = module_build_service.messaging.KojiTagChange(
-            msg_id='a faked internal message',
-            tag=tag,
-            artifact=artifact,
-            nvr=nvr
-        )
+            msg_id="a faked internal message", tag=tag, artifact=artifact, nvr=nvr)
         module_build_service.scheduler.consumer.work_queue_put(msg)

     def _send_build_change(self, state, name, build_id):
         # build_id=1 and task_id=1 are OK here, because we are building just
         # one RPM at a time.
         msg = module_build_service.messaging.KojiBuildChange(
-            msg_id='a faked internal message',
+            msg_id="a faked internal message",
             build_id=build_id,
             task_id=build_id,
             build_name=name,
             build_new_state=state,
             build_release="1",
-            build_version="1"
+            build_version="1",
         )
         module_build_service.scheduler.consumer.work_queue_put(msg)
@@ -248,7 +270,7 @@ class FakeModuleBuilder(GenericBuilder):
             koji.BUILD_STATES[FakeModuleBuilder.BUILD_STATE], artifact_name, build_id)
         reason = "Submitted %s to Koji" % (artifact_name)
-        return build_id, koji.BUILD_STATES['BUILDING'], reason, None
+        return build_id, koji.BUILD_STATES["BUILDING"], reason, None

     @staticmethod
     def get_disttag_srpm(disttag, module_build):
@@ -259,31 +281,42 @@ class FakeModuleBuilder(GenericBuilder):
         if FakeModuleBuilder.on_cancel_cb:
             FakeModuleBuilder.on_cancel_cb(self, task_id)

-    def list_tasks_for_components(self, component_builds=None, state='active'):
+    def list_tasks_for_components(self, component_builds=None, state="active"):
         pass

     def recover_orphaned_artifact(self, component_build):
         msgs = []
         if self.INSTANT_COMPLETE:
-            disttag = module_build_service.utils.get_rpm_release(
-                component_build.module_build)
+            disttag = module_build_service.utils.get_rpm_release(component_build.module_build)
             # We don't know the version or release, so just use a random one here
-            nvr = '{0}-1.0-1.{1}'.format(component_build.package, disttag)
-            component_build.state = koji.BUILD_STATES['COMPLETE']
+            nvr = "{0}-1.0-1.{1}".format(component_build.package, disttag)
+            component_build.state = koji.BUILD_STATES["COMPLETE"]
             component_build.nvr = nvr
             component_build.task_id = component_build.id + 51234
-            component_build.state_reason = 'Found existing build'
+            component_build.state_reason = "Found existing build"
             nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr)
             # Send a message stating the build is complete
-            msgs.append(module_build_service.messaging.KojiBuildChange(
-                'recover_orphaned_artifact: fake message', randint(1, 9999999),
-                component_build.task_id, koji.BUILD_STATES['COMPLETE'], component_build.package,
-                nvr_dict['version'], nvr_dict['release'], component_build.module_build.id))
+            msgs.append(
+                module_build_service.messaging.KojiBuildChange(
+                    "recover_orphaned_artifact: fake message",
+                    randint(1, 9999999),
+                    component_build.task_id,
+                    koji.BUILD_STATES["COMPLETE"],
+                    component_build.package,
+                    nvr_dict["version"],
+                    nvr_dict["release"],
+                    component_build.module_build.id,
+                )
+            )
             # Send a message stating that the build was tagged in the build tag
-            msgs.append(module_build_service.messaging.KojiTagChange(
-                'recover_orphaned_artifact: fake message',
-                component_build.module_build.koji_tag + '-build', component_build.package,
-                component_build.nvr))
+            msgs.append(
+                module_build_service.messaging.KojiTagChange(
+                    "recover_orphaned_artifact: fake message",
+                    component_build.module_build.koji_tag + "-build",
+                    component_build.package,
+                    component_build.nvr,
+                )
+            )
         return msgs

     def finalize(self, succeeded=None):
@@ -294,26 +327,57 @@ def cleanup_moksha():
     # Necessary to restart the twisted reactor for the next test.
     import sys
-    del sys.modules['twisted.internet.reactor']
-    del sys.modules['moksha.hub.reactor']
-    del sys.modules['moksha.hub']
-    import moksha.hub.reactor  # noqa
+
+    del sys.modules["twisted.internet.reactor"]
+    del sys.modules["moksha.hub.reactor"]
+    del sys.modules["moksha.hub"]
+    import moksha.hub.reactor  # noqa

-@patch('module_build_service.scheduler.handlers.modules.handle_stream_collision_modules')
-@patch.object(module_build_service.config.Config, 'system', new_callable=PropertyMock,
-              return_value='test')
-@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
-       return_value={
-           'srpm-build':
-               set(['shadow-utils', 'fedora-release', 'redhat-rpm-config',
-                    'rpm-build', 'fedpkg-minimal', 'gnupg2', 'bash']),
-           'build':
-               set(['unzip', 'fedora-release', 'tar', 'cpio', 'gawk',
-                    'gcc', 'xz', 'sed', 'findutils', 'util-linux', 'bash',
-                    'info', 'bzip2', 'grep', 'redhat-rpm-config',
-                    'diffutils', 'make', 'patch', 'shadow-utils',
-                    'coreutils', 'which', 'rpm-build', 'gzip', 'gcc-c++'])})
+@patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules")
+@patch.object(
+    module_build_service.config.Config, "system", new_callable=PropertyMock, return_value="test"
+)
+@patch(
+    "module_build_service.builder.GenericBuilder.default_buildroot_groups",
+    return_value={
+        "srpm-build": set([
+            "shadow-utils",
+            "fedora-release",
+            "redhat-rpm-config",
+            "rpm-build",
+            "fedpkg-minimal",
+            "gnupg2",
+            "bash",
+        ]),
+        "build": set([
+            "unzip",
+            "fedora-release",
+            "tar",
+            "cpio",
+            "gawk",
+            "gcc",
+            "xz",
+            "sed",
+            "findutils",
+            "util-linux",
+            "bash",
+            "info",
+            "bzip2",
+            "grep",
+            "redhat-rpm-config",
+            "diffutils",
+            "make",
+            "patch",
+            "shadow-utils",
+            "coreutils",
+            "which",
+            "rpm-build",
+            "gzip",
+            "gcc-c++",
+        ]),
+    },
+)
 class TestBuild:
     # Global variable used for tests if needed
     _global_var = None
@@ -332,33 +396,36 @@ class TestBuild:
         except Exception:
             pass

-    @pytest.mark.parametrize('mmd_version', [1, 2])
-    @patch('module_build_service.auth.get_user', return_value=user)
-    @patch('module_build_service.scm.SCM')
-    def test_submit_build(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc,
-                          mmd_version):
+    @pytest.mark.parametrize("mmd_version", [1, 2])
+    @patch("module_build_service.auth.get_user", return_value=user)
+    @patch("module_build_service.scm.SCM")
+    def test_submit_build(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version):
         """
         Tests the build of testmodule.yaml using FakeModuleBuilder which
        succeeds every time.
        """
         if mmd_version == 1:
-            yaml_file = 'testmodule.yaml'
+            yaml_file = "testmodule.yaml"
         else:
-            yaml_file = 'testmodule_v2.yaml'
-        FakeSCM(mocked_scm, 'testmodule', yaml_file,
-                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
+            yaml_file = "testmodule_v2.yaml"
+        FakeSCM(mocked_scm, "testmodule", yaml_file, "620ec77321b2ea7b0d67d82992dda3e1d67055b4")

-        rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(
-            {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/'
-                'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'}))
+        rv = self.client.post(
+            "/module-build-service/1/module-builds/",
+            data=json.dumps({
+                "branch": "master",
+                "scmurl": "https://src.stg.fedoraproject.org/modules/"
+                "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4",
+            }),
+        )

         data = json.loads(rv.data)
-        module_build_id = data['id']
+        module_build_id = data["id"]

         # Check that components are tagged after the batch is built.
         tag_groups = []
-        tag_groups.append(set(['perl-Tangerine-1-1', 'perl-List-Compare-1-1']))
-        tag_groups.append(set(['tangerine-1-1']))
+        tag_groups.append(set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]))
+        tag_groups.append(set(["tangerine-1-1"]))

         def on_finalize_cb(cls, succeeded):
             assert succeeded is True
@@ -372,9 +439,9 @@ class TestBuild:
         # Check that the components are added to buildroot after the batch
         # is built.
         buildroot_groups = []
-        buildroot_groups.append(set(['module-build-macros-1-1']))
-        buildroot_groups.append(set(['perl-Tangerine-1-1', 'perl-List-Compare-1-1']))
-        buildroot_groups.append(set(['tangerine-1-1']))
+        buildroot_groups.append(set(["module-build-macros-1-1"]))
+        buildroot_groups.append(set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]))
+        buildroot_groups.append(set(["tangerine-1-1"]))

         def on_buildroot_add_artifacts_cb(cls, artifacts, install):
             assert buildroot_groups.pop(0) == set(artifacts)
@@ -388,35 +455,46 @@ class TestBuild:
         # All components should be built and module itself should be in "done"
         # or "ready" state.
         for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
-            assert build.state == koji.BUILD_STATES['COMPLETE']
-            assert build.module_build.state in [models.BUILD_STATES["done"],
-                                                models.BUILD_STATES["ready"]]
+            assert build.state == koji.BUILD_STATES["COMPLETE"]
+            assert build.module_build.state in [
+                models.BUILD_STATES["done"],
+                models.BUILD_STATES["ready"],
+            ]

         # All components have to be tagged, so tag_groups and buildroot_groups are empty...
         assert tag_groups == []
         assert buildroot_groups == []
         module_build = models.ModuleBuild.query.get(module_build_id)
-        assert module_build.module_builds_trace[0].state == models.BUILD_STATES['init']
-        assert module_build.module_builds_trace[1].state == models.BUILD_STATES['wait']
-        assert module_build.module_builds_trace[2].state == models.BUILD_STATES['build']
-        assert module_build.module_builds_trace[3].state == models.BUILD_STATES['done']
-        assert module_build.module_builds_trace[4].state == models.BUILD_STATES['ready']
+        assert module_build.module_builds_trace[0].state == models.BUILD_STATES["init"]
+        assert module_build.module_builds_trace[1].state == models.BUILD_STATES["wait"]
+        assert module_build.module_builds_trace[2].state == models.BUILD_STATES["build"]
+        assert module_build.module_builds_trace[3].state == models.BUILD_STATES["done"]
+        assert module_build.module_builds_trace[4].state == models.BUILD_STATES["ready"]
         assert len(module_build.module_builds_trace) == 5

-    @patch('module_build_service.auth.get_user', return_value=user)
-    @patch('module_build_service.scm.SCM')
+    @patch("module_build_service.auth.get_user", return_value=user)
+    @patch("module_build_service.scm.SCM")
     def test_submit_build_no_components(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc):
         """
         Tests the build of a module with no components
         """
-        FakeSCM(mocked_scm, 'python3', 'python3-no-components.yaml',
-                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
-        rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(
-            {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/'
-                'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'}))
+        FakeSCM(
+            mocked_scm,
+            "python3",
+            "python3-no-components.yaml",
+            "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
+        )
+        rv = self.client.post(
+            "/module-build-service/1/module-builds/",
+            data=json.dumps({
+                "branch": "master",
+                "scmurl": "https://src.stg.fedoraproject.org/modules/"
+                "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4",
+            }),
+        )

         data = json.loads(rv.data)
-        module_build_id = data['id']
+        module_build_id = data["id"]
         msgs = []
         stop = module_build_service.scheduler.make_simple_stop_condition(db.session)
         module_build_service.scheduler.main(msgs, stop)
@@ -425,90 +503,124 @@ class TestBuild:
         # Make sure no component builds were registered
         assert len(module_build.component_builds) == 0
         # Make sure the build is done
-        assert module_build.state == models.BUILD_STATES['ready']
+        assert module_build.state == models.BUILD_STATES["ready"]

-    @patch('module_build_service.config.Config.check_for_eol',
-           new_callable=PropertyMock, return_value=True)
-    @patch('module_build_service.utils.submit._is_eol_in_pdc', return_value=True)
-    @patch('module_build_service.auth.get_user', return_value=user)
-    @patch('module_build_service.scm.SCM')
-    def test_submit_build_eol_module(self, mocked_scm, mocked_get_user, is_eol, check,
-                                     conf_system, dbg, hmsc):
+    @patch(
+        "module_build_service.config.Config.check_for_eol",
+        new_callable=PropertyMock,
+        return_value=True,
+    )
+    @patch("module_build_service.utils.submit._is_eol_in_pdc", return_value=True)
+    @patch("module_build_service.auth.get_user", return_value=user)
+    @patch("module_build_service.scm.SCM")
+    def test_submit_build_eol_module(
+        self, mocked_scm, mocked_get_user, is_eol, check, conf_system, dbg, hmsc
+    ):
         """
         Tests the build of a module with an eol stream.
         """
-        FakeSCM(mocked_scm, 'python3', 'python3-no-components.yaml',
-                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
-        rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(
-            {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/'
-                'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'}))
+        FakeSCM(
+            mocked_scm,
+            "python3",
+            "python3-no-components.yaml",
+            "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
+        )
+        rv = self.client.post(
+            "/module-build-service/1/module-builds/",
+            data=json.dumps({
+                "branch": "master",
+                "scmurl": "https://src.stg.fedoraproject.org/modules/"
+                "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4",
+            }),
+        )

         assert rv.status_code == 400
         data = json.loads(rv.data)
-        assert data['status'] == 400
-        assert data['message'] == u'Module python3:master is marked as EOL in PDC.'
+        assert data["status"] == 400
+        assert data["message"] == u"Module python3:master is marked as EOL in PDC."
- @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_from_yaml_not_allowed( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): FakeSCM(mocked_scm, "testmodule", "testmodule.yaml") - testmodule = os.path.join(base_dir, 'staged_data', 'testmodule.yaml') + testmodule = os.path.join(base_dir, "staged_data", "testmodule.yaml") with open(testmodule) as f: yaml = to_text_type(f.read()) - with patch.object(module_build_service.config.Config, 'yaml_submit_allowed', - new_callable=PropertyMock, return_value=False): - rv = self.client.post('/module-build-service/1/module-builds/', - content_type='multipart/form-data', - data={'yaml': (testmodule, yaml)}) + with patch.object( + module_build_service.config.Config, + "yaml_submit_allowed", + new_callable=PropertyMock, + return_value=False, + ): + rv = self.client.post( + "/module-build-service/1/module-builds/", + content_type="multipart/form-data", + data={"yaml": (testmodule, yaml)}, + ) data = json.loads(rv.data) - assert data['status'] == 403 - assert data['message'] == 'YAML submission is not enabled' + assert data["status"] == 403 + assert data["message"] == "YAML submission is not enabled" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_from_yaml_allowed( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') - testmodule = os.path.join(base_dir, 'staged_data', 'testmodule.yaml') + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") + testmodule = os.path.join(base_dir, "staged_data", "testmodule.yaml") - with patch.object(module_build_service.config.Config, 'yaml_submit_allowed', - new_callable=PropertyMock, return_value=True): - with open(testmodule, 'rb') as f: + with patch.object( + module_build_service.config.Config, + "yaml_submit_allowed", + new_callable=PropertyMock, + return_value=True, + ): + with open(testmodule, "rb") as f: yaml_file = FileStorage(f) - rv = self.client.post('/module-build-service/1/module-builds/', - content_type='multipart/form-data', - data={'yaml': yaml_file}) + rv = self.client.post( + "/module-build-service/1/module-builds/", + content_type="multipart/form-data", + data={"yaml": yaml_file}, + ) data = json.loads(rv.data) - assert data['id'] == 2 + assert data["id"] == 2 msgs = [] stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) - assert models.ModuleBuild.query.first().state == models.BUILD_STATES['ready'] + assert models.ModuleBuild.query.first().state == models.BUILD_STATES["ready"] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_cancel(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): """ Submit all builds for a module and cancel the module build later. 
""" - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] # This callback is called before return of FakeModuleBuilder.build() # method. We just cancel the build here using the web API to simulate # user cancelling the build in the middle of building. def on_build_cb(cls, artifact_name, source): - self.client.patch('/module-build-service/1/module-builds/' + str(module_build_id), - data=json.dumps({'state': 'failed'})) + self.client.patch( + "/module-build-service/1/module-builds/" + str(module_build_id), + data=json.dumps({"state": "failed"}), + ) cancelled_tasks = [] @@ -533,7 +645,7 @@ class TestBuild: # module build, all components and even the module itself should be in # failed state with state_reason se to cancellation message. for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['FAILED'] + assert build.state == koji.BUILD_STATES["FAILED"] assert build.state_reason == "Canceled by Homer J. Simpson." assert build.module_build.state == models.BUILD_STATES["failed"] assert build.module_build.state_reason == "Canceled by Homer J. Simpson." @@ -542,23 +654,29 @@ class TestBuild: if build.task_id: assert build.task_id in cancelled_tasks - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_instant_complete( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests the build of testmodule.yaml using FakeModuleBuilder which succeeds everytime. """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] FakeModuleBuilder.INSTANT_COMPLETE = True msgs = [] @@ -568,30 +686,40 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. 
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES["done"], - models.BUILD_STATES["ready"]] + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch("module_build_service.config.Config.num_concurrent_builds", - new_callable=PropertyMock, return_value=1) - def test_submit_build_concurrent_threshold(self, conf_num_concurrent_builds, - mocked_scm, mocked_get_user, - conf_system, dbg, hmsc): + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.num_concurrent_builds", + new_callable=PropertyMock, + return_value=1, + ) + def test_submit_build_concurrent_threshold( + self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests the build of testmodule.yaml using FakeModuleBuilder with num_concurrent_builds set to 1. """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] def stop(message): """ @@ -599,9 +727,12 @@ class TestBuild: more components than the num_concurrent_builds. """ main_stop = module_build_service.scheduler.make_simple_stop_condition(db.session) - over_threshold = conf.num_concurrent_builds < \ + build_count = ( db.session.query(models.ComponentBuild).filter_by( - state=koji.BUILD_STATES['BUILDING']).count() + state=koji.BUILD_STATES["BUILDING"] + ).count() + ) + over_threshold = conf.num_concurrent_builds < build_count return main_stop(message) or over_threshold msgs = [] @@ -610,29 +741,43 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] + assert build.state == koji.BUILD_STATES["COMPLETE"] # When this fails, it can mean that num_concurrent_builds # threshold has been met. 
- assert build.module_build.state in [models.BUILD_STATES["done"], - models.BUILD_STATES["ready"]] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch("module_build_service.config.Config.num_concurrent_builds", - new_callable=PropertyMock, return_value=2) - def test_try_to_reach_concurrent_threshold(self, conf_num_concurrent_builds, - mocked_scm, mocked_get_user, - conf_system, dbg, hmsc): + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.num_concurrent_builds", + new_callable=PropertyMock, + return_value=2, + ) + def test_try_to_reach_concurrent_threshold( + self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that we try to submit new component build right after the previous one finished without waiting for all the num_concurrent_builds to finish. """ - FakeSCM(mocked_scm, 'testmodule-more-components', 'testmodule-more-components.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') - self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + FakeSCM( + mocked_scm, + "testmodule-more-components", + "testmodule-more-components.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) + self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) # Holds the number of concurrent component builds during # the module build. @@ -644,8 +789,11 @@ class TestBuild: more components than the num_concurrent_builds. """ main_stop = module_build_service.scheduler.make_simple_stop_condition(db.session) - num_building = db.session.query(models.ComponentBuild).filter_by( - state=koji.BUILD_STATES['BUILDING']).count() + num_building = ( + db.session.query(models.ComponentBuild) + .filter_by(state=koji.BUILD_STATES["BUILDING"]) + .count() + ) over_threshold = conf.num_concurrent_builds < num_building TestBuild._global_var.append(num_building) return main_stop(message) or over_threshold @@ -666,25 +814,35 @@ class TestBuild: num_builds = [k for k, g in itertools.groupby(TestBuild._global_var)] assert num_builds.count(1) == 2 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch("module_build_service.config.Config.num_concurrent_builds", - new_callable=PropertyMock, return_value=1) - def test_build_in_batch_fails(self, conf_num_concurrent_builds, mocked_scm, - mocked_get_user, conf_system, dbg, hmsc): + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.num_concurrent_builds", + new_callable=PropertyMock, + return_value=1, + ) + def test_build_in_batch_fails( + self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that if the build in batch fails, other components in a batch are still build, but next batch is not started. 
""" - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4" + ) - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] def on_build_cb(cls, artifact_name, source): # fail perl-Tangerine build @@ -699,6 +857,7 @@ class TestBuild: # in batch. def on_tag_artifacts_cb(cls, artifacts, dest_tag=True): raise ValueError("No component should be tagged.") + FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb msgs = [] @@ -708,42 +867,51 @@ class TestBuild: for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): # perl-Tangerine is expected to fail as configured in on_build_cb. if c.package == "perl-Tangerine": - assert c.state == koji.BUILD_STATES['FAILED'] + assert c.state == koji.BUILD_STATES["FAILED"] # tangerine is expected to fail, because it is in batch 3, but # we had a failing component in batch 2. elif c.package == "tangerine": - assert c.state == koji.BUILD_STATES['FAILED'] + assert c.state == koji.BUILD_STATES["FAILED"] assert c.state_reason == "Component(s) perl-Tangerine failed to build." else: - assert c.state == koji.BUILD_STATES['COMPLETE'] + assert c.state == koji.BUILD_STATES["COMPLETE"] # Whole module should be failed. - assert c.module_build.state == models.BUILD_STATES['failed'] + assert c.module_build.state == models.BUILD_STATES["failed"] assert c.module_build.state_reason == "Component(s) perl-Tangerine failed to build." # We should end up with batch 2 and never start batch 3, because # there were failed components in batch 2. assert c.module_build.batch == 2 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch("module_build_service.config.Config.num_concurrent_builds", - new_callable=PropertyMock, return_value=1) - def test_all_builds_in_batch_fail(self, conf_num_concurrent_builds, mocked_scm, - mocked_get_user, conf_system, dbg, hmsc): + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.num_concurrent_builds", + new_callable=PropertyMock, + return_value=1, + ) + def test_all_builds_in_batch_fail( + self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that if the build in batch fails, other components in a batch are still build, but next batch is not started. 
""" - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] def on_build_cb(cls, artifact_name, source): # Next components *after* the module-build-macros will fail @@ -760,22 +928,24 @@ class TestBuild: for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): # perl-Tangerine is expected to fail as configured in on_build_cb. if c.package == "module-build-macros": - assert c.state == koji.BUILD_STATES['COMPLETE'] + assert c.state == koji.BUILD_STATES["COMPLETE"] else: - assert c.state == koji.BUILD_STATES['FAILED'] + assert c.state == koji.BUILD_STATES["FAILED"] # Whole module should be failed. - assert c.module_build.state == models.BUILD_STATES['failed'] - assert re.match(r'Component\(s\) (perl-Tangerine|perl-List-Compare), ' - '(perl-Tangerine|perl-List-Compare) failed to build.', - c.module_build.state_reason) + assert c.module_build.state == models.BUILD_STATES["failed"] + assert re.match( + r"Component\(s\) (perl-Tangerine|perl-List-Compare), " + "(perl-Tangerine|perl-List-Compare) failed to build.", + c.module_build.state_reason, + ) # We should end up with batch 2 and never start batch 3, because # there were failed components in batch 2. assert c.module_build.batch == 2 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): """ Tests that we do not try building module-build-macros when reusing all @@ -785,49 +955,63 @@ class TestBuild: def on_build_cb(cls, artifact_name, source): raise ValueError("All components should be reused, not build.") + FakeModuleBuilder.on_build_cb = on_build_cb # Check that components are tagged after the batch is built. 
tag_groups = [] - tag_groups.append(set( - ['perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723', - 'tangerine-0.22-3.module+0+d027b723'])) + tag_groups.append( + set([ + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + "tangerine-0.22-3.module+0+d027b723", + ]) + ) def on_tag_artifacts_cb(cls, artifacts, dest_tag=True): if dest_tag is True: assert tag_groups.pop(0) == set(artifacts) + FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb buildtag_groups = [] - buildtag_groups.append(set( - ['perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723', - 'tangerine-0.22-3.module+0+d027b723'])) + buildtag_groups.append(set([ + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + "tangerine-0.22-3.module+0+d027b723", + ])) def on_buildroot_add_artifacts_cb(cls, artifacts, install): assert buildtag_groups.pop(0) == set(artifacts) + FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb msgs = [MBSModule("local module build", 3, 1)] stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) - reused_component_ids = {"module-build-macros": None, "tangerine": 3, - "perl-Tangerine": 1, "perl-List-Compare": 2} + reused_component_ids = { + "module-build-macros": None, + "tangerine": 3, + "perl-Tangerine": 1, + "perl-List-Compare": 2, + } # All components should be built and module itself should be in "done" # or "ready" state. for build in models.ComponentBuild.query.filter_by(module_id=3).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES["done"], - models.BUILD_STATES["ready"]] + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] assert build.reused_component_id == reused_component_ids[build.package] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - def test_submit_build_reuse_all_without_build_macros(self, mocked_scm, mocked_get_user, - conf_system, dbg, hmsc): + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + def test_submit_build_reuse_all_without_build_macros( + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that we can reuse components even when the reused module does not have module-build-macros component. @@ -841,28 +1025,35 @@ class TestBuild: def on_build_cb(cls, artifact_name, source): raise ValueError("All components should be reused, not build.") + FakeModuleBuilder.on_build_cb = on_build_cb # Check that components are tagged after the batch is built. 
tag_groups = [] - tag_groups.append(set( - ['perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723', - 'tangerine-0.22-3.module+0+d027b723'])) + tag_groups.append( + set([ + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + "tangerine-0.22-3.module+0+d027b723", + ]) + ) def on_tag_artifacts_cb(cls, artifacts, dest_tag=True): if dest_tag is True: assert tag_groups.pop(0) == set(artifacts) + FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb buildtag_groups = [] - buildtag_groups.append(set( - ['perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723', - 'tangerine-0.22-3.module+0+d027b723'])) + buildtag_groups.append(set([ + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + "tangerine-0.22-3.module+0+d027b723", + ])) def on_buildroot_add_artifacts_cb(cls, artifacts, install): assert buildtag_groups.pop(0) == set(artifacts) + FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb msgs = [MBSModule("local module build", 3, 1)] @@ -872,13 +1063,15 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. for build in models.ComponentBuild.query.filter_by(module_id=3).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES["done"], - models.BUILD_STATES["ready"]] + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] assert build.package != "module-build-macros" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): """ Tests that resuming the build works even when previous batches @@ -888,33 +1081,36 @@ class TestBuild: submitted_time = now - timedelta(minutes=3) # Create a module in the failed state build_one = models.ModuleBuild() - build_one.name = 'testmodule' - build_one.stream = 'master' - build_one.version = '2820180205135154' - build_one.build_context = 'return_runtime_context' - build_one.ref_build_context = 'return_runtime_context' - build_one.runtime_context = '9c690d0e' - build_one.context = '9c690d0e' - build_one.state = models.BUILD_STATES['failed'] + build_one.name = "testmodule" + build_one.stream = "master" + build_one.version = "2820180205135154" + build_one.build_context = "return_runtime_context" + build_one.ref_build_context = "return_runtime_context" + build_one.runtime_context = "9c690d0e" + build_one.context = "9c690d0e" + build_one.state = models.BUILD_STATES["failed"] current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( - current_dir, '..', 'staged_data', 'formatted_testmodule.yaml') - with open(formatted_testmodule_yml_path, 'r') as f: + current_dir, "..", "staged_data", "formatted_testmodule.yaml") + with open(formatted_testmodule_yml_path, "r") as f: build_one.modulemd = to_text_type(f.read()) - build_one.koji_tag = 'module-testmodule-master-20180205135154-9c690d0e' - build_one.scmurl = 'https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453' + build_one.koji_tag = "module-testmodule-master-20180205135154-9c690d0e" + build_one.scmurl = 
"https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453" build_one.batch = 2 - build_one.owner = 'Homer J. Simpson' + build_one.owner = "Homer J. Simpson" build_one.time_submitted = submitted_time build_one.time_modified = now - build_one.rebuild_strategy = 'changed-and-after' + build_one.rebuild_strategy = "changed-and-after" # It went from init, to wait, to build, and then failed mbt_one = models.ModuleBuildTrace( - state_time=submitted_time, state=models.BUILD_STATES['init']) + state_time=submitted_time, state=models.BUILD_STATES["init"] + ) mbt_two = models.ModuleBuildTrace( - state_time=now - timedelta(minutes=2), state=models.BUILD_STATES['wait']) + state_time=now - timedelta(minutes=2), state=models.BUILD_STATES["wait"] + ) mbt_three = models.ModuleBuildTrace( - state_time=now - timedelta(minutes=1), state=models.BUILD_STATES['build']) + state_time=now - timedelta(minutes=1), state=models.BUILD_STATES["build"] + ) mbt_four = models.ModuleBuildTrace(state_time=now, state=build_one.state) build_one.module_builds_trace.append(mbt_one) build_one.module_builds_trace.append(mbt_two) @@ -922,40 +1118,41 @@ class TestBuild: build_one.module_builds_trace.append(mbt_four) # Successful component component_one = models.ComponentBuild() - component_one.package = 'perl-Tangerine' - component_one.format = 'rpms' - component_one.scmurl = 'https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master' - component_one.state = koji.BUILD_STATES['COMPLETE'] - component_one.nvr = 'perl-Tangerine-0:0.22-2.module+0+d027b723' + component_one.package = "perl-Tangerine" + component_one.format = "rpms" + component_one.scmurl = "https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master" + component_one.state = koji.BUILD_STATES["COMPLETE"] + component_one.nvr = "perl-Tangerine-0:0.22-2.module+0+d027b723" component_one.batch = 2 component_one.module_id = 2 - component_one.ref = '7e96446223f1ad84a26c7cf23d6591cd9f6326c6' + component_one.ref = "7e96446223f1ad84a26c7cf23d6591cd9f6326c6" component_one.tagged = True component_one.tagged_in_final = True # Failed component component_two = models.ComponentBuild() - component_two.package = 'perl-List-Compare' - component_two.format = 'rpms' + component_two.package = "perl-List-Compare" + component_two.format = "rpms" component_two.scmurl = \ - 'https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master' - component_two.state = koji.BUILD_STATES['FAILED'] + "https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master" + component_two.state = koji.BUILD_STATES["FAILED"] component_two.batch = 2 component_two.module_id = 2 # Component that isn't started yet component_three = models.ComponentBuild() - component_three.package = 'tangerine' - component_three.format = 'rpms' - component_three.scmurl = 'https://src.stg.fedoraproject.org/rpms/tangerine.git?#master' + component_three.package = "tangerine" + component_three.format = "rpms" + component_three.scmurl = "https://src.stg.fedoraproject.org/rpms/tangerine.git?#master" component_three.batch = 3 component_three.module_id = 2 # module-build-macros component_four = models.ComponentBuild() - component_four.package = 'module-build-macros' - component_four.format = 'rpms' - component_four.state = koji.BUILD_STATES['COMPLETE'] + component_four.package = "module-build-macros" + component_four.format = "rpms" + component_four.state = koji.BUILD_STATES["COMPLETE"] component_four.scmurl = ( - '/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1.' 
- 'module_testmodule_master_20170109091357.src.rpm') + "/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1." + "module_testmodule_master_20170109091357.src.rpm" + ) component_four.batch = 1 component_four.module_id = 2 component_four.tagged = True @@ -969,21 +1166,29 @@ class TestBuild: db.session.commit() db.session.expire_all() - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") # Resubmit the failed module - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() - components = models.ComponentBuild.query.filter_by( - module_id=module_build_id, batch=2).order_by(models.ComponentBuild.id).all() + components = ( + models.ComponentBuild.query.filter_by(module_id=module_build_id, batch=2) + .order_by(models.ComponentBuild.id) + .all() + ) # Make sure the build went from failed to wait - assert module_build.state == models.BUILD_STATES['wait'] - assert module_build.state_reason == 'Resubmitted by Homer J. Simpson' + assert module_build.state == models.BUILD_STATES["wait"] + assert module_build.state_reason == "Resubmitted by Homer J. Simpson" # Make sure the state was reset on the failed component assert components[1].state is None db.session.expire_all() @@ -996,14 +1201,17 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. 
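# ---------------------------------------------------------------------------
# A minimal, self-contained sketch of the resume step exercised above: on
# resubmission the module flips back to "wait" and the state of failed
# components is cleared so only they are rebuilt. This is an illustration,
# not the MBS implementation; the Component class and the koji-style state
# numbers (COMPLETE=1, FAILED=3) are assumptions made for the sketch.
KOJI_COMPLETE, KOJI_FAILED = 1, 3

class Component(object):
    def __init__(self, package, state):
        self.package = package
        self.state = state

def reset_failed_components(components):
    # The scheduler treats state=None as "not built yet", so clearing the
    # failed rows makes it retry exactly those components.
    for c in components:
        if c.state == KOJI_FAILED:
            c.state = None

comps = [Component("perl-Tangerine", KOJI_COMPLETE),
         Component("perl-List-Compare", KOJI_FAILED)]
reset_failed_components(comps)
assert comps[0].state == KOJI_COMPLETE  # completed work is kept
assert comps[1].state is None           # the failed component is retried
# The loop below then verifies that everything completed after the resumed run.
# ---------------------------------------------------------------------------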
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES['done'], - models.BUILD_STATES['ready']] + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_resume_recover_orphaned_macros( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that resuming the build works when module-build-macros is orphaned but marked as failed in the database """ @@ -1013,35 +1221,35 @@ class TestBuild: submitted_time = now - timedelta(minutes=3) # Create a module in the failed state build_one = models.ModuleBuild() - build_one.name = 'testmodule' - build_one.stream = 'master' - build_one.version = '2820180205135154' - build_one.build_context = 'return_runtime_context' - build_one.ref_build_context = 'return_runtime_context' - build_one.runtime_context = '9c690d0e' - build_one.state = models.BUILD_STATES['failed'] + build_one.name = "testmodule" + build_one.stream = "master" + build_one.version = "2820180205135154" + build_one.build_context = "return_runtime_context" + build_one.ref_build_context = "return_runtime_context" + build_one.runtime_context = "9c690d0e" + build_one.state = models.BUILD_STATES["failed"] # this is not actually calculated here, just a fixed value chosen to # match the context computed from the expanded test mmd - build_one.context = '9c690d0e' + build_one.context = "9c690d0e" current_dir = os.path.dirname(__file__) formatted_testmodule_yml_path = os.path.join( - current_dir, '..', 'staged_data', 'formatted_testmodule.yaml') - with open(formatted_testmodule_yml_path, 'r') as f: + current_dir, "..", "staged_data", "formatted_testmodule.yaml") + with open(formatted_testmodule_yml_path, "r") as f: build_one.modulemd = to_text_type(f.read()) - build_one.koji_tag = 'module-testmodule-master-20180205135154-6ef9a711' - build_one.scmurl = 'https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453' + build_one.koji_tag = "module-testmodule-master-20180205135154-6ef9a711" + build_one.scmurl = "https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453" build_one.batch = 2 - build_one.owner = 'Homer J. Simpson' + build_one.owner = "Homer J. 
Simpson" build_one.time_submitted = submitted_time build_one.time_modified = now - build_one.rebuild_strategy = 'changed-and-after' + build_one.rebuild_strategy = "changed-and-after" # It went from init, to wait, to build, and then failed mbt_one = models.ModuleBuildTrace( - state_time=submitted_time, state=models.BUILD_STATES['init']) + state_time=submitted_time, state=models.BUILD_STATES["init"]) mbt_two = models.ModuleBuildTrace( - state_time=now - timedelta(minutes=2), state=models.BUILD_STATES['wait']) + state_time=now - timedelta(minutes=2), state=models.BUILD_STATES["wait"]) mbt_three = models.ModuleBuildTrace( - state_time=now - timedelta(minutes=1), state=models.BUILD_STATES['build']) + state_time=now - timedelta(minutes=1), state=models.BUILD_STATES["build"]) mbt_four = models.ModuleBuildTrace(state_time=now, state=build_one.state) build_one.module_builds_trace.append(mbt_one) build_one.module_builds_trace.append(mbt_two) @@ -1049,32 +1257,33 @@ class TestBuild: build_one.module_builds_trace.append(mbt_four) # Components that haven't started yet component_one = models.ComponentBuild() - component_one.package = 'perl-Tangerine' - component_one.format = 'rpms' - component_one.scmurl = 'https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master' + component_one.package = "perl-Tangerine" + component_one.format = "rpms" + component_one.scmurl = "https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master" component_one.batch = 2 component_one.module_id = 2 component_two = models.ComponentBuild() - component_two.package = 'perl-List-Compare' - component_two.format = 'rpms' + component_two.package = "perl-List-Compare" + component_two.format = "rpms" component_two.scmurl = \ - 'https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master' + "https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master" component_two.batch = 2 component_two.module_id = 2 component_three = models.ComponentBuild() - component_three.package = 'tangerine' - component_three.format = 'rpms' - component_three.scmurl = 'https://src.stg.fedoraproject.org/rpms/tangerine.git?#master' + component_three.package = "tangerine" + component_three.format = "rpms" + component_three.scmurl = "https://src.stg.fedoraproject.org/rpms/tangerine.git?#master" component_three.batch = 3 component_three.module_id = 2 # Failed module-build-macros component_four = models.ComponentBuild() - component_four.package = 'module-build-macros' - component_four.format = 'rpms' - component_four.state = koji.BUILD_STATES['FAILED'] + component_four.package = "module-build-macros" + component_four.format = "rpms" + component_four.state = koji.BUILD_STATES["FAILED"] component_four.scmurl = ( - '/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1.' - 'module_testmodule_master_20180205135154.src.rpm') + "/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1." 
+ "module_testmodule_master_20180205135154.src.rpm" + ) component_four.batch = 1 component_four.module_id = 2 component_four.build_time_only = True @@ -1087,18 +1296,23 @@ class TestBuild: db.session.commit() db.session.expire_all() - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', '7fea453') + FakeSCM(mocked_scm, "testmodule", "testmodule.yaml", "7fea453") # Resubmit the failed module - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#7fea453'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#7fea453", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # Make sure the build went from failed to wait - assert module_build.state == models.BUILD_STATES['wait'] - assert module_build.state_reason == 'Resubmitted by Homer J. Simpson' + assert module_build.state == models.BUILD_STATES["wait"] + assert module_build.state_reason == "Resubmitted by Homer J. Simpson" # Make sure the state was reset on the failed component for c in module_build.component_builds: assert c.state is None @@ -1112,51 +1326,67 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES['done'], - models.BUILD_STATES['ready']] + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_resume_failed_init( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that resuming the build works when the build failed during the init step """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") stop = module_build_service.scheduler.make_simple_stop_condition(db.session) - with patch('module_build_service.utils.submit.format_mmd') as mock_format_mmd: - mock_format_mmd.side_effect = Forbidden( - 'Custom component repositories aren\'t allowed.') - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + with patch("module_build_service.utils.submit.format_mmd") as mock_format_mmd: + mock_format_mmd.side_effect = Forbidden("Custom component repositories aren't allowed.") + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) # Run the backend so that it fails in the "init" handler 
module_build_service.scheduler.main([], stop) cleanup_moksha() - module_build_id = json.loads(rv.data)['id'] + module_build_id = json.loads(rv.data)["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() - assert module_build.state == models.BUILD_STATES['failed'] - assert module_build.state_reason == 'Custom component repositories aren\'t allowed.' + assert module_build.state == models.BUILD_STATES["failed"] + assert module_build.state_reason == "Custom component repositories aren't allowed." assert len(module_build.module_builds_trace) == 2 - assert module_build.module_builds_trace[0].state == models.BUILD_STATES['init'] - assert module_build.module_builds_trace[1].state == models.BUILD_STATES['failed'] + assert module_build.module_builds_trace[0].state == models.BUILD_STATES["init"] + assert module_build.module_builds_trace[1].state == models.BUILD_STATES["failed"] # Resubmit the failed module - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', - 'scmurl': ('https://src.stg.fedoraproject.org/modules/testmodule.git?' - '#620ec77321b2ea7b0d67d82992dda3e1d67055b4')})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?" + "#620ec77321b2ea7b0d67d82992dda3e1d67055b4" + ), + }), + ) module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() - components = models.ComponentBuild.query.filter_by( - module_id=module_build_id, batch=2).order_by(models.ComponentBuild.id).all() + components = ( + models.ComponentBuild.query.filter_by(module_id=module_build_id, batch=2) + .order_by(models.ComponentBuild.id) + .all() + ) # Make sure the build went from failed to init - assert module_build.state == models.BUILD_STATES['init'] - assert module_build.state_reason == 'Resubmitted by Homer J. Simpson' + assert module_build.state == models.BUILD_STATES["init"] + assert module_build.state_reason == "Resubmitted by Homer J. Simpson" # Make sure there are no components assert components == [] db.session.expire_all() @@ -1167,147 +1397,175 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. 
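# ---------------------------------------------------------------------------
# The trace assertions above rely on ModuleBuildTrace being an append-only
# log of state transitions: a failed "init" leaves exactly two entries. A
# tiny standalone analogue of that record-keeping (illustrative only):
from datetime import datetime

class TracedBuild(object):
    def __init__(self):
        self.trace = []

    def transition(self, state):
        # one row per transition, never rewritten
        self.trace.append((datetime.utcnow(), state))

b = TracedBuild()
b.transition("init")
b.transition("failed")
assert [state for _, state in b.trace] == ["init", "failed"]
# The loop below performs the final terminal-state check on every component.
# ---------------------------------------------------------------------------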
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES['done'], - models.BUILD_STATES['ready']] + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_resume_init_fail( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that resuming the build fails when the build is in init state """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") # Post so a module is in the init phase - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) assert rv.status_code == 201 # Run the backend stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main([], stop) # Post again and make sure it fails - rv2 = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv2 = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv2.data) expected = { - 'error': 'Conflict', - 'message': ('Module (state=5) already exists. Only a new build, resubmission of a ' - 'failed build or build against new buildrequirements is allowed.'), - 'status': 409 + "error": "Conflict", + "message": ( + "Module (state=5) already exists. Only a new build, resubmission of a " + "failed build or build against new buildrequirements is allowed." 
+ ), + "status": 409, } assert data == expected - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) def test_submit_scratch_vs_normal( - self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that submitting a scratch build with the same NSV as a previously completed normal build succeeds and both have expected contexts """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") # Post so a module is in the init phase - post_url = '/module-build-service/1/module-builds/' + post_url = "/module-build-service/1/module-builds/" post_data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4', - 'scratch': False, + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + "scratch": False, } rv = self.client.post(post_url, data=json.dumps(post_data)) assert rv.status_code == 201 data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # make sure normal build has expected context without a suffix - assert module_build.context == '9c690d0e' + assert module_build.context == "9c690d0e" # Run the backend stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main([], stop) # Post again as a scratch build and make sure it succeeds - post_data['scratch'] = True + post_data["scratch"] = True rv2 = self.client.post(post_url, data=json.dumps(post_data)) assert rv2.status_code == 201 data = json.loads(rv2.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # make sure scratch build has expected context with unique suffix - assert module_build.context == '9c690d0e_1' + assert module_build.context == "9c690d0e_1" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) def test_submit_normal_vs_scratch( - self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that submitting a normal build with the same NSV as a previously completed scratch build succeeds and both have expected contexts """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + 
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4" + ) # Post so a scratch module build is in the init phase - post_url = '/module-build-service/1/module-builds/' + post_url = "/module-build-service/1/module-builds/" post_data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4', - 'scratch': True, + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + "scratch": True, } rv = self.client.post(post_url, data=json.dumps(post_data)) assert rv.status_code == 201 data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # make sure scratch build has expected context with unique suffix - assert module_build.context == '9c690d0e_1' + assert module_build.context == "9c690d0e_1" # Run the backend stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main([], stop) # Post again as a non-scratch build and make sure it succeeds - post_data['scratch'] = False + post_data["scratch"] = False rv2 = self.client.post(post_url, data=json.dumps(post_data)) assert rv2.status_code == 201 data = json.loads(rv2.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # make sure normal build has expected context without suffix - assert module_build.context == '9c690d0e' + assert module_build.context == "9c690d0e" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) def test_submit_scratch_vs_scratch( - self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that submitting a scratch build with the same NSV as a previously completed scratch build succeeds and both have expected contexts """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") # Post so a scratch module build is in the init phase - post_url = '/module-build-service/1/module-builds/' + post_url = "/module-build-service/1/module-builds/" post_data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4', - 'scratch': True, + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + "scratch": True, } rv = self.client.post(post_url, data=json.dumps(post_data)) assert rv.status_code == 201 data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # make sure first scratch build has expected context with unique suffix - assert module_build.context == '9c690d0e_1' 
+ assert module_build.context == "9c690d0e_1" # Run the backend stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main([], stop) @@ -1315,111 +1573,129 @@ class TestBuild: rv2 = self.client.post(post_url, data=json.dumps(post_data)) assert rv2.status_code == 201 data = json.loads(rv2.data) - module_build_id = data['id'] + module_build_id = data["id"] module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one() # make sure second scratch build has expected context with unique suffix - assert module_build.context == '9c690d0e_2' + assert module_build.context == "9c690d0e_2" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - def test_submit_build_repo_regen_not_started_batch(self, mocked_scm, mocked_get_user, - conf_system, dbg, hmsc): + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + def test_submit_build_repo_regen_not_started_batch( + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Tests that if MBS starts a new batch, the concurrent component threshold is met before a build can start, and an unexpected repo regen occurs, the build will not fail. See: https://pagure.io/fm-orchestrator/issue/864 """ - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] def _stop_condition(message): # Stop the backend if the module batch is 2 (where we simulate the concurrent threshold # being met). For safety, also stop the backend if the module erroneously completes. 
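# ---------------------------------------------------------------------------
# A compact sketch of how these stop conditions work: scheduler.main(msgs,
# stop) keeps consuming messages until stop(message) returns True, so the
# tests build predicates that re-check the database after every message.
# The helper and state values below are illustrative assumptions, not the
# make_simple_stop_condition implementation.
BUILD_STATES = {"init": 0, "wait": 1, "build": 2, "done": 3, "failed": 4, "ready": 5}

def make_batch_stop_condition(fetch, target_batch):
    # fetch() returns the build's current (batch, state) from the database
    terminal = {BUILD_STATES["done"], BUILD_STATES["ready"], BUILD_STATES["failed"]}
    def stop(message):
        batch, state = fetch()
        return batch == target_batch or state in terminal
    return stop

state_box = {"batch": 1, "state": BUILD_STATES["build"]}
stop = make_batch_stop_condition(lambda: (state_box["batch"], state_box["state"]), 2)
assert stop(None) is False   # still in batch 1: keep consuming
state_box["batch"] = 2
assert stop(None) is True    # reached batch 2: stop the backend
# The _stop_condition body continues below with exactly this kind of check.
# ---------------------------------------------------------------------------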
module = db.session.query(models.ModuleBuild).get(module_build_id) - return module.batch == 2 or module.state >= models.BUILD_STATES['done'] + return module.batch == 2 or module.state >= models.BUILD_STATES["done"] - with patch('module_build_service.utils.batches.at_concurrent_component_threshold') as \ - mock_acct: + with patch( + "module_build_service.utils.batches.at_concurrent_component_threshold" + ) as mock_acct: # Once we get to batch 2, then simulate the concurrent threshold being met def _at_concurrent_component_threshold(config, session): return db.session.query(models.ModuleBuild).get(module_build_id).batch == 2 + mock_acct.side_effect = _at_concurrent_component_threshold module_build_service.scheduler.main([], _stop_condition) # Only module-build-macros should be built - for build in db.session.query(models.ComponentBuild).filter_by( - module_id=module_build_id).all(): - if build.package == 'module-build-macros': - assert build.state == koji.BUILD_STATES['COMPLETE'] + for build in ( + db.session.query(models.ComponentBuild).filter_by(module_id=module_build_id).all() + ): + if build.package == "module-build-macros": + assert build.state == koji.BUILD_STATES["COMPLETE"] else: assert build.state is None - assert build.module_build.state == models.BUILD_STATES['build'] + assert build.module_build.state == models.BUILD_STATES["build"] # Simulate a random repo regen message that MBS didn't expect cleanup_moksha() module = db.session.query(models.ModuleBuild).get(module_build_id) - msgs = [module_build_service.messaging.KojiRepoChange( - msg_id='a faked internal message', repo_tag=module.koji_tag + '-build')] + msgs = [ + module_build_service.messaging.KojiRepoChange( + msg_id="a faked internal message", repo_tag=module.koji_tag + "-build" + ) + ] db.session.expire_all() # Stop after processing the seeded message module_build_service.scheduler.main(msgs, lambda message: True) # Make sure the module build didn't fail so that the poller can resume it later module = db.session.query(models.ModuleBuild).get(module_build_id) - assert module.state == models.BUILD_STATES['build'] + assert module.state == models.BUILD_STATES["build"] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_br_metadata_only_module( - self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc): + self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc + ): """ Test that when a build is submitted with a buildrequire without a Koji tag, MBS doesn't supply it as a dependency to the builder. 
""" metadata_mmd = module_build_service.utils.load_mmd_file( - path.join(base_dir, 'staged_data', 'build_metadata_module.yaml')) + path.join(base_dir, "staged_data", "build_metadata_module.yaml") + ) module_build_service.utils.import_mmd(db.session, metadata_mmd) - FakeSCM(mocked_scm, 'testmodule', 'testmodule_br_metadata_module.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') - post_url = '/module-build-service/1/module-builds/' + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_br_metadata_module.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) + post_url = "/module-build-service/1/module-builds/" post_data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", } rv = self.client.post(post_url, data=json.dumps(post_data)) assert rv.status_code == 201 data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] def on_buildroot_add_repos_cb(cls, dependencies): # Make sure that the metadata module is not present since it doesn't have a Koji tag - assert set(dependencies.keys()) == set(['module-f28-build']) + assert set(dependencies.keys()) == set(["module-f28-build"]) FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main([], stop) module = db.session.query(models.ModuleBuild).get(module_build_id) - assert module.state == models.BUILD_STATES['ready'] + assert module.state == models.BUILD_STATES["ready"] -@patch("module_build_service.config.Config.system", - new_callable=PropertyMock, return_value="testlocal") +@patch( + "module_build_service.config.Config.system", new_callable=PropertyMock, return_value="testlocal" +) class TestLocalBuild: - def setup_method(self, test_method): FakeModuleBuilder.on_build_cb = None - FakeModuleBuilder.backend = 'testlocal' + FakeModuleBuilder.backend = "testlocal" GenericBuilder.register_backend_class(FakeModuleBuilder) self.client = app.test_client() clean_database() @@ -1433,48 +1709,54 @@ class TestLocalBuild: except Exception: pass - @patch('module_build_service.scheduler.handlers.modules.handle_stream_collision_modules') - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch("module_build_service.config.Config.mock_resultsdir", - new_callable=PropertyMock, - return_value=path.join(base_dir, 'staged_data', "local_builds")) + @patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules") + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.mock_resultsdir", + new_callable=PropertyMock, + return_value=path.join(base_dir, "staged_data", "local_builds"), + ) def test_submit_build_local_dependency( - self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc): + self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc + ): """ Tests local module build dependency. 
""" with app.app_context(): module_build_service.utils.load_local_builds(["platform"]) - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) rv = self.client.post( - '/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4", + }), + ) data = json.loads(rv.data) - module_build_id = data['id'] + module_build_id = data["id"] # Local base-runtime has changed profiles, so we can detect we use # the local one and not the main one. - FakeModuleBuilder.DEFAULT_GROUPS = { - 'srpm-build': - set(['bar']), - 'build': - set(['foo'])} + FakeModuleBuilder.DEFAULT_GROUPS = {"srpm-build": set(["bar"]), "build": set(["foo"])} msgs = [] - stop = module_build_service.scheduler.make_simple_stop_condition( - db.session) + stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) # All components should be built and module itself should be in "done" # or "ready" state. - for build in models.ComponentBuild.query.filter_by( - module_id=module_build_id).all(): - assert build.state == koji.BUILD_STATES['COMPLETE'] - assert build.module_build.state in [models.BUILD_STATES["done"], - models.BUILD_STATES["ready"]] + for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all(): + assert build.state == koji.BUILD_STATES["COMPLETE"] + assert build.module_build.state in [ + models.BUILD_STATES["done"], + models.BUILD_STATES["ready"], + ] diff --git a/tests/test_builder/test_base.py b/tests/test_builder/test_base.py index e4e51531..3078170e 100644 --- a/tests/test_builder/test_base.py +++ b/tests/test_builder/test_base.py @@ -33,29 +33,22 @@ from mock import patch class TestGenericBuilder: - def setup_method(self, test_method): init_data(1) self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one() - @patch('module_build_service.resolver.DBResolver') - @patch('module_build_service.resolver.GenericResolver') + @patch("module_build_service.resolver.DBResolver") + @patch("module_build_service.resolver.GenericResolver") def test_default_buildroot_groups_cache(self, generic_resolver, resolver): - mbs_groups = { - "buildroot": [], - "srpm-buildroot": [] - } + mbs_groups = {"buildroot": [], "srpm-buildroot": []} resolver = mock.MagicMock() - resolver.backend = 'mbs' + resolver.backend = "mbs" resolver.resolve_profiles.return_value = mbs_groups - expected_groups = { - "build": [], - "srpm-build": [] - } + expected_groups = {"build": [], "srpm-build": []} - with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): # Call default_buildroot_groups, the result should be cached. ret = GenericBuilder.default_buildroot_groups(db.session, self.module) assert ret == expected_groups @@ -64,14 +57,14 @@ class TestGenericBuilder: # Now try calling it again to verify resolve_profiles is not called, # because it is cached. 
- with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): ret = GenericBuilder.default_buildroot_groups(db.session, self.module) assert ret == expected_groups resolver.resolve_profiles.assert_not_called() resolver.resolve_profiles.reset_mock() # And now try clearing the cache and call it again. - with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): GenericBuilder.clear_cache(self.module) ret = GenericBuilder.default_buildroot_groups(db.session, self.module) assert ret == expected_groups diff --git a/tests/test_builder/test_builder_utils.py b/tests/test_builder/test_builder_utils.py index 6aa56d8c..3b8b470c 100644 --- a/tests/test_builder/test_builder_utils.py +++ b/tests/test_builder/test_builder_utils.py @@ -30,110 +30,114 @@ from tests import conf class TestBuilderUtils: - - @patch('requests.get') - @patch('koji.ClientSession') - @patch('module_build_service.builder.utils.execute_cmd') + @patch("requests.get") + @patch("koji.ClientSession") + @patch("module_build_service.builder.utils.execute_cmd") def test_create_local_repo_from_koji_tag(self, mock_exec_cmd, mock_koji_session, mock_get): session = Mock() rpms = [ { - 'arch': 'src', - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'size': 6890, - 'version': '0.1' + "arch": "src", + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "size": 6890, + "version": "0.1", }, { - 'arch': 'noarch', - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'size': 6890, - 'version': '0.1' + "arch": "noarch", + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "size": 6890, + "version": "0.1", }, { - 'arch': 'x86_64', - 'build_id': 875636, - 'name': 'ed-debuginfo', - 'release': '2.module_bd6e0eb1', - 'size': 81438, - 'version': '1.14.1' + "arch": "x86_64", + "build_id": 875636, + "name": "ed-debuginfo", + "release": "2.module_bd6e0eb1", + "size": 81438, + "version": "1.14.1", }, { - 'arch': 'x86_64', - 'build_id': 875636, - 'name': 'ed', - 'release': '2.module_bd6e0eb1', - 'size': 80438, - 'version': '1.14.1' + "arch": "x86_64", + "build_id": 875636, + "name": "ed", + "release": "2.module_bd6e0eb1", + "size": 80438, + "version": "1.14.1", }, { - 'arch': 'x86_64', - 'build_id': 875640, - 'name': 'mksh-debuginfo', - 'release': '2.module_bd6e0eb1', - 'size': 578774, - 'version': '54' + "arch": "x86_64", + "build_id": 875640, + "name": "mksh-debuginfo", + "release": "2.module_bd6e0eb1", + "size": 578774, + "version": "54", }, { - 'arch': 'x86_64', - 'build_id': 875640, - 'name': 'mksh', - 'release': '2.module_bd6e0eb1', - 'size': 267042, - 'version': '54' - } + "arch": "x86_64", + "build_id": 875640, + "name": "mksh", + "release": "2.module_bd6e0eb1", + "size": 267042, + "version": "54", + }, ] builds = [ { - 'build_id': 875640, - 'name': 'mksh', - 'release': '2.module_bd6e0eb1', - 'version': '54', - 'volume_name': 'prod' + "build_id": 875640, + "name": "mksh", + "release": "2.module_bd6e0eb1", + "version": "54", + "volume_name": "prod", }, { - 'build_id': 875636, - 'name': 'ed', - 'release': '2.module_bd6e0eb1', - 'version': '1.14.1', - 'volume_name': 'prod' + "build_id": 875636, + "name": "ed", + "release": "2.module_bd6e0eb1", + "version": "1.14.1", + "volume_name": "prod", }, 
{ - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1', - 'volume_name': 'prod' - } + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", + "volume_name": "prod", + }, ] session.listTaggedRPMS.return_value = (rpms, builds) - session.opts = {'topurl': 'https://kojipkgs.stg.fedoraproject.org/'} + session.opts = {"topurl": "https://kojipkgs.stg.fedoraproject.org/"} mock_koji_session.return_value = session - tag = 'module-testmodule-master-20170405123740-build' + tag = "module-testmodule-master-20170405123740-build" temp_dir = tempfile.mkdtemp() try: utils.create_local_repo_from_koji_tag(conf, tag, temp_dir) finally: shutil.rmtree(temp_dir) - url_one = ('https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/module-build-macros/' - '0.1/1.module_92011fe6/noarch/module-build-macros-0.1-1.module_92011fe6.noarch.' - 'rpm') - url_two = ('https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/ed/1.14.1/' - '2.module_bd6e0eb1/x86_64/ed-1.14.1-2.module_bd6e0eb1.x86_64.rpm') - url_three = ('https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/mksh/54/' - '2.module_bd6e0eb1/x86_64/mksh-54-2.module_bd6e0eb1.x86_64.rpm') + url_one = ( + "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/module-build-macros/" + "0.1/1.module_92011fe6/noarch/module-build-macros-0.1-1.module_92011fe6.noarch.rpm" + ) + url_two = ( + "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/ed/1.14.1/" + "2.module_bd6e0eb1/x86_64/ed-1.14.1-2.module_bd6e0eb1.x86_64.rpm" + ) + url_three = ( + "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/mksh/54/" + "2.module_bd6e0eb1/x86_64/mksh-54-2.module_bd6e0eb1.x86_64.rpm" + ) expected_calls = [ call(url_one, stream=True, timeout=60), call(url_two, stream=True, timeout=60), - call(url_three, stream=True, timeout=60) + call(url_three, stream=True, timeout=60), ] for expected_call in expected_calls: assert expected_call in mock_get.call_args_list diff --git a/tests/test_builder/test_koji.py b/tests/test_builder/test_koji.py index afc59c91..dcfb4ec5 100644 --- a/tests/test_builder/test_koji.py +++ b/tests/test_builder/test_koji.py @@ -45,47 +45,61 @@ from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder class FakeKojiModuleBuilder(KojiModuleBuilder): - @module_build_service.utils.retry(wait_on=(xmlrpclib.ProtocolError, koji.GenericError)) def get_session(self, config, login=True): koji_session = MagicMock() - koji_session.getRepo.return_value = {'create_event': 'fake event'} + koji_session.getRepo.return_value = {"create_event": "fake event"} FakeKojiModuleBuilder.tags = { "module-foo": { - "name": "module-foo", "id": 1, "arches": "x86_64", "locked": False, - "perm": "admin"}, + "name": "module-foo", + "id": 1, + "arches": "x86_64", + "locked": False, + "perm": "admin", + }, "module-foo-build": { - "name": "module-foo-build", "id": 2, "arches": "x86_64", "locked": False, - "perm": "admin"} + "name": "module-foo-build", + "id": 2, + "arches": "x86_64", + "locked": False, + "perm": "admin", + }, } def _get_tag(name): return FakeKojiModuleBuilder.tags.get(name, {}) + koji_session.getTag = _get_tag def _createTag(name): FakeKojiModuleBuilder.tags[name] = { - "name": name, "id": len(FakeKojiModuleBuilder.tags) + 1, "arches": "x86_64", - "locked": False, "perm": "admin"} + "name": name, + "id": len(FakeKojiModuleBuilder.tags) + 1, + "arches": "x86_64", + "locked": False, + "perm": "admin", + } + koji_session.createTag = _createTag def 
_getBuildTarget(name): return { - "build_tag_name": self.module_build_tag['name'], - "dest_tag_name": self.module_tag['name'] + "build_tag_name": self.module_build_tag["name"], + "dest_tag_name": self.module_tag["name"], } + koji_session.getBuildTarget = _getBuildTarget def _getAllPerms(*args, **kwargs): return [{"id": 1, "name": "admin"}] + koji_session.getAllPerms = _getAllPerms return koji_session class TestKojiBuilder: - def setup_method(self, test_method): init_data(1) self.config = mock.Mock() @@ -93,11 +107,14 @@ class TestKojiBuilder: self.config.koji_repository_url = conf.koji_repository_url self.module = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() - self.p_read_config = patch('koji.read_config', return_value={ - 'authtype': 'kerberos', - 'timeout': 60, - 'server': 'http://koji.example.com/' - }) + self.p_read_config = patch( + "koji.read_config", + return_value={ + "authtype": "kerberos", + "timeout": 60, + "server": "http://koji.example.com/", + }, + ) self.mock_read_config = self.p_read_config.start() def teardown_method(self, test_method): @@ -108,27 +125,30 @@ class TestKojiBuilder: that we do nothing gracefully. """ repo = module_build_service.builder.GenericBuilder.tag_to_repo( - "koji", self.config, - "module-base-runtime-0.25-9", - "x86_64") - assert repo == ("https://kojipkgs.stg.fedoraproject.org/repos" - "/module-base-runtime-0.25-9/latest/x86_64") + "koji", self.config, "module-base-runtime-0.25-9", "x86_64" + ) + assert repo == ( + "https://kojipkgs.stg.fedoraproject.org/repos" + "/module-base-runtime-0.25-9/latest/x86_64" + ) def test_recover_orphaned_artifact_when_tagged(self): """ Test recover_orphaned_artifact when the artifact is found and tagged in both tags """ - builder = FakeKojiModuleBuilder(owner=self.module.owner, - module=self.module, - config=conf, - tag_name='module-foo', - components=[]) + builder = FakeKojiModuleBuilder( + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", + components=[], + ) builder.module_tag = {"name": "module-foo", "id": 1} builder.module_build_tag = {"name": "module-foo-build", "id": 2} # Set listTagged to return test data - build_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, 'build_id': 91}] - dest_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, 'build_id': 91}] + build_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, "build_id": 91}] + dest_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, "build_id": 91}] builder.koji_session.listTagged.side_effect = [build_tagged, dest_tagged] module_build = module_build_service.models.ModuleBuild.query.get(4) component_build = module_build.component_builds[0] @@ -141,48 +161,43 @@ class TestKojiBuilder: assert type(actual[0]) == module_build_service.messaging.KojiBuildChange assert actual[0].build_id == 91 assert actual[0].task_id == 12345 - assert actual[0].build_new_state == koji.BUILD_STATES['COMPLETE'] - assert actual[0].build_name == 'rubygem-rails' - assert actual[0].build_version == '1.0' - assert actual[0].build_release == '1.module+e0095747' + assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"] + assert actual[0].build_name == "rubygem-rails" + assert actual[0].build_version == "1.0" + assert actual[0].build_release == "1.module+e0095747" assert actual[0].module_build_id == 4 assert type(actual[1]) == module_build_service.messaging.KojiTagChange - assert actual[1].tag == 'module-foo-build' - assert actual[1].artifact == 'rubygem-rails' + 
assert actual[1].tag == "module-foo-build" + assert actual[1].artifact == "rubygem-rails" assert type(actual[2]) == module_build_service.messaging.KojiTagChange - assert actual[2].tag == 'module-foo' - assert actual[2].artifact == 'rubygem-rails' - assert component_build.state == koji.BUILD_STATES['COMPLETE'] + assert actual[2].tag == "module-foo" + assert actual[2].artifact == "rubygem-rails" + assert component_build.state == koji.BUILD_STATES["COMPLETE"] assert component_build.task_id == 12345 - assert component_build.state_reason == 'Found existing build' + assert component_build.state_reason == "Found existing build" assert builder.koji_session.tagBuild.call_count == 0 def test_recover_orphaned_artifact_when_untagged(self): """ Tests recover_orphaned_artifact when the build is found but untagged """ - builder = FakeKojiModuleBuilder(owner=self.module.owner, - module=self.module, - config=conf, - tag_name='module-foo', - components=[]) + builder = FakeKojiModuleBuilder( + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", + components=[], + ) builder.module_tag = {"name": "module-foo", "id": 1} builder.module_build_tag = {"name": "module-foo-build", "id": 2} - dist_tag = 'module+2+b8661ee4' + dist_tag = "module+2+b8661ee4" # Set listTagged to return test data builder.koji_session.listTagged.side_effect = [[], [], []] - untagged = [{ - "id": 9000, - "name": "foo", - "version": "1.0", - "release": "1.{0}".format(dist_tag), - }] + untagged = [ + {"id": 9000, "name": "foo", "version": "1.0", "release": "1.{0}".format(dist_tag)} + ] builder.koji_session.untaggedBuilds.return_value = untagged - build_info = { - 'nvr': 'foo-1.0-1.{0}'.format(dist_tag), - 'task_id': 12345, - 'build_id': 91 - } + build_info = {"nvr": "foo-1.0-1.{0}".format(dist_tag), "task_id": 12345, "build_id": 91} builder.koji_session.getBuild.return_value = build_info module_build = module_build_service.models.ModuleBuild.query.get(4) component_build = module_build.component_builds[0] @@ -195,24 +210,26 @@ class TestKojiBuilder: assert type(actual[0]) == module_build_service.messaging.KojiBuildChange assert actual[0].build_id == 91 assert actual[0].task_id == 12345 - assert actual[0].build_new_state == koji.BUILD_STATES['COMPLETE'] - assert actual[0].build_name == 'rubygem-rails' - assert actual[0].build_version == '1.0' - assert actual[0].build_release == '1.{0}'.format(dist_tag) + assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"] + assert actual[0].build_name == "rubygem-rails" + assert actual[0].build_version == "1.0" + assert actual[0].build_release == "1.{0}".format(dist_tag) assert actual[0].module_build_id == 4 - assert component_build.state == koji.BUILD_STATES['COMPLETE'] + assert component_build.state == koji.BUILD_STATES["COMPLETE"] assert component_build.task_id == 12345 - assert component_build.state_reason == 'Found existing build' - builder.koji_session.tagBuild.assert_called_once_with(2, 'foo-1.0-1.{0}'.format(dist_tag)) + assert component_build.state_reason == "Found existing build" + builder.koji_session.tagBuild.assert_called_once_with(2, "foo-1.0-1.{0}".format(dist_tag)) def test_recover_orphaned_artifact_when_nothing_exists(self): """ Test recover_orphaned_artifact when the build is not found """ - builder = FakeKojiModuleBuilder(owner=self.module.owner, - module=self.module, - config=conf, - tag_name='module-foo', - components=[]) + builder = FakeKojiModuleBuilder( + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", 
+ components=[], + ) builder.module_tag = {"name": "module-foo", "id": 1} builder.module_build_tag = {"name": "module-foo-build", "id": 2} @@ -220,10 +237,7 @@ class TestKojiBuilder: # Set listTagged to return nothing... tagged = [] builder.koji_session.listTagged.return_value = tagged - untagged = [{ - "nvr": "foo-1.0-1.nope", - "release": "nope", - }] + untagged = [{"nvr": "foo-1.0-1.nope", "release": "nope"}] builder.koji_session.untaggedBuilds.return_value = untagged module_build = module_build_service.models.ModuleBuild.query.get(4) component_build = module_build.component_builds[0] @@ -236,24 +250,25 @@ class TestKojiBuilder: # Make sure nothing erroneous gets tag assert builder.koji_session.tagBuild.call_count == 0 - @patch('koji.util') + @patch("koji.util") def test_buildroot_ready(self, mocked_kojiutil): - attrs = {'checkForBuilds.return_value': None, - 'checkForBuilds.side_effect': IOError} + attrs = {"checkForBuilds.return_value": None, "checkForBuilds.side_effect": IOError} mocked_kojiutil.configure_mock(**attrs) - fake_kmb = FakeKojiModuleBuilder(owner=self.module.owner, - module=self.module, - config=conf, - tag_name='module-nginx-1.2', - components=[]) - fake_kmb.module_target = {'build_tag': 'module-fake_tag'} + fake_kmb = FakeKojiModuleBuilder( + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-nginx-1.2", + components=[], + ) + fake_kmb.module_target = {"build_tag": "module-fake_tag"} with pytest.raises(IOError): fake_kmb.buildroot_ready() assert mocked_kojiutil.checkForBuilds.call_count == 3 - @pytest.mark.parametrize('blocklist', [False, True]) + @pytest.mark.parametrize("blocklist", [False, True]) def test_tagging_already_tagged_artifacts(self, blocklist): """ Tests that buildroot_add_artifacts and tag_artifacts do not try to @@ -266,18 +281,19 @@ class TestKojiBuilder: mmd.set_xmd(glib.dict_values(xmd)) self.module.modulemd = to_text_type(mmd.dumps()) - builder = FakeKojiModuleBuilder(owner=self.module.owner, - module=self.module, - config=conf, - tag_name='module-nginx-1.2', - components=[]) + builder = FakeKojiModuleBuilder( + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-nginx-1.2", + components=[], + ) builder.module_tag = {"name": "module-foo", "id": 1} builder.module_build_tag = {"name": "module-foo-build", "id": 2} # Set listTagged to return test data - tagged = [{"nvr": "foo-1.0-1.module_e0095747"}, - {"nvr": "bar-1.0-1.module_e0095747"}] + tagged = [{"nvr": "foo-1.0-1.module_e0095747"}, {"nvr": "bar-1.0-1.module_e0095747"}] builder.koji_session.listTagged.return_value = tagged # Try to tag one artifact which is already tagged and one new ... @@ -286,44 +302,54 @@ class TestKojiBuilder: if blocklist: # "foo" and "new" packages should be unblocked before tagging. - expected_calls = [mock.call('module-foo-build', 'foo'), - mock.call('module-foo-build', 'new')] + expected_calls = [ + mock.call("module-foo-build", "foo"), + mock.call("module-foo-build", "new"), + ] else: expected_calls = [] assert builder.koji_session.packageListUnblock.mock_calls == expected_calls # ... only new one should be added. builder.koji_session.tagBuild.assert_called_once_with( - builder.module_build_tag["id"], "new-1.0-1.module_e0095747") + builder.module_build_tag["id"], "new-1.0-1.module_e0095747" + ) # Try the same for tag_artifacts(...). 
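# ---------------------------------------------------------------------------
# A minimal sketch of the idempotent-tagging behaviour verified above: NVRs
# already present in the tag are skipped, and only genuinely new builds are
# passed to tagBuild. The session here is a plain MagicMock stand-in and the
# helper is illustrative, not the KojiModuleBuilder method.
from mock import MagicMock

def tag_new_artifacts(session, tag_id, nvrs, already_tagged):
    for nvr in nvrs:
        if nvr not in already_tagged:   # skip what listTagged reported
            session.tagBuild(tag_id, nvr)

session = MagicMock()
tagged = {"foo-1.0-1.module_e0095747", "bar-1.0-1.module_e0095747"}
tag_new_artifacts(
    session, 2, ["foo-1.0-1.module_e0095747", "new-1.0-1.module_e0095747"], tagged)
session.tagBuild.assert_called_once_with(2, "new-1.0-1.module_e0095747")
# The tag_artifacts() call below repeats this check against the dest tag.
# ---------------------------------------------------------------------------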
builder.koji_session.tagBuild.reset_mock() builder.tag_artifacts(to_tag) builder.koji_session.tagBuild.assert_called_once_with( - builder.module_tag["id"], "new-1.0-1.module_e0095747") + builder.module_tag["id"], "new-1.0-1.module_e0095747" + ) - @patch.object(FakeKojiModuleBuilder, 'get_session') - @patch.object(FakeKojiModuleBuilder, '_get_tagged_nvrs') + @patch.object(FakeKojiModuleBuilder, "get_session") + @patch.object(FakeKojiModuleBuilder, "_get_tagged_nvrs") def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session): """ Tests that only tagged artifacts will be untagged """ mock_session = mock.Mock() mock_session.getTag.side_effect = [ - {'name': 'foobar', 'id': 1}, {'name': 'foobar-build', 'id': 2}] + {"name": "foobar", "id": 1}, + {"name": "foobar-build", "id": 2}, + ] mock_get_session.return_value = mock_session - mock_get_tagged_nvrs.side_effect = [['foo', 'bar'], ['foo']] + mock_get_tagged_nvrs.side_effect = [["foo", "bar"], ["foo"]] builder = FakeKojiModuleBuilder( - owner=self.module.owner, module=self.module, config=conf, tag_name='module-foo', - components=[]) + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", + components=[], + ) - builder.untag_artifacts(['foo', 'bar']) + builder.untag_artifacts(["foo", "bar"]) assert mock_session.untagBuild.call_count == 3 - expected_calls = [mock.call(1, 'foo'), mock.call(2, 'foo'), mock.call(1, 'bar')] + expected_calls = [mock.call(1, "foo"), mock.call(2, "foo"), mock.call(1, "bar")] assert mock_session.untagBuild.mock_calls == expected_calls - @patch.dict('sys.modules', krbV=MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_build_weights(self, ClientSession): session = ClientSession.return_value session.getLoggedInUser.return_value = {"id": 123} @@ -333,8 +359,10 @@ class TestKojiBuilder: # listBuilds response [[[{"task_id": 456}]], [[{"task_id": 789}]]], # getTaskDescendents response - [[{'1': [], '2': [], '3': [{'weight': 1.0}, {'weight': 1.0}]}], - [{'1': [], '2': [], '3': [{'weight': 1.0}, {'weight': 1.0}]}]] + [ + [{"1": [], "2": [], "3": [{"weight": 1.0}, {"weight": 1.0}]}], + [{"1": [], "2": [], "3": [{"weight": 1.0}, {"weight": 1.0}]}], + ], ] weights = KojiModuleBuilder.get_build_weights(["httpd", "apr"]) @@ -346,8 +374,8 @@ class TestKojiBuilder: # getLoggedInUser requires to a logged-in session session.krb_login.assert_called_once() - @patch.dict('sys.modules', krbV=MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_build_weights_no_task_id(self, ClientSession): session = ClientSession.return_value session.getLoggedInUser.return_value = {"id": 123} @@ -357,7 +385,7 @@ class TestKojiBuilder: # listBuilds response [[[{"task_id": 456}]], [[{"task_id": None}]]], # getTaskDescendents response - [[{'1': [], '2': [], '3': [{'weight': 1.0}, {'weight': 1.0}]}]] + [[{"1": [], "2": [], "3": [{"weight": 1.0}, {"weight": 1.0}]}]], ] session.getAverageBuildDuration.return_value = None @@ -368,8 +396,8 @@ class TestKojiBuilder: assert session.getTaskDescendents.mock_calls == expected_calls session.krb_login.assert_called_once() - @patch.dict('sys.modules', krbV=MagicMock()) - 
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_build_weights_no_build(self, ClientSession): session = ClientSession.return_value session.getLoggedInUser.return_value = {"id": 123} @@ -379,7 +407,7 @@ class TestKojiBuilder: # listBuilds response [[[{"task_id": 456}]], [[]]], # getTaskDescendents response - [[{'1': [], '2': [], '3': [{'weight': 1.0}, {'weight': 1.0}]}]] + [[{"1": [], "2": [], "3": [{"weight": 1.0}, {"weight": 1.0}]}]], ] session.getAverageBuildDuration.return_value = None @@ -390,8 +418,8 @@ class TestKojiBuilder: assert session.getTaskDescendents.mock_calls == expected_calls session.krb_login.assert_called_once() - @patch.dict('sys.modules', krbV=MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_build_weights_listBuilds_failed(self, ClientSession): session = ClientSession.return_value session.getLoggedInUser.return_value = {"id": 123} @@ -401,15 +429,17 @@ class TestKojiBuilder: weights = KojiModuleBuilder.get_build_weights(["httpd", "apr"]) assert weights == {"httpd": 1.5, "apr": 1.5} - expected_calls = [mock.call(packageID=1, userID=123, state=1, - queryOpts={'limit': 1, 'order': '-build_id'}), - mock.call(packageID=2, userID=123, state=1, - queryOpts={'limit': 1, 'order': '-build_id'})] + expected_calls = [ + mock.call( + packageID=1, userID=123, state=1, queryOpts={"limit": 1, "order": "-build_id"}), + mock.call( + packageID=2, userID=123, state=1, queryOpts={"limit": 1, "order": "-build_id"}), + ] assert session.listBuilds.mock_calls == expected_calls session.krb_login.assert_called_once() - @patch.dict('sys.modules', krbV=MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_build_weights_getPackageID_failed(self, ClientSession): session = ClientSession.return_value session.getLoggedInUser.return_value = {"id": 123} @@ -424,8 +454,8 @@ class TestKojiBuilder: session.krb_login.assert_called_once() - @patch.dict('sys.modules', krbV=MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_build_weights_getLoggedInUser_failed(self, ClientSession): session = ClientSession.return_value session.getAverageBuildDuration.return_value = None @@ -433,14 +463,14 @@ class TestKojiBuilder: assert weights == {"httpd": 1.5, "apr": 1.5} session.krb_login.assert_called_once() - @patch.object(conf, 'base_module_arches', - new={"platform:xx": ["x86_64", "i686"]}) - @pytest.mark.parametrize('blocklist', [False, True]) - @pytest.mark.parametrize('custom_whitelist', [False, True]) - @pytest.mark.parametrize('repo_include_all', [False, True]) - @pytest.mark.parametrize('override_arches', [False, True]) - def test_buildroot_connect(self, custom_whitelist, blocklist, repo_include_all, - override_arches): + @patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]}) + @pytest.mark.parametrize("blocklist", [False, True]) + @pytest.mark.parametrize("custom_whitelist", [False, True]) + 
@pytest.mark.parametrize("repo_include_all", [False, True]) + @pytest.mark.parametrize("override_arches", [False, True]) + def test_buildroot_connect( + self, custom_whitelist, blocklist, repo_include_all, override_arches + ): if blocklist: mmd = self.module.mmd() xmd = glib.from_variant_dict(mmd.get_xmd()) @@ -451,7 +481,7 @@ class TestKojiBuilder: if custom_whitelist: mmd = self.module.mmd() opts = mmd.get_buildopts() - opts.set_rpm_whitelist(['custom1', 'custom2']) + opts.set_rpm_whitelist(["custom1", "custom2"]) mmd.set_buildopts(opts) self.module.modulemd = to_text_type(mmd.dumps()) @@ -474,35 +504,43 @@ class TestKojiBuilder: self.module.modulemd = to_text_type(mmd.dumps()) builder = FakeKojiModuleBuilder( - owner=self.module.owner, module=self.module, config=conf, tag_name='module-foo', - components=["nginx"]) + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", + components=["nginx"], + ) session = builder.koji_session groups = OrderedDict() - groups['build'] = set(["unzip"]) - groups['srpm-build'] = set(["fedora-release"]) + groups["build"] = set(["unzip"]) + groups["srpm-build"] = set(["fedora-release"]) builder.buildroot_connect(groups) if custom_whitelist: expected_calls = [ - mock.call('module-foo', 'custom1', 'Moe Szyslak'), - mock.call('module-foo', 'custom2', 'Moe Szyslak'), - mock.call('module-foo-build', 'custom1', 'Moe Szyslak'), - mock.call('module-foo-build', 'custom2', 'Moe Szyslak') + mock.call("module-foo", "custom1", "Moe Szyslak"), + mock.call("module-foo", "custom2", "Moe Szyslak"), + mock.call("module-foo-build", "custom1", "Moe Szyslak"), + mock.call("module-foo-build", "custom2", "Moe Szyslak"), ] else: expected_calls = [ - mock.call('module-foo', 'nginx', 'Moe Szyslak'), - mock.call('module-foo-build', 'nginx', 'Moe Szyslak') + mock.call("module-foo", "nginx", "Moe Szyslak"), + mock.call("module-foo-build", "nginx", "Moe Szyslak"), ] assert session.packageListAdd.mock_calls == expected_calls - expected_calls = [mock.call('module-foo-build', 'build'), - mock.call('module-foo-build', 'srpm-build')] + expected_calls = [ + mock.call("module-foo-build", "build"), + mock.call("module-foo-build", "srpm-build"), + ] assert session.groupListAdd.mock_calls == expected_calls - expected_calls = [mock.call('module-foo-build', 'build', 'unzip'), - mock.call('module-foo-build', 'srpm-build', 'fedora-release')] + expected_calls = [ + mock.call("module-foo-build", "build", "unzip"), + mock.call("module-foo-build", "srpm-build", "fedora-release"), + ] assert session.groupPackageListAdd.mock_calls == expected_calls # packageListBlock should not be called, because we set the block list only when creating @@ -515,17 +553,29 @@ class TestKojiBuilder: else: expected_arches = "i686 armv7hl x86_64" - expected_calls = [mock.call('module-foo', arches=expected_arches, - extra={'mock.package_manager': 'dnf', - 'repo_include_all': repo_include_all, - 'mock.new_chroot': 0}), - mock.call('module-foo-build', arches=expected_arches, - extra={'mock.package_manager': 'dnf', - 'repo_include_all': repo_include_all, - 'mock.new_chroot': 0})] + expected_calls = [ + mock.call( + "module-foo", + arches=expected_arches, + extra={ + "mock.package_manager": "dnf", + "repo_include_all": repo_include_all, + "mock.new_chroot": 0, + }, + ), + mock.call( + "module-foo-build", + arches=expected_arches, + extra={ + "mock.package_manager": "dnf", + "repo_include_all": repo_include_all, + "mock.new_chroot": 0, + }, + ), + ] assert session.editTag2.mock_calls == expected_calls 
- @pytest.mark.parametrize('blocklist', [False, True]) + @pytest.mark.parametrize("blocklist", [False, True]) def test_buildroot_connect_create_tag(self, blocklist): if blocklist: mmd = self.module.mmd() @@ -535,62 +585,98 @@ class TestKojiBuilder: self.module.modulemd = to_text_type(mmd.dumps()) builder = FakeKojiModuleBuilder( - owner=self.module.owner, module=self.module, config=conf, tag_name='module-foo', - components=["nginx"]) + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", + components=["nginx"], + ) session = builder.koji_session FakeKojiModuleBuilder.tags = {} groups = OrderedDict() - groups['build'] = set(["unzip"]) - groups['srpm-build'] = set(["fedora-release"]) + groups["build"] = set(["unzip"]) + groups["srpm-build"] = set(["fedora-release"]) builder.buildroot_connect(groups) if blocklist: - expected_calls = [mock.call('module-foo-build', 'foo'), - mock.call('module-foo-build', 'nginx')] + expected_calls = [ + mock.call("module-foo-build", "foo"), + mock.call("module-foo-build", "nginx"), + ] else: expected_calls = [] assert session.packageListBlock.mock_calls == expected_calls - @pytest.mark.parametrize('scratch', [False, True]) + @pytest.mark.parametrize("scratch", [False, True]) def test_buildroot_connect_create_target(self, scratch): if scratch: self.module.scratch = scratch builder = FakeKojiModuleBuilder( - owner=self.module.owner, module=self.module, config=conf, tag_name='module-foo', - components=["nginx"]) + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-foo", + components=["nginx"], + ) session = builder.koji_session session.getBuildTarget = MagicMock() session.getBuildTarget.return_value = {} groups = OrderedDict() - groups['build'] = set(["unzip"]) - groups['srpm-build'] = set(["fedora-release"]) + groups["build"] = set(["unzip"]) + groups["srpm-build"] = set(["fedora-release"]) builder.buildroot_connect(groups) if scratch: - expected_calls = [mock.call( - 'scrmod-nginx-1-2-00000000+2', 'module-foo-build', 'module-foo')] + expected_calls = [ + mock.call("scrmod-nginx-1-2-00000000+2", "module-foo-build", "module-foo") + ] else: - expected_calls = [mock.call( - 'module-nginx-1-2-00000000', 'module-foo-build', 'module-foo')] + expected_calls = [ + mock.call("module-nginx-1-2-00000000", "module-foo-build", "module-foo") + ] assert session.createBuildTarget.mock_calls == expected_calls - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_built_rpms_in_module_build(self, ClientSession): session = ClientSession.return_value - session.listTaggedRPMS.return_value = ([ - {'build_id': 735939, 'name': 'tar', 'extra': None, 'arch': 'ppc64le', - 'buildtime': 1533299221, 'id': 6021394, 'epoch': 2, 'version': '1.30', - 'metadata_only': False, 'release': '4.el8+1308+551bfa71', - 'buildroot_id': 4321122, 'payloadhash': '0621ab2091256d21c47dcac868e7fc2a', - 'size': 878684}, - {'build_id': 735939, 'name': 'bar', 'extra': None, 'arch': 'ppc64le', - 'buildtime': 1533299221, 'id': 6021394, 'epoch': 2, 'version': '1.30', - 'metadata_only': False, 'release': '4.el8+1308+551bfa71', - 'buildroot_id': 4321122, 'payloadhash': '0621ab2091256d21c47dcac868e7fc2a', - 'size': 878684}], []) + session.listTaggedRPMS.return_value = ( + [ + { + "build_id": 735939, + "name": "tar", + "extra": None, + "arch": "ppc64le", + "buildtime": 1533299221, + "id": 6021394, + "epoch": 2, + "version": "1.30", + "metadata_only": 
False, + "release": "4.el8+1308+551bfa71", + "buildroot_id": 4321122, + "payloadhash": "0621ab2091256d21c47dcac868e7fc2a", + "size": 878684, + }, + { + "build_id": 735939, + "name": "bar", + "extra": None, + "arch": "ppc64le", + "buildtime": 1533299221, + "id": 6021394, + "epoch": 2, + "version": "1.30", + "metadata_only": False, + "release": "4.el8+1308+551bfa71", + "buildroot_id": 4321122, + "payloadhash": "0621ab2091256d21c47dcac868e7fc2a", + "size": 878684, + }, + ], + [], + ) # Module builds generated by init_data uses generic modulemd file and # the module's name/stream/version/context does not have to match it. @@ -603,79 +689,88 @@ class TestKojiBuilder: db.session.commit() ret = KojiModuleBuilder.get_built_rpms_in_module_build(mmd) - assert set(ret) == set( - ['bar-2:1.30-4.el8+1308+551bfa71', 'tar-2:1.30-4.el8+1308+551bfa71']) + assert set(ret) == set(["bar-2:1.30-4.el8+1308+551bfa71", "tar-2:1.30-4.el8+1308+551bfa71"]) session.assert_not_called() - @pytest.mark.parametrize('br_filtered_rpms,expected', ( + @pytest.mark.parametrize( + "br_filtered_rpms,expected", ( - ['perl-Tangerine-0.23-1.module+0+d027b723', 'not-in-tag-5.0-1.module+0+d027b723'], - ['not-in-tag-5.0-1.module+0+d027b723'] + ( + ["perl-Tangerine-0.23-1.module+0+d027b723", "not-in-tag-5.0-1.module+0+d027b723"], + ["not-in-tag-5.0-1.module+0+d027b723"], + ), + ( + [ + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + ], + [], + ), + ( + [ + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + "perl-Tangerine-0.23-1.module+0+d027b723", + ], + [], + ), + ( + [ + "perl-Tangerine-0.23-1.module+0+diff_module", + "not-in-tag-5.0-1.module+0+d027b723", + ], + [ + "perl-Tangerine-0.23-1.module+0+diff_module", + "not-in-tag-5.0-1.module+0+d027b723", + ], + ), + ([], []), ), - ( - ['perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723'], - [] - ), - ( - ['perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723', - 'perl-Tangerine-0.23-1.module+0+d027b723'], - [] - ), - ( - ['perl-Tangerine-0.23-1.module+0+diff_module', 'not-in-tag-5.0-1.module+0+d027b723'], - ['perl-Tangerine-0.23-1.module+0+diff_module', 'not-in-tag-5.0-1.module+0+d027b723'] - ), - ( - [], - [] - ), - )) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + ) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_filtered_rpms_on_self_dep(self, ClientSession, br_filtered_rpms, expected): session = ClientSession.return_value session.listTaggedRPMS.return_value = ( [ { - 'build_id': 12345, - 'epoch': None, - 'name': 'perl-Tangerine', - 'release': '1.module+0+d027b723', - 'version': '0.23' + "build_id": 12345, + "epoch": None, + "name": "perl-Tangerine", + "release": "1.module+0+d027b723", + "version": "0.23", }, { - 'build_id': 23456, - 'epoch': None, - 'name': 'perl-List-Compare', - 'release': '5.module+0+d027b723', - 'version': '0.53' + "build_id": 23456, + "epoch": None, + "name": "perl-List-Compare", + "release": "5.module+0+d027b723", + "version": "0.53", }, { - 'build_id': 34567, - 'epoch': None, - 'name': 'tangerine', - 'release': '3.module+0+d027b723', - 'version': '0.22' - } + "build_id": 34567, + "epoch": None, + "name": "tangerine", + "release": "3.module+0+d027b723", + "version": "0.22", + }, ], [ { - 'build_id': 12345, - 'name': 'perl-Tangerine', - 'nvr': 'perl-Tangerine-0.23-1.module+0+d027b723' + "build_id": 12345, + "name": 
"perl-Tangerine", + "nvr": "perl-Tangerine-0.23-1.module+0+d027b723", }, { - 'build_id': 23456, - 'name': 'perl-List-Compare', - 'nvr': 'perl-List-Compare-0.53-5.module+0+d027b723' + "build_id": 23456, + "name": "perl-List-Compare", + "nvr": "perl-List-Compare-0.53-5.module+0+d027b723", }, { - 'build_id': 34567, - 'name': 'tangerine', - 'nvr': 'tangerine-0.22-3.module+0+d027b723' - } - ] + "build_id": 34567, + "name": "tangerine", + "nvr": "tangerine-0.22-3.module+0+d027b723", + }, + ], ) reuse_component_init_data() current_module = module_build_service.models.ModuleBuild.query.get(3) @@ -683,21 +778,29 @@ class TestKojiBuilder: assert set(rv) == set(expected) session.assert_not_called() - @pytest.mark.parametrize('cg_enabled,cg_devel_enabled', [ - (False, False), - (True, False), - (True, True), - ]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.KojiContentGenerator') + @pytest.mark.parametrize( + "cg_enabled,cg_devel_enabled", [(False, False), (True, False), (True, True)] + ) + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiContentGenerator") def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled): self.module.state = 2 - with patch('module_build_service.config.Config.koji_enable_content_generator', - new_callable=mock.PropertyMock, return_value=cg_enabled): - with patch('module_build_service.config.Config.koji_cg_devel_module', - new_callable=mock.PropertyMock, return_value=cg_devel_enabled): + with patch( + "module_build_service.config.Config.koji_enable_content_generator", + new_callable=mock.PropertyMock, + return_value=cg_enabled, + ): + with patch( + "module_build_service.config.Config.koji_cg_devel_module", + new_callable=mock.PropertyMock, + return_value=cg_devel_enabled, + ): builder = FakeKojiModuleBuilder( - owner=self.module.owner, module=self.module, config=conf, - tag_name='module-nginx-1.2', components=[]) + owner=self.module.owner, + module=self.module, + config=conf, + tag_name="module-nginx-1.2", + components=[], + ) builder.finalize() mock_koji_cg = mock_koji_cg_cls.return_value @@ -710,17 +813,17 @@ class TestKojiBuilder: else: mock_koji_cg.koji_import.assert_not_called() - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_anonymous_session(self, ClientSession): - mbs_config = mock.Mock(koji_profile='koji', koji_config='conf/koji.conf') + mbs_config = mock.Mock(koji_profile="koji", koji_config="conf/koji.conf") session = KojiModuleBuilder.get_session(mbs_config, login=False) assert ClientSession.return_value == session assert ClientSession.return_value.krb_login.assert_not_called - @patch.dict('sys.modules', krbV=MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession): - builder = KojiModuleBuilder('owner', MagicMock(), conf, 'module-tag', []) + builder = KojiModuleBuilder("owner", MagicMock(), conf, "module-tag", []) builder.koji_session.krb_login.assert_called_once() @@ -730,55 +833,53 @@ class TestGetDistTagSRPM: def setup_method(self): clean_database() - self.tmp_srpm_build_dir = tempfile.mkdtemp(prefix='test-koji-builder-') - self.spec_file = os.path.join(self.tmp_srpm_build_dir, 'module-build-macros.spec') - self.srpms_dir = os.path.join(self.tmp_srpm_build_dir, 'SRPMS') + 
self.tmp_srpm_build_dir = tempfile.mkdtemp(prefix="test-koji-builder-") + self.spec_file = os.path.join(self.tmp_srpm_build_dir, "module-build-macros.spec") + self.srpms_dir = os.path.join(self.tmp_srpm_build_dir, "SRPMS") os.mkdir(self.srpms_dir) - self.expected_srpm_file = os.path.join( - self.srpms_dir, 'module-build-macros.src.rpm') + self.expected_srpm_file = os.path.join(self.srpms_dir, "module-build-macros.src.rpm") # Don't care about the content, just assert the existence. - with open(self.expected_srpm_file, 'w') as f: - f.write('') + with open(self.expected_srpm_file, "w") as f: + f.write("") module_nsvc = dict( - name='testmodule', - stream='master', - version='1', - context=module_build_service.models.DEFAULT_MODULE_CONTEXT + name="testmodule", + stream="master", + version="1", + context=module_build_service.models.DEFAULT_MODULE_CONTEXT, ) xmd = { - 'mbs': { - 'buildrequires': { - 'modulea': { - 'filtered_rpms': ['baz-devel-0:0.1-6.fc28', - 'baz-doc-0:0.1-6.fc28'], + "mbs": { + "buildrequires": { + "modulea": { + "filtered_rpms": ["baz-devel-0:0.1-6.fc28", "baz-doc-0:0.1-6.fc28"] + }, + "platform": { + "filtered_rpms": [], + "stream_collision_modules": ["modulefoo-s-v-c"], + "ursine_rpms": ["foo-0:1.0-1.fc28", "bar-0:2.0-1.fc28"], }, - 'platform': { - 'filtered_rpms': [], - 'stream_collision_modules': ['modulefoo-s-v-c'], - 'ursine_rpms': ['foo-0:1.0-1.fc28', 'bar-0:2.0-1.fc28'] - } }, - 'koji_tag': 'module-{name}-{stream}-{version}-{context}' - .format(**module_nsvc) + "koji_tag": "module-{name}-{stream}-{version}-{context}".format(**module_nsvc), } } from tests import make_module + self.module_build = make_module( - '{name}:{stream}:{version}:{context}'.format(**module_nsvc), - xmd=xmd) + "{name}:{stream}:{version}:{context}".format(**module_nsvc), xmd=xmd + ) def teardown_method(self): shutil.rmtree(self.tmp_srpm_build_dir) clean_database() - @patch('tempfile.mkdtemp') - @patch('module_build_service.builder.KojiModuleBuilder.execute_cmd') + @patch("tempfile.mkdtemp") + @patch("module_build_service.builder.KojiModuleBuilder.execute_cmd") def _build_srpm(self, execute_cmd, mkdtemp): mkdtemp.return_value = self.tmp_srpm_build_dir - return KojiModuleBuilder.get_disttag_srpm('disttag', self.module_build) + return KojiModuleBuilder.get_disttag_srpm("disttag", self.module_build) def test_return_srpm_file(self): srpm_file = self._build_srpm() @@ -787,17 +888,17 @@ class TestGetDistTagSRPM: def test_filtered_rpms_are_added(self): self._build_srpm() - with open(self.spec_file, 'r') as f: + with open(self.spec_file, "r") as f: content = f.read() - for nevr in ['baz-devel-0:0.1-6.fc28', 'baz-doc-0:0.1-6.fc28']: - assert KojiModuleBuilder.format_conflicts_line(nevr) + '\n' in content + for nevr in ["baz-devel-0:0.1-6.fc28", "baz-doc-0:0.1-6.fc28"]: + assert KojiModuleBuilder.format_conflicts_line(nevr) + "\n" in content def test_ursine_rpms_are_added(self): self._build_srpm() - with open(self.spec_file, 'r') as f: + with open(self.spec_file, "r") as f: content = f.read() - assert '# modulefoo-s-v-c\n' in content - for nevr in ['foo-0:1.0-1.fc28', 'bar-0:2.0-1.fc28']: - assert KojiModuleBuilder.format_conflicts_line(nevr) + '\n' in content + assert "# modulefoo-s-v-c\n" in content + for nevr in ["foo-0:1.0-1.fc28", "bar-0:2.0-1.fc28"]: + assert KojiModuleBuilder.format_conflicts_line(nevr) + "\n" in content diff --git a/tests/test_builder/test_mock.py b/tests/test_builder/test_mock.py index b94afdbd..4ad0a4a1 100644 --- a/tests/test_builder/test_mock.py +++ 
b/tests/test_builder/test_mock.py @@ -17,7 +17,6 @@ from tests import clean_database, make_module class TestMockModuleBuilder: - def setup_method(self, test_method): clean_database() self.resultdir = tempfile.mkdtemp() @@ -32,60 +31,63 @@ class TestMockModuleBuilder: "module_id": 2, "package": "ed", "format": "rpms", - "scmurl": ("https://src.fedoraproject.org/rpms/ed" - "?#01bf8330812fea798671925cc537f2f29b0bd216"), + "scmurl": ( + "https://src.fedoraproject.org/rpms/ed" + "?#01bf8330812fea798671925cc537f2f29b0bd216" + ), "batch": 2, - "ref": "01bf8330812fea798671925cc537f2f29b0bd216" + "ref": "01bf8330812fea798671925cc537f2f29b0bd216", }, { "module_id": 2, "package": "mksh", "format": "rpms", - "scmurl": ("https://src.fedoraproject.org/rpms/mksh" - "?#f70fd11ddf96bce0e2c64309706c29156b39141d"), + "scmurl": ( + "https://src.fedoraproject.org/rpms/mksh" + "?#f70fd11ddf96bce0e2c64309706c29156b39141d" + ), "batch": 3, - "ref": "f70fd11ddf96bce0e2c64309706c29156b39141d" + "ref": "f70fd11ddf96bce0e2c64309706c29156b39141d", }, ] base_dir = os.path.abspath(os.path.dirname(__file__)) - mmd = Modulemd.Module().new_from_file(os.path.join( - base_dir, '..', 'staged_data', 'testmodule-with-filters.yaml')) + mmd = Modulemd.Module().new_from_file( + os.path.join(base_dir, "..", "staged_data", "testmodule-with-filters.yaml")) mmd.upgrade() mmd.set_xmd(glib.dict_values({ - 'mbs': { - 'rpms': { - 'ed': {'ref': '01bf8330812fea798671925cc537f2f29b0bd216'}, - 'mksh': {'ref': 'f70fd11ddf96bce0e2c64309706c29156b39141d'} + "mbs": { + "rpms": { + "ed": {"ref": "01bf8330812fea798671925cc537f2f29b0bd216"}, + "mksh": {"ref": "f70fd11ddf96bce0e2c64309706c29156b39141d"}, }, - 'buildrequires': - { - 'host': { - 'version': '20171024133034', - 'filtered_rpms': [], - 'stream': 'master', - 'ref': '6df253bb3c53e84706c01b8ab2d5cac24f0b6d45', - 'context': '00000000' + "buildrequires": { + "host": { + "version": "20171024133034", + "filtered_rpms": [], + "stream": "master", + "ref": "6df253bb3c53e84706c01b8ab2d5cac24f0b6d45", + "context": "00000000", }, - 'platform': { - 'version': '20171028112959', - 'filtered_rpms': [], - 'stream': 'master', - 'ref': '4f7787370a931d57421f9f9555fc41c3e31ff1fa', - 'context': '00000000' + "platform": { + "version": "20171028112959", + "filtered_rpms": [], + "stream": "master", + "ref": "4f7787370a931d57421f9f9555fc41c3e31ff1fa", + "context": "00000000", + }, + }, + "scmurl": "file:///testdir", + "commit": "5566bc792ec7a03bb0e28edd1b104a96ba342bd8", + "requires": { + "platform": { + "version": "20171028112959", + "filtered_rpms": [], + "stream": "master", + "ref": "4f7787370a931d57421f9f9555fc41c3e31ff1fa", + "context": "00000000", } }, - 'scmurl': 'file:///testdir', - 'commit': '5566bc792ec7a03bb0e28edd1b104a96ba342bd8', - 'requires': { - 'platform': { - 'version': '20171028112959', - 'filtered_rpms': [], - 'stream': 'master', - 'ref': '4f7787370a931d57421f9f9555fc41c3e31ff1fa', - 'context': '00000000' - } - } } })) module = ModuleBuild.create( @@ -113,37 +115,39 @@ class TestMockModuleBuilder: @mock.patch("module_build_service.conf.system", new="mock") def test_createrepo_filter_last_batch(self, *args): with make_session(conf) as session: - module = self._create_module_with_filters(session, 3, koji.BUILD_STATES['COMPLETE']) + module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"]) - builder = MockModuleBuilder("mcurlej", module, conf, module.koji_tag, - module.component_builds) + builder = MockModuleBuilder( + "mcurlej", module, conf, module.koji_tag, 
module.component_builds + ) builder.resultsdir = self.resultdir rpms = [ "ed-1.14.1-4.module+24957a32.x86_64.rpm", "mksh-56b-1.module+24957a32.x86_64.rpm", - "module-build-macros-0.1-1.module+24957a32.noarch.rpm" + "module-build-macros-0.1-1.module+24957a32.noarch.rpm", ] rpm_qf_output = dedent("""\ ed 0 1.14.1 4.module+24957a32 x86_64 mksh 0 56b-1 module+24957a32 x86_64 module-build-macros 0 0.1 1.module+24957a32 noarch - """) + """) with mock.patch("os.listdir", return_value=rpms): with mock.patch("subprocess.check_output", return_value=rpm_qf_output): builder._createrepo() with open(os.path.join(self.resultdir, "pkglist"), "r") as fd: pkglist = fd.read().strip() - rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split('\n')] + rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")] assert "ed" not in rpm_names @mock.patch("module_build_service.conf.system", new="mock") def test_createrepo_not_last_batch(self): with make_session(conf) as session: - module = self._create_module_with_filters(session, 2, koji.BUILD_STATES['COMPLETE']) + module = self._create_module_with_filters(session, 2, koji.BUILD_STATES["COMPLETE"]) - builder = MockModuleBuilder("mcurlej", module, conf, module.koji_tag, - module.component_builds) + builder = MockModuleBuilder( + "mcurlej", module, conf, module.koji_tag, module.component_builds + ) builder.resultsdir = self.resultdir rpms = [ "ed-1.14.1-4.module+24957a32.x86_64.rpm", @@ -152,23 +156,23 @@ class TestMockModuleBuilder: rpm_qf_output = dedent("""\ ed 0 1.14.1 4.module+24957a32 x86_64 mksh 0 56b-1 module+24957a32 x86_64 - """) + """) with mock.patch("os.listdir", return_value=rpms): with mock.patch("subprocess.check_output", return_value=rpm_qf_output): builder._createrepo() with open(os.path.join(self.resultdir, "pkglist"), "r") as fd: pkglist = fd.read().strip() - rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split('\n')] + rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")] assert "ed" in rpm_names @mock.patch("module_build_service.conf.system", new="mock") def test_createrepo_empty_rmp_list(self, *args): with make_session(conf) as session: - module = self._create_module_with_filters(session, 3, koji.BUILD_STATES['COMPLETE']) + module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"]) - builder = MockModuleBuilder("mcurlej", module, conf, module.koji_tag, - module.component_builds) + builder = MockModuleBuilder( + "mcurlej", module, conf, module.koji_tag, module.component_builds) builder.resultsdir = self.resultdir rpms = [] with mock.patch("os.listdir", return_value=rpms): @@ -180,7 +184,6 @@ class TestMockModuleBuilder: class TestMockModuleBuilderAddRepos: - def setup_method(self, test_method): clean_database(add_platform_module=False) import_fake_base_module("platform:f29:1:000000") @@ -190,27 +193,32 @@ class TestMockModuleBuilderAddRepos: @mock.patch("module_build_service.conf.system", new="mock") @mock.patch( - 'module_build_service.config.Config.base_module_repofiles', + "module_build_service.config.Config.base_module_repofiles", new_callable=mock.PropertyMock, return_value=["/etc/yum.repos.d/bar.repo", "/etc/yum.repos.d/bar-updates.repo"], - create=True) + create=True, + ) @mock.patch("module_build_service.builder.MockModuleBuilder.open", create=True) @mock.patch( - "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._load_mock_config") + 
"module_build_service.builder.MockModuleBuilder.MockModuleBuilder._load_mock_config" + ) @mock.patch( - "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config") - def test_buildroot_add_repos(self, write_config, load_config, patched_open, - base_module_repofiles): + "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config" + ) + def test_buildroot_add_repos( + self, write_config, load_config, patched_open, base_module_repofiles + ): patched_open.side_effect = [ mock.mock_open(read_data="[fake]\nrepofile 1\n").return_value, mock.mock_open(read_data="[fake]\nrepofile 2\n").return_value, - mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value] + mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value, + ] builder = MockModuleBuilder("user", self.app, conf, "module-app", []) dependencies = { "repofile://": [self.platform.mmd()], - "repofile:///etc/yum.repos.d/foo.repo": [self.foo.mmd(), self.app.mmd()] + "repofile:///etc/yum.repos.d/foo.repo": [self.foo.mmd(), self.app.mmd()], } builder.buildroot_add_repos(dependencies) diff --git a/tests/test_content_generator.py b/tests/test_content_generator.py index 8eba0fec..4853368c 100644 --- a/tests/test_content_generator.py +++ b/tests/test_content_generator.py @@ -30,7 +30,7 @@ from os import path from module_build_service.utils import to_text_type import module_build_service.messaging -import module_build_service.scheduler.handlers.repos # noqa +import module_build_service.scheduler.handlers.repos # noqa from module_build_service import models, conf, build_logs, Modulemd, glib from mock import patch, Mock, call, mock_open @@ -48,23 +48,25 @@ GET_USER_RV = { "krb_principal": "mszyslak@FEDORAPROJECT.ORG", "name": "Moe Szyslak", "status": 0, - "usertype": 0 + "usertype": 0, } class TestBuild: - def setup_method(self, test_method): init_data(1, contexts=True) module = models.ModuleBuild.query.filter_by(id=2).one() module.cg_build_koji_tag = "f27-module-candidate" self.cg = KojiContentGenerator(module, conf) - self.p_read_config = patch('koji.read_config', return_value={ - 'authtype': 'kerberos', - 'timeout': 60, - 'server': 'http://koji.example.com/' - }) + self.p_read_config = patch( + "koji.read_config", + return_value={ + "authtype": "kerberos", + "timeout": 60, + "server": "http://koji.example.com/", + }, + ) self.mock_read_config = self.p_read_config.start() # Ensure that there is no build log from other tests @@ -79,10 +81,12 @@ class TestBuild: # Necessary to restart the twisted reactor for the next test. import sys - del sys.modules['twisted.internet.reactor'] - del sys.modules['moksha.hub.reactor'] - del sys.modules['moksha.hub'] - import moksha.hub.reactor # noqa + + del sys.modules["twisted.internet.reactor"] + del sys.modules["moksha.hub.reactor"] + del sys.modules["moksha.hub"] + import moksha.hub.reactor # noqa + try: file_path = build_logs.path(self.cg.module) os.remove(file_path) @@ -91,15 +95,17 @@ class TestBuild: @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") @patch("subprocess.Popen") - @patch("subprocess.check_output", return_value='1.4') + @patch("subprocess.check_output", return_value="1.4") @patch("pkg_resources.get_distribution") @patch("platform.linux_distribution") @patch("platform.machine") - @patch(("module_build_service.builder.KojiContentGenerator.KojiContentGenerator." 
- "_koji_rpms_in_tag")) + @patch( + "module_build_service.builder.KojiContentGenerator.KojiContentGenerator._koji_rpms_in_tag" + ) @pytest.mark.parametrize("devel", (False, True)) - def test_get_generator_json(self, rpms_in_tag, machine, distro, pkg_res, coutput, popen, - ClientSession, devel): + def test_get_generator_json( + self, rpms_in_tag, machine, distro, pkg_res, coutput, popen, ClientSession, devel + ): """ Test generation of content generator json """ koji_session = ClientSession.return_value koji_session.getUser.return_value = GET_USER_RV @@ -109,21 +115,21 @@ class TestBuild: pkg_res.return_value = Mock() pkg_res.return_value.version = "current-tested-version" rpm_mock = Mock() - rpm_out = b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n" \ - b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2" - attrs = {'communicate.return_value': (rpm_out, 'error'), - 'wait.return_value': 0} + rpm_out = ( + b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n" + b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2" + ) + attrs = {"communicate.return_value": (rpm_out, "error"), "wait.return_value": 0} rpm_mock.configure_mock(**attrs) popen.return_value = rpm_mock tests_dir = path.abspath(path.dirname(__file__)) - rpm_in_tag_path = path.join(tests_dir, - "test_get_generator_json_rpms_in_tag.json") + rpm_in_tag_path = path.join(tests_dir, "test_get_generator_json_rpms_in_tag.json") with open(rpm_in_tag_path) as rpms_in_tag_file: rpms_in_tag.return_value = json.load(rpms_in_tag_file) - expected_output_path = path.join(tests_dir, - "test_get_generator_json_expected_output_with_log.json") + expected_output_path = path.join( + tests_dir, "test_get_generator_json_expected_output_with_log.json") with open(expected_output_path) as expected_output_file: expected_output = json.load(expected_output_file) @@ -148,14 +154,16 @@ class TestBuild: @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") @patch("subprocess.Popen") - @patch("subprocess.check_output", return_value='1.4') + @patch("subprocess.check_output", return_value="1.4") @patch("pkg_resources.get_distribution") @patch("platform.linux_distribution") @patch("platform.machine") - @patch(("module_build_service.builder.KojiContentGenerator.KojiContentGenerator." 
- "_koji_rpms_in_tag")) - def test_get_generator_json_no_log(self, rpms_in_tag, machine, distro, pkg_res, coutput, popen, - ClientSession): + @patch( + "module_build_service.builder.KojiContentGenerator.KojiContentGenerator._koji_rpms_in_tag" + ) + def test_get_generator_json_no_log( + self, rpms_in_tag, machine, distro, pkg_res, coutput, popen, ClientSession + ): """ Test generation of content generator json """ koji_session = ClientSession.return_value koji_session.getUser.return_value = GET_USER_RV @@ -165,21 +173,20 @@ class TestBuild: pkg_res.return_value = Mock() pkg_res.return_value.version = "current-tested-version" rpm_mock = Mock() - rpm_out = b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n" \ - b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2" - attrs = {'communicate.return_value': (rpm_out, 'error'), - 'wait.return_value': 0} + rpm_out = ( + b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n" + b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2" + ) + attrs = {"communicate.return_value": (rpm_out, "error"), "wait.return_value": 0} rpm_mock.configure_mock(**attrs) popen.return_value = rpm_mock tests_dir = path.abspath(path.dirname(__file__)) - rpm_in_tag_path = path.join(tests_dir, - "test_get_generator_json_rpms_in_tag.json") + rpm_in_tag_path = path.join(tests_dir, "test_get_generator_json_rpms_in_tag.json") with open(rpm_in_tag_path) as rpms_in_tag_file: rpms_in_tag.return_value = json.load(rpms_in_tag_file) - expected_output_path = path.join(tests_dir, - "test_get_generator_json_expected_output.json") + expected_output_path = path.join(tests_dir, "test_get_generator_json_expected_output.json") with open(expected_output_path) as expected_output_file: expected_output = json.load(expected_output_file) self.cg._load_koji_tag(koji_session) @@ -216,7 +223,7 @@ class TestBuild: """ Test that the CG build is tagged. """ koji_session = ClientSession.return_value koji_session.getUser.return_value = GET_USER_RV - koji_session.getTag.return_value = {'id': 123} + koji_session.getTag.return_value = {"id": 123} self.cg._tag_cg_build() @@ -232,13 +239,14 @@ class TestBuild: """ Test that the CG build is tagged to default tag. """ koji_session = ClientSession.return_value koji_session.getUser.return_value = GET_USER_RV - koji_session.getTag.side_effect = [{}, {'id': 123}] + koji_session.getTag.side_effect = [{}, {"id": 123}] self.cg._tag_cg_build() assert koji_session.getTag.mock_calls == [ call(self.cg.module.cg_build_koji_tag), - call(conf.koji_cg_default_build_tag)] + call(conf.koji_cg_default_build_tag), + ] koji_session.tagBuild.assert_called_once_with(123, "nginx-0-2.10e50d06") # tagBuild requires logging into a session in advance. @@ -250,7 +258,7 @@ class TestBuild: """ Test that the CG build is not tagged when no tag set. 
""" koji_session = ClientSession.return_value koji_session.getUser.return_value = GET_USER_RV - koji_session.getTag.side_effect = [{}, {'id': 123}] + koji_session.getTag.side_effect = [{}, {"id": 123}] self.cg.module.cg_build_koji_tag = None self.cg._tag_cg_build() @@ -275,19 +283,18 @@ class TestBuild: @patch("module_build_service.builder.KojiContentGenerator.open", create=True) def test_get_arch_mmd_output(self, patched_open): - patched_open.return_value = mock_open( - read_data=self.cg.mmd.encode("utf-8")).return_value + patched_open.return_value = mock_open(read_data=self.cg.mmd.encode("utf-8")).return_value ret = self.cg._get_arch_mmd_output("./fake-dir", "x86_64") assert ret == { - 'arch': 'x86_64', - 'buildroot_id': 1, - 'checksum': '96b7739ffa3918e6ac3e3bd422b064ea', - 'checksum_type': 'md5', - 'components': [], - 'extra': {'typeinfo': {'module': {}}}, - 'filename': 'modulemd.x86_64.txt', - 'filesize': 1138, - 'type': 'file' + "arch": "x86_64", + "buildroot_id": 1, + "checksum": "96b7739ffa3918e6ac3e3bd422b064ea", + "checksum_type": "md5", + "components": [], + "extra": {"typeinfo": {"module": {}}}, + "filename": "modulemd.x86_64.txt", + "filesize": 1138, + "type": "file", } @patch("module_build_service.builder.KojiContentGenerator.open", create=True) @@ -298,17 +305,18 @@ class TestBuild: mmd.set_rpm_artifacts(rpm_artifacts) mmd_data = to_text_type(mmd.dumps()).encode("utf-8") - patched_open.return_value = mock_open( - read_data=mmd_data).return_value + patched_open.return_value = mock_open(read_data=mmd_data).return_value - self.cg.rpms = [{ - "name": "dhcp", - "version": "4.3.5", - "release": "5.module_2118aef6", - "arch": "x86_64", - "epoch": "12", - "payloadhash": "hash", - }] + self.cg.rpms = [ + { + "name": "dhcp", + "version": "4.3.5", + "release": "5.module_2118aef6", + "arch": "x86_64", + "epoch": "12", + "payloadhash": "hash", + } + ] self.cg.rpms_dict = { "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64": { @@ -323,21 +331,25 @@ class TestBuild: ret = self.cg._get_arch_mmd_output("./fake-dir", "x86_64") assert ret == { - 'arch': 'x86_64', - 'buildroot_id': 1, - 'checksum': '502e46889affec24d98a281289104d4d', - 'checksum_type': 'md5', - 'components': [{u'arch': 'x86_64', - u'epoch': '12', - u'name': 'dhcp', - u'release': '5.module_2118aef6', - u'sigmd5': 'hash', - u'type': u'rpm', - u'version': '4.3.5'}], - 'extra': {'typeinfo': {'module': {}}}, - 'filename': 'modulemd.x86_64.txt', - 'filesize': 319, - 'type': 'file' + "arch": "x86_64", + "buildroot_id": 1, + "checksum": "502e46889affec24d98a281289104d4d", + "checksum_type": "md5", + "components": [ + { + u"arch": "x86_64", + u"epoch": "12", + u"name": "dhcp", + u"release": "5.module_2118aef6", + u"sigmd5": "hash", + u"type": u"rpm", + u"version": "4.3.5", + } + ], + "extra": {"typeinfo": {"module": {}}}, + "filename": "modulemd.x86_64.txt", + "filesize": 319, + "type": "file", } @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") @@ -348,69 +360,71 @@ class TestBuild: rpms = [ { - 'id': 1, - 'arch': 'src', - 'epoch': None, - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1' + "id": 1, + "arch": "src", + "epoch": None, + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", }, { - 'id': 2, - 'arch': 'noarch', - 'epoch': None, - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1' + "id": 2, + "arch": "noarch", + "epoch": None, + "build_id": 
875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", }, { - 'id': 3, - 'arch': 'src', - 'epoch': 3, - 'build_id': 875636, - 'name': 'ed', - 'release': '2.module_bd6e0eb1', - 'version': '1.14.1' + "id": 3, + "arch": "src", + "epoch": 3, + "build_id": 875636, + "name": "ed", + "release": "2.module_bd6e0eb1", + "version": "1.14.1", }, { - 'id': 4, - 'arch': 'x86_64', - 'epoch': 3, - 'build_id': 875636, - 'name': 'ed', - 'release': '2.module_bd6e0eb1', - 'version': '1.14.1' + "id": 4, + "arch": "x86_64", + "epoch": 3, + "build_id": 875636, + "name": "ed", + "release": "2.module_bd6e0eb1", + "version": "1.14.1", }, ] builds = [ { - 'build_id': 875636, - 'epoch': 3, - 'name': 'ed', - 'release': '2.module_bd6e0eb1', - 'version': '1.14.1', - 'nvr': 'ed-2.module_bd6e0eb1-1.14.1', + "build_id": 875636, + "epoch": 3, + "name": "ed", + "release": "2.module_bd6e0eb1", + "version": "1.14.1", + "nvr": "ed-2.module_bd6e0eb1-1.14.1", }, { - 'build_id': 875991, - 'epoch': None, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1', - 'nvr': 'module-build-macros-0.1-1.module_92011fe6', - } + "build_id": 875991, + "epoch": None, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", + "nvr": "module-build-macros-0.1-1.module_92011fe6", + }, ] koji_session.listTaggedRPMS.return_value = (rpms, builds) koji_session.multiCall.side_effect = [ # getRPMHeaders response - [[{'excludearch': ["x86_64"], 'exclusivearch': [], 'license': 'MIT'}], - [{'excludearch': [], 'exclusivearch': ["x86_64"], 'license': 'GPL'}], - [{'license': 'MIT'}], - [{'license': 'GPL'}]] + [ + [{"excludearch": ["x86_64"], "exclusivearch": [], "license": "MIT"}], + [{"excludearch": [], "exclusivearch": ["x86_64"], "license": "GPL"}], + [{"license": "MIT"}], + [{"license": "GPL"}], + ] ] rpms = self.cg._koji_rpms_in_tag("tag") @@ -448,33 +462,33 @@ class TestBuild: rpms = [ { - 'id': 1, - 'arch': 'src', - 'epoch': None, - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1' + "id": 1, + "arch": "src", + "epoch": None, + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", }, { - 'id': 2, - 'arch': 'noarch', - 'epoch': None, - 'build_id': 875991, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1' + "id": 2, + "arch": "noarch", + "epoch": None, + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", }, ] builds = [ { - 'build_id': 875991, - 'epoch': None, - 'name': 'module-build-macros', - 'release': '1.module_92011fe6', - 'version': '0.1', - 'nvr': 'module-build-macros-0.1-1.module_92011fe6', + "build_id": 875991, + "epoch": None, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", + "nvr": "module-build-macros-0.1-1.module_92011fe6", } ] @@ -487,8 +501,7 @@ class TestBuild: with pytest.raises(RuntimeError) as cm: self.cg._koji_rpms_in_tag("tag") - assert str(cm.value) == ( - "No RPM headers received from Koji for RPM module-build-macros") + assert str(cm.value) == ("No RPM headers received from Koji for RPM module-build-macros") koji_session.multiCall.side_effect = [ # getRPMHeaders response @@ -498,11 +511,19 @@ class TestBuild: with pytest.raises(RuntimeError) as cm: self.cg._koji_rpms_in_tag("tag") assert str(cm.value) == ( - "No RPM 'license' header received from Koji for RPM module-build-macros") + "No RPM 
'license' header received from Koji for RPM module-build-macros" + ) - def _add_test_rpm(self, nevra, srpm_nevra, multilib=None, - koji_srpm_nevra=None, excludearch=None, exclusivearch=None, - license=None): + def _add_test_rpm( + self, + nevra, + srpm_nevra, + multilib=None, + koji_srpm_nevra=None, + excludearch=None, + exclusivearch=None, + license=None, + ): """ Helper method to add test RPM to ModuleBuild used by KojiContentGenerator and also to Koji tag used to generate the Content Generator build. @@ -553,24 +574,34 @@ class TestBuild: @pytest.mark.parametrize("devel", (False, True)) def test_fill_in_rpms_list(self, devel): - self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.s390x", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.s390x", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.s390x", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.s390x", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) self.cg.devel = devel mmd = self.cg.module.mmd() @@ -589,41 +620,52 @@ class TestBuild: # is not enabled for them - therefore we want to include them in -devel. 
assert set(mmd.get_rpm_artifacts().get()) == set([ "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686"]) + "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", + ]) def test_fill_in_rpms_exclusivearch(self): - self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", - "dhcp-12:4.3.5-5.module_2118aef6.src", - exclusivearch=["x86_64"]) - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - exclusivearch=["ppc64le"]) + self._add_test_rpm( + "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", + "dhcp-12:4.3.5-5.module_2118aef6.src", + exclusivearch=["x86_64"], + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + exclusivearch=["ppc64le"], + ) mmd = self.cg.module.mmd() mmd = self.cg._fill_in_rpms_list(mmd, "x86_64") # Only dhcp-libs should be filled in, because perl-Tangerine has different # exclusivearch. - assert set(mmd.get_rpm_artifacts().get()) == set([ - "dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", - ]) + assert set(mmd.get_rpm_artifacts().get()) == set( + ["dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch"]) def test_fill_in_rpms_excludearch(self): - self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", - "dhcp-12:4.3.5-5.module_2118aef6.src", - excludearch=["x86_64"]) - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - excludearch=["ppc64le"]) + self._add_test_rpm( + "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", + "dhcp-12:4.3.5-5.module_2118aef6.src", + excludearch=["x86_64"], + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + excludearch=["ppc64le"], + ) mmd = self.cg.module.mmd() mmd = self.cg._fill_in_rpms_list(mmd, "x86_64") @@ -636,24 +678,36 @@ class TestBuild: @pytest.mark.parametrize("devel", (False, True)) def test_fill_in_rpms_rpm_whitelist(self, devel): - self._add_test_rpm("python27-dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src", - koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("python27-dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-12:4.3.5-5.module_2118aef6.src", - koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("python27-dhcp-libs-12:4.3.5-5.module_2118aef6.i686", - "dhcp-12:4.3.5-5.module_2118aef6.src", - 
koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "python27-dhcp-12:4.3.5-5.module_2118aef6.src", + "dhcp-12:4.3.5-5.module_2118aef6.src", + koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "python27-dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", + "dhcp-12:4.3.5-5.module_2118aef6.src", + koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "python27-dhcp-libs-12:4.3.5-5.module_2118aef6.i686", + "dhcp-12:4.3.5-5.module_2118aef6.src", + koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) self.cg.devel = devel mmd = self.cg.module.mmd() @@ -680,34 +734,56 @@ class TestBuild: @pytest.mark.parametrize("devel", (False, True)) def test_fill_in_rpms_list_filters(self, devel): - self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.i686", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.i686", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.i686", - 
"perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.x86_64", + "dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.x86_64", + "dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.i686", + "dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.i686", + "dhcp-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) self.cg.devel = devel mmd = self.cg.module.mmd() @@ -741,24 +817,36 @@ class TestBuild: @pytest.mark.parametrize("devel", (False, True)) def test_fill_in_rpms_list_multilib(self, devel): - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.src", - "dhcp-libs-12:4.3.5-5.module_2118aef6.src", - multilib=["x86_64"]) - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-libs-12:4.3.5-5.module_2118aef6.src", - multilib=["x86_64"]) - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686", - "dhcp-libs-12:4.3.5-5.module_2118aef6.src", - multilib=["x86_64"]) - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - multilib=["ppc64le"]) - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - multilib=["ppc64le"]) - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - multilib=["ppc64le"]) + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.src", + "dhcp-libs-12:4.3.5-5.module_2118aef6.src", + multilib=["x86_64"], + ) + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", + "dhcp-libs-12:4.3.5-5.module_2118aef6.src", + multilib=["x86_64"], + ) + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", + "dhcp-libs-12:4.3.5-5.module_2118aef6.src", + multilib=["x86_64"], + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + 
multilib=["ppc64le"], + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + multilib=["ppc64le"], + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + multilib=["ppc64le"], + ) self.cg.devel = devel mmd = self.cg.module.mmd() @@ -775,31 +863,36 @@ class TestBuild: "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", ]) else: - assert set(mmd.get_rpm_artifacts().get()) == set([ - "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686"]) + assert set(mmd.get_rpm_artifacts().get()) == set( + ["perl-Tangerine-12:4.3.5-5.module_2118aef6.i686"]) @pytest.mark.parametrize( - "licenses, expected", ( - (["GPL", "MIT"], ["GPL", "MIT"]), - (["GPL", ""], ["GPL"]), - (["GPL", "GPL"], ["GPL"]), - ) + "licenses, expected", + ((["GPL", "MIT"], ["GPL", "MIT"]), (["GPL", ""], ["GPL"]), (["GPL", "GPL"], ["GPL"])), ) def test_fill_in_rpms_list_license(self, licenses, expected): - self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", - "dhcp-12:4.3.5-5.module_2118aef6.src", - license=licenses[0]) - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686", - "dhcp-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", - license=licenses[1]) - self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", - "perl-Tangerine-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", + "dhcp-12:4.3.5-5.module_2118aef6.src", + license=licenses[0], + ) + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", "dhcp-12:4.3.5-5.module_2118aef6.src") + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + license=licenses[1], + ) + self._add_test_rpm( + "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686", + "perl-Tangerine-12:4.3.5-5.module_2118aef6.src", + ) mmd = self.cg.module.mmd() mmd = self.cg._fill_in_rpms_list(mmd, "x86_64") @@ -812,12 +905,16 @@ class TestBuild: # A build has ExcludeArch: i686 (because it only works on 64 bit arches). # A noarch package is built there, and this noarch packages should be # included in x86_64 repo. - self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", - "dhcp-12:4.3.5-5.module_2118aef6.src", - excludearch=["i686"]) - self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src", - "dhcp-12:4.3.5-5.module_2118aef6.src", - excludearch=["i686"]) + self._add_test_rpm( + "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch", + "dhcp-12:4.3.5-5.module_2118aef6.src", + excludearch=["i686"], + ) + self._add_test_rpm( + "dhcp-12:4.3.5-5.module_2118aef6.src", + "dhcp-12:4.3.5-5.module_2118aef6.src", + excludearch=["i686"], + ) self.cg.devel = devel mmd = self.cg.module.mmd() @@ -828,7 +925,8 @@ class TestBuild: # multilib set. The "dhcp" SRPM should be also included. 
assert set(mmd.get_rpm_artifacts().get()) == set([
                 "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
-                "dhcp-12:4.3.5-5.module_2118aef6.src"])
+                "dhcp-12:4.3.5-5.module_2118aef6.src",
+            ])
         else:
             assert set(mmd.get_rpm_artifacts().get()) == set([])
@@ -849,14 +947,19 @@ class TestBuild:

         assert "mbs" not in mmd.get_xmd().keys()

-    @patch('module_build_service.builder.KojiContentGenerator.SCM')
+    @patch("module_build_service.builder.KojiContentGenerator.SCM")
     def test_prepare_file_directory_modulemd_src(self, mocked_scm):
-        FakeSCM(mocked_scm, 'testmodule', 'testmodule_init.yaml',
-                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
+        FakeSCM(
+            mocked_scm,
+            "testmodule",
+            "testmodule_init.yaml",
+            "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
+        )
         mmd = self.cg.module.mmd()
-        mmd.set_xmd(glib.dict_values({"mbs": {
-            "commit": "foo",
-            "scmurl": "git://localhost/modules/foo.git#master"}}))
+        mmd.set_xmd(
+            glib.dict_values(
+                {"mbs": {"commit": "foo", "scmurl": "git://localhost/modules/foo.git#master"}})
+        )
         self.cg.module.modulemd = to_text_type(mmd.dumps())
         file_dir = self.cg._prepare_file_directory()
         with io.open(path.join(file_dir, "modulemd.src.txt"), encoding="utf-8") as mmd:
@@ -883,8 +986,8 @@ class TestBuild:
     @patch("module_build_service.builder.KojiContentGenerator.KojiContentGenerator._tag_cg_build")
     @patch("module_build_service.builder.KojiContentGenerator.KojiContentGenerator._load_koji_tag")
     def test_koji_cg_koji_import(self, tag_loader, tagger, cl_session):
-        ''' Tests whether build is still tagged even if there's an exception in CGImport '''
+        """ Tests whether the build is still tagged even if there's an exception in CGImport """
         cl_session.return_value.CGImport = Mock(
-            side_effect=koji.GenericError('Build already exists asdv'))
+            side_effect=koji.GenericError("Build already exists asdv"))
         self.cg.koji_import()
         tagger.assert_called()
diff --git a/tests/test_logger.py b/tests/test_logger.py
index d0f5063a..b7bc18bc 100644
--- a/tests/test_logger.py
+++ b/tests/test_logger.py
@@ -33,24 +33,25 @@ from tests import init_data


 class TestLogger:
-
     def setup_method(self, test_method):
         init_data(1)
         log.debug(test_method.__module__)
         try:
             # py2
-            test_id = '.'.join([
+            test_id = ".".join([
                 path.splitext(path.basename(__file__))[0],
                 test_method.im_class.__name__,
-                test_method.im_func.__name__])
+                test_method.im_func.__name__,
+            ])
         except AttributeError:
             # py3
-            test_id = '.'.join([
+            test_id = ".".join([
                 path.splitext(path.basename(__file__))[0],
                 test_method.__self__.__class__.__name__,
-                test_method.__self__.__class__.__name__])
+                test_method.__name__,
+            ])

-        self.base = tempfile.mkdtemp(prefix='mbs-', suffix='-%s' % test_id)
+        self.base = tempfile.mkdtemp(prefix="mbs-", suffix="-%s" % test_id)
         self.name_format = "build-{id}.log"
         print("Storing build logs in %r" % self.base)
         self.build_log = ModuleBuildLogs(self.base, self.name_format)
diff --git a/tests/test_manage.py b/tests/test_manage.py
index b9327cd9..b885deb2 100644
--- a/tests/test_manage.py
+++ b/tests/test_manage.py
@@ -30,14 +30,17 @@ class TestMBSManage:
     def setup_method(self, test_method):
         init_data()

-    @pytest.mark.parametrize(('identifier', 'is_valid'), (
-        ('', False),
-        ('spam', False),
-        ('spam:bacon', True),
-        ('spam:bacon:eggs', True),
-        ('spam:bacon:eggs:ham', True),
-        ('spam:bacon:eggs:ham:sausage', False),
-    ))
+    @pytest.mark.parametrize(
+        ("identifier", "is_valid"),
+        (
+            ("", False),
+            ("spam", False),
+            ("spam:bacon", True),
+            ("spam:bacon:eggs", True),
+            ("spam:bacon:eggs:ham", True),
+            
("spam:bacon:eggs:ham:sausage", False), + ), + ) def test_retire_identifier_validation(self, identifier, is_valid): if is_valid: retire(identifier) @@ -45,29 +48,31 @@ class TestMBSManage: with pytest.raises(ValueError): retire(identifier) - @pytest.mark.parametrize(('overrides', 'identifier', 'changed_count'), ( - ({'name': 'pickme'}, 'pickme:eggs', 1), - ({'stream': 'pickme'}, 'spam:pickme', 1), - ({'version': 'pickme'}, 'spam:eggs:pickme', 1), - ({'context': 'pickme'}, 'spam:eggs:ham:pickme', 1), - - ({}, 'spam:eggs', 3), - ({'version': 'pickme'}, 'spam:eggs', 3), - ({'context': 'pickme'}, 'spam:eggs:ham', 3), - )) - @patch('module_build_service.manage.prompt_bool') + @pytest.mark.parametrize( + ("overrides", "identifier", "changed_count"), + ( + ({"name": "pickme"}, "pickme:eggs", 1), + ({"stream": "pickme"}, "spam:pickme", 1), + ({"version": "pickme"}, "spam:eggs:pickme", 1), + ({"context": "pickme"}, "spam:eggs:ham:pickme", 1), + ({}, "spam:eggs", 3), + ({"version": "pickme"}, "spam:eggs", 3), + ({"context": "pickme"}, "spam:eggs:ham", 3), + ), + ) + @patch("module_build_service.manage.prompt_bool") def test_retire_build(self, prompt_bool, overrides, identifier, changed_count): prompt_bool.return_value = True with make_session(conf) as session: - module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES['ready']).all() + module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all() # Verify our assumption of the amount of ModuleBuilds in database assert len(module_builds) == 3 for x, build in enumerate(module_builds): - build.name = 'spam' - build.stream = 'eggs' - build.version = 'ham' + build.name = "spam" + build.stream = "eggs" + build.version = "ham" build.context = str(x) for attr, value in overrides.items(): @@ -77,38 +82,44 @@ class TestMBSManage: retire(identifier) retired_module_builds = ( - session.query(ModuleBuild).filter_by(state=BUILD_STATES['garbage']).all()) + session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all() + ) assert len(retired_module_builds) == changed_count for x in range(changed_count): assert retired_module_builds[x].id == module_builds[x].id - assert retired_module_builds[x].state == BUILD_STATES['garbage'] + assert retired_module_builds[x].state == BUILD_STATES["garbage"] - @pytest.mark.parametrize(('confirm_prompt', 'confirm_arg', 'confirm_expected'), ( - (True, False, True), - (True, True, True), - (False, False, False), - (False, True, True), - )) - @patch('module_build_service.manage.prompt_bool') - def test_retire_build_confirm_prompt(self, prompt_bool, confirm_prompt, confirm_arg, - confirm_expected): + @pytest.mark.parametrize( + ("confirm_prompt", "confirm_arg", "confirm_expected"), + ( + (True, False, True), + (True, True, True), + (False, False, False), + (False, True, True) + ), + ) + @patch("module_build_service.manage.prompt_bool") + def test_retire_build_confirm_prompt( + self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected + ): prompt_bool.return_value = confirm_prompt with make_session(conf) as session: - module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES['ready']).all() + module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all() # Verify our assumption of the amount of ModuleBuilds in database assert len(module_builds) == 3 for x, build in enumerate(module_builds): - build.name = 'spam' - build.stream = 'eggs' + build.name = "spam" + build.stream = "eggs" session.commit() - retire('spam:eggs', confirm_arg) + 
retire("spam:eggs", confirm_arg) retired_module_builds = ( - session.query(ModuleBuild).filter_by(state=BUILD_STATES['garbage']).all()) + session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all() + ) expected_changed_count = 3 if confirm_expected else 0 assert len(retired_module_builds) == expected_changed_count diff --git a/tests/test_messaging.py b/tests/test_messaging.py index 172de13c..1e8954e2 100644 --- a/tests/test_messaging.py +++ b/tests/test_messaging.py @@ -22,29 +22,28 @@ from module_build_service import messaging -from module_build_service.messaging import KojiRepoChange # noqa +from module_build_service.messaging import KojiRepoChange # noqa class TestFedmsgMessaging: - def test_buildsys_state_change(self): # https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134 buildsys_state_change_msg = { - 'msg': { - 'attribute': 'state', - 'build_id': 614503, - 'instance': 'primary', - 'name': 'plasma-systemsettings', - 'new': 1, - 'old': 0, - 'owner': 'dvratil', - 'release': '1.fc23', - 'task_id': 9053697, - 'version': '5.2.1' + "msg": { + "attribute": "state", + "build_id": 614503, + "instance": "primary", + "name": "plasma-systemsettings", + "new": 1, + "old": 0, + "owner": "dvratil", + "release": "1.fc23", + "task_id": 9053697, + "version": "5.2.1", }, - 'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71', - 'timestamp': 1424789698.0, - 'topic': 'org.fedoraproject.prod.buildsys.build.state.change' + "msg_id": "2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71", + "timestamp": 1424789698.0, + "topic": "org.fedoraproject.prod.buildsys.build.state.change", } msg = messaging.FedmsgMessageParser().parse(buildsys_state_change_msg) @@ -64,11 +63,11 @@ class TestFedmsgMessaging: "user": "mbs/mbs.fedoraproject.org", "version": "0.1", "owner": "mbs/mbs.fedoraproject.org", - "release": "1.module_0c3d13fd" + "release": "1.module_0c3d13fd", }, - 'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71', - 'timestamp': 1424789698.0, - 'topic': 'org.fedoraproject.prod.buildsys.tag' + "msg_id": "2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71", + "timestamp": 1424789698.0, + "topic": "org.fedoraproject.prod.buildsys.tag", } msg = messaging.FedmsgMessageParser().parse(buildsys_tag_msg) @@ -83,11 +82,11 @@ class TestFedmsgMessaging: "instance": "primary", "repo_id": 728809, "tag": "module-f0f7e44f3c6cccab-build", - "tag_id": 653 + "tag_id": 653, }, - 'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71', - 'timestamp': 1424789698.0, - 'topic': 'org.fedoraproject.prod.buildsys.repo.done' + "msg_id": "2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71", + "timestamp": 1424789698.0, + "topic": "org.fedoraproject.prod.buildsys.repo.done", } msg = messaging.FedmsgMessageParser().parse(buildsys_tag_msg) diff --git a/tests/test_mmd_resolver.py b/tests/test_mmd_resolver.py index 0e34a80b..6804a2ce 100644 --- a/tests/test_mmd_resolver.py +++ b/tests/test_mmd_resolver.py @@ -32,7 +32,6 @@ from module_build_service import glib class TestMMDResolver: - def setup_method(self, test_method): self.mmd_resolver = MMDResolver() @@ -87,7 +86,8 @@ class TestMMDResolver: return mmd @pytest.mark.parametrize( - "deps, expected", ( + "deps, expected", + ( ([], "None"), ([{"x": []}], "module(x)"), ([{"x": ["1"]}], "(module(x) with module(x:1))"), @@ -96,7 +96,7 @@ class TestMMDResolver: ([{"x": ["-1", "2"]}], "(module(x) with module(x:2))"), ([{"x": [], "y": []}], "(module(x) and module(y))"), ([{"x": []}, {"y": []}], "(module(x) or module(y))"), - ) + ), ) def test_deps2reqs(self, deps, expected): # Sort by keys here to 
avoid unordered dicts @@ -105,48 +105,64 @@ class TestMMDResolver: assert str(reqs) == expected @pytest.mark.parametrize( - "buildrequires, expected", ( - ({"platform": []}, [ - [["platform:f28:0:c0:x86_64"], - ["platform:f29:0:c0:x86_64"]], - ]), - ({"platform": ["f28"]}, [ - [["platform:f28:0:c0:x86_64"]], - ]), - ({"platform": ["-f28"]}, [ - [["platform:f29:0:c0:x86_64"]], - ]), - ({"gtk": [], "qt": []}, [ - [["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"]], - ]), - ({"gtk": [], "qt": [], "platform": []}, [ - [["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["gtk:3:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"], - ["gtk:4:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"], - ["gtk:3:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"], - ["gtk:4:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"]], - ]), - ([{"qt": [], "platform": ["f28"]}, - {"gtk": [], "platform": ["-f28"]}], [ - [["qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"]], - [["gtk:3:0:c9:x86_64", "platform:f29:0:c0:x86_64"], - ["gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"]], - ]), - ({"mess": []}, [ - [["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"]], - ]), - ({"mess": [], "platform": []}, [ - [["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"], - ["mess:1:0:c0:x86_64", "gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"]], - ]), - ) + "buildrequires, expected", + ( + ({"platform": []}, [[["platform:f28:0:c0:x86_64"], ["platform:f29:0:c0:x86_64"]]]), + ({"platform": ["f28"]}, [[["platform:f28:0:c0:x86_64"]]]), + ({"platform": ["-f28"]}, [[["platform:f29:0:c0:x86_64"]]]), + ( + {"gtk": [], "qt": []}, + [ + [ + ["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ] + ], + ), + ( + {"gtk": [], "qt": [], "platform": []}, + [ + [ + ["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["gtk:3:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ["gtk:4:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ["gtk:3:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ["gtk:4:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ] + ], + ), + ( + [{"qt": [], "platform": ["f28"]}, {"gtk": [], "platform": ["-f28"]}], + [ + [ + ["qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ], + [ + ["gtk:3:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ["gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ], + ], + ), + ( + {"mess": []}, + [[["mess:1:0:c0:x86_64", 
"gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"]]], + ), + ( + {"mess": [], "platform": []}, + [ + [ + ["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"], + ["mess:1:0:c0:x86_64", "gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"], + ] + ], + ), + ), ) def test_solve(self, buildrequires, expected): modules = ( @@ -160,8 +176,10 @@ class TestMMDResolver: ("qt:4:0:c9", {"platform": ["f29"]}), ("qt:5:0:c8", {"platform": ["f28"]}), ("qt:5:0:c9", {"platform": ["f29"]}), - ("mess:1:0:c0", [{"gtk": ["3"], "platform": ["f28"]}, - {"gtk": ["4"], "platform": ["-f28"]}]), + ( + "mess:1:0:c0", + [{"gtk": ["3"], "platform": ["f28"]}, {"gtk": ["4"], "platform": ["-f28"]}], + ), ) for n, req in modules: self.mmd_resolver.add_modules(self._make_mmd(n, req)) @@ -169,58 +187,77 @@ class TestMMDResolver: app = self._make_mmd("app:1:0", buildrequires) expanded = self.mmd_resolver.solve(app) - expected = set(frozenset(["app:1:0:%d:src" % c] + e) - for c, exp in enumerate(expected) - for e in exp) + expected = set( + frozenset(["app:1:0:%d:src" % c] + e) for c, exp in enumerate(expected) for e in exp + ) assert expanded == expected @pytest.mark.parametrize( - "buildrequires, xmd_buildrequires, expected", ( + "buildrequires, xmd_buildrequires, expected", + ( # BR all platform streams -> build for all platform streams. - ({"platform": []}, {}, [ - [["platform:el8.2.0.z:0:c0:x86_64"], - ["platform:el8.1.0:0:c0:x86_64"], - ["platform:el8.0.0:0:c0:x86_64"], - ["platform:el7.6.0:0:c0:x86_64"]], - ]), + ( + {"platform": []}, + {}, + [ + [ + ["platform:el8.2.0.z:0:c0:x86_64"], + ["platform:el8.1.0:0:c0:x86_64"], + ["platform:el8.0.0:0:c0:x86_64"], + ["platform:el7.6.0:0:c0:x86_64"], + ] + ], + ), # BR "el8" platform stream -> build for all el8 platform streams. - ({"platform": ["el8"]}, {}, [ - [["platform:el8.2.0.z:0:c0:x86_64"], - ["platform:el8.1.0:0:c0:x86_64"], - ["platform:el8.0.0:0:c0:x86_64"]], - ]), + ( + {"platform": ["el8"]}, + {}, + [ + [ + ["platform:el8.2.0.z:0:c0:x86_64"], + ["platform:el8.1.0:0:c0:x86_64"], + ["platform:el8.0.0:0:c0:x86_64"], + ] + ], + ), # BR "el8.1.0" platfrom stream -> build just for el8.1.0. - ({"platform": ["el8"]}, ["platform:el8.1.0"], [ - [["platform:el8.1.0:0:c0:x86_64"]], - ]), + ({"platform": ["el8"]}, ["platform:el8.1.0"], [[["platform:el8.1.0:0:c0:x86_64"]]]), # BR platform:el8.1.0 and gtk:3, which is not built against el8.1.0, # but it is built only against el8.0.0 -> cherry-pick gtk:3 from el8.0.0 # and build once against platform:el8.1.0. - ({"platform": ["el8"], "gtk": ["3"]}, ["platform:el8.1.0"], [ - [["platform:el8.1.0:0:c0:x86_64", "gtk:3:0:c8:x86_64", ]], - ]), + ( + {"platform": ["el8"], "gtk": ["3"]}, + ["platform:el8.1.0"], + [[["platform:el8.1.0:0:c0:x86_64", "gtk:3:0:c8:x86_64"]]], + ), # BR platform:el8.2.0 and gtk:3, this time gtk:3 build against el8.2.0 exists # -> use both platform and gtk from el8.2.0 and build once. - ({"platform": ["el8"], "gtk": ["3"]}, ["platform:el8.2.0.z"], [ - [["platform:el8.2.0.z:0:c0:x86_64", "gtk:3:1:c8:x86_64", ]], - ]), + ( + {"platform": ["el8"], "gtk": ["3"]}, + ["platform:el8.2.0.z"], + [[["platform:el8.2.0.z:0:c0:x86_64", "gtk:3:1:c8:x86_64"]]], + ), # BR platform:el8.2.0 and mess:1 which is built against platform:el8.1.0 and # requires gtk:3 which is built against platform:el8.2.0 and platform:el8.0.0 # -> Use platform:el8.2.0 and # -> cherry-pick mess:1 from el8.1.0 and # -> use gtk:3:1 from el8.2.0. 
- ({"platform": ["el8"], "mess": ["1"]}, ["platform:el8.2.0.z"], [ - [["platform:el8.2.0.z:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:1:c8:x86_64", ]], - ]), + ( + {"platform": ["el8"], "mess": ["1"]}, + ["platform:el8.2.0.z"], + [[["platform:el8.2.0.z:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:1:c8:x86_64"]]], + ), # BR platform:el8.1.0 and mess:1 which is built against platform:el8.1.0 and # requires gtk:3 which is built against platform:el8.2.0 and platform:el8.0.0 # -> Use platform:el8.1.0 and # -> Used mess:1 from el8.1.0 and # -> cherry-pick gtk:3:0 from el8.0.0. - ({"platform": ["el8"], "mess": ["1"]}, ["platform:el8.1.0"], [ - [["platform:el8.1.0:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", ]], - ]), + ( + {"platform": ["el8"], "mess": ["1"]}, + ["platform:el8.1.0"], + [[["platform:el8.1.0:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64"]]], + ), # BR platform:el8.0.0 and mess:1 which is built against platform:el8.1.0 and # requires gtk:3 which is built against platform:el8.2.0 and platform:el8.0.0 # -> No valid combination, because mess:1 is only available in el8.1.0 and later. @@ -230,7 +267,7 @@ class TestMMDResolver: # ({"platform": ["el8"], "gtk": ["3"]}, {}, [ # [["platform:el8.2.0:0:c0:x86_64", "gtk:3:1:c8:x86_64"]], # ]), - ) + ), ) def test_solve_virtual_streams(self, buildrequires, xmd_buildrequires, expected): modules = ( @@ -244,8 +281,7 @@ class TestMMDResolver: ("mess:1:0:c0", [{"gtk": ["3"], "platform": ["el8"]}], {"platform:el8.1.0"}, None), ) for n, req, xmd_br, virtual_streams in modules: - self.mmd_resolver.add_modules(self._make_mmd( - n, req, xmd_br, virtual_streams)) + self.mmd_resolver.add_modules(self._make_mmd(n, req, xmd_br, virtual_streams)) app = self._make_mmd("app:1:0", buildrequires, xmd_buildrequires) if not expected: @@ -255,69 +291,65 @@ class TestMMDResolver: else: expanded = self.mmd_resolver.solve(app) - expected = set(frozenset(["app:1:0:%d:src" % c] + e) - for c, exp in enumerate(expected) - for e in exp) + expected = set( + frozenset(["app:1:0:%d:src" % c] + e) for c, exp in enumerate(expected) for e in exp) assert expanded == expected - @pytest.mark.parametrize('app_buildrequires, modules, err_msg_regex', ( - # app --br--> gtk:1 --req--> bar:1* ---req---> platform:f29 - # \--br--> foo:1 --req--> bar:2* ---req--/ + @pytest.mark.parametrize( + "app_buildrequires, modules, err_msg_regex", ( - {'gtk': '1', 'foo': '1'}, + # app --br--> gtk:1 --req--> bar:1* ---req---> platform:f29 + # \--br--> foo:1 --req--> bar:2* ---req--/ ( - ('platform:f29:0:c0', {}), - ('gtk:1:1:c01', {'bar': ['1']}), - ('bar:1:0:c02', {'platform': ['f29']}), - ('foo:1:1:c03', {'bar': ['2']}), - ('bar:2:0:c04', {'platform': ['f29']}), + {"gtk": "1", "foo": "1"}, + ( + ("platform:f29:0:c0", {}), + ("gtk:1:1:c01", {"bar": ["1"]}), + ("bar:1:0:c02", {"platform": ["f29"]}), + ("foo:1:1:c03", {"bar": ["2"]}), + ("bar:2:0:c04", {"platform": ["f29"]}), + ), + "bar:1:0:c02 and bar:2:0:c04", ), - 'bar:1:0:c02 and bar:2:0:c04', - ), - # app --br--> gtk:1 --req--> bar:1* ----------req----------> platform:f29 - # \--br--> foo:1 --req--> baz:1 --req--> bar:2* --req--/ - ( - {'gtk': '1', 'foo': '1'}, + # app --br--> gtk:1 --req--> bar:1* ----------req----------> platform:f29 + # \--br--> foo:1 --req--> baz:1 --req--> bar:2* --req--/ ( - ('platform:f29:0:c0', {}), - - ('gtk:1:1:c01', {'bar': ['1']}), - ('bar:1:0:c02', {'platform': ['f29']}), - - ('foo:1:1:c03', {'baz': ['1']}), - ('baz:1:1:c04', {'bar': ['2']}), - ('bar:2:0:c05', {'platform': ['f29']}), + {"gtk": "1", 
"foo": "1"}, + ( + ("platform:f29:0:c0", {}), + ("gtk:1:1:c01", {"bar": ["1"]}), + ("bar:1:0:c02", {"platform": ["f29"]}), + ("foo:1:1:c03", {"baz": ["1"]}), + ("baz:1:1:c04", {"bar": ["2"]}), + ("bar:2:0:c05", {"platform": ["f29"]}), + ), + "bar:1:0:c02 and bar:2:0:c05", ), - 'bar:1:0:c02 and bar:2:0:c05', - ), - # Test multiple conflicts pairs are detected. - # app --br--> gtk:1 --req--> bar:1* ---------req-----------\ - # \--br--> foo:1 --req--> baz:1 --req--> bar:2* ---req---> platform:f29 - # \--br--> pkga:1 --req--> perl:5' -------req-----------/ - # \--br--> pkgb:1 --req--> perl:6' -------req-----------/ - ( - {'gtk': '1', 'foo': '1', 'pkga': '1', 'pkgb': '1'}, + # Test multiple conflicts pairs are detected. + # app --br--> gtk:1 --req--> bar:1* ---------req-----------\ + # \--br--> foo:1 --req--> baz:1 --req--> bar:2* ---req---> platform:f29 + # \--br--> pkga:1 --req--> perl:5' -------req-----------/ + # \--br--> pkgb:1 --req--> perl:6' -------req-----------/ ( - ('platform:f29:0:c0', {}), - - ('gtk:1:1:c01', {'bar': ['1']}), - ('bar:1:0:c02', {'platform': ['f29']}), - - ('foo:1:1:c03', {'baz': ['1']}), - ('baz:1:1:c04', {'bar': ['2']}), - ('bar:2:0:c05', {'platform': ['f29']}), - - ('pkga:1:0:c06', {'perl': ['5']}), - ('perl:5:0:c07', {'platform': ['f29']}), - - ('pkgb:1:0:c08', {'perl': ['6']}), - ('perl:6:0:c09', {'platform': ['f29']}), + {"gtk": "1", "foo": "1", "pkga": "1", "pkgb": "1"}, + ( + ("platform:f29:0:c0", {}), + ("gtk:1:1:c01", {"bar": ["1"]}), + ("bar:1:0:c02", {"platform": ["f29"]}), + ("foo:1:1:c03", {"baz": ["1"]}), + ("baz:1:1:c04", {"bar": ["2"]}), + ("bar:2:0:c05", {"platform": ["f29"]}), + ("pkga:1:0:c06", {"perl": ["5"]}), + ("perl:5:0:c07", {"platform": ["f29"]}), + ("pkgb:1:0:c08", {"perl": ["6"]}), + ("perl:6:0:c09", {"platform": ["f29"]}), + ), + # MMD Resolver should still catch a conflict + "bar:1:0:c02 and bar:2:0:c05", ), - # MMD Resolver should still catch a conflict - 'bar:1:0:c02 and bar:2:0:c05', ), - )) + ) def test_solve_stream_conflicts(self, app_buildrequires, modules, err_msg_regex): for n, req in modules: self.mmd_resolver.add_modules(self._make_mmd(n, req)) @@ -349,7 +381,8 @@ class TestMMDResolver: # Build only against f28 and f29, because "gtk:3" is not built against f30. 
expected = set([ - frozenset(['gtk:3:0:c8:x86_64', 'app:1:0:0:src', 'platform:f28:0:c0:x86_64']), - frozenset(['gtk:3:0:c9:x86_64', 'app:1:0:0:src', 'platform:f29:0:c0:x86_64'])]) + frozenset(["gtk:3:0:c8:x86_64", "app:1:0:0:src", "platform:f28:0:c0:x86_64"]), + frozenset(["gtk:3:0:c9:x86_64", "app:1:0:0:src", "platform:f29:0:c0:x86_64"]), + ]) assert expanded == expected diff --git a/tests/test_models/__init__.py b/tests/test_models/__init__.py index 3ce14454..d45a666e 100644 --- a/tests/test_models/__init__.py +++ b/tests/test_models/__init__.py @@ -34,7 +34,7 @@ app = module_build_service.app conf = init_config(app) -datadir = os.path.dirname(__file__) + '/data/' +datadir = os.path.dirname(__file__) + "/data/" def module_build_from_modulemd(yaml): @@ -43,22 +43,22 @@ def module_build_from_modulemd(yaml): build.name = mmd.get_name() build.stream = mmd.get_stream() build.version = mmd.get_version() - build.state = BUILD_STATES['ready'] + build.state = BUILD_STATES["ready"] build.modulemd = yaml build.koji_tag = None build.batch = 0 - build.owner = 'some_other_user' + build.owner = "some_other_user" build.time_submitted = datetime(2016, 9, 3, 12, 28, 33) build.time_modified = datetime(2016, 9, 3, 12, 28, 40) build.time_completed = None - build.rebuild_strategy = 'changed-and-after' + build.rebuild_strategy = "changed-and-after" return build def init_data(): clean_database() for filename in os.listdir(datadir): - with open(datadir + filename, 'r') as f: + with open(datadir + filename, "r") as f: yaml = f.read() build = module_build_from_modulemd(yaml) db.session.add(build) diff --git a/tests/test_models/test_models.py b/tests/test_models/test_models.py index 1391b773..e4aec75d 100644 --- a/tests/test_models/test_models.py +++ b/tests/test_models/test_models.py @@ -27,7 +27,7 @@ from mock import patch from module_build_service import conf, Modulemd from module_build_service.models import ComponentBuild, ModuleBuild, make_session from module_build_service.utils import to_text_type -from tests import (init_data as init_data_contexts, clean_database, make_module) +from tests import init_data as init_data_contexts, clean_database, make_module from tests.test_models import init_data, module_build_from_modulemd @@ -38,15 +38,16 @@ class TestModels: def test_app_sqlalchemy_events(self): with make_session(conf) as session: component_build = ComponentBuild() - component_build.package = 'before_models_committed' - component_build.scmurl = \ - ('git://pkgs.domain.local/rpms/before_models_committed?' - '#9999999999999999999999999999999999999999') - component_build.format = 'rpms' + component_build.package = "before_models_committed" + component_build.scmurl = ( + "git://pkgs.domain.local/rpms/before_models_committed?" 
+ "#9999999999999999999999999999999999999999" + ) + component_build.format = "rpms" component_build.task_id = 999999999 component_build.state = 1 - component_build.nvr = ('before_models_committed-0.0.0-0' - '.module_before_models_committed_0_0') + component_build.nvr = \ + "before_models_committed-0.0.0-0.module_before_models_committed_0_0" component_build.batch = 1 component_build.module_id = 1 @@ -66,16 +67,20 @@ class TestModels: determined""" build = ModuleBuild.query.filter_by(id=1).one() yaml_path = os.path.join( - os.path.dirname(__file__), '..', 'staged_data', 'testmodule_dependencies.yaml') + os.path.dirname(__file__), "..", "staged_data", "testmodule_dependencies.yaml") mmd = Modulemd.Module.new_from_file(yaml_path) mmd.upgrade() build.modulemd = to_text_type(mmd.dumps()) - (build.ref_build_context, build.build_context, build.runtime_context, - build.context) = ModuleBuild.contexts_from_mmd(build.modulemd) - assert build.ref_build_context == 'f6e2aeec7576196241b9afa0b6b22acf2b6873d7' - assert build.build_context == '089df24993c037e10174f3fa7342ab4dc191a4d4' - assert build.runtime_context == 'bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c' - assert build.context == '3ee22b28' + ( + build.ref_build_context, + build.build_context, + build.runtime_context, + build.context, + ) = ModuleBuild.contexts_from_mmd(build.modulemd) + assert build.ref_build_context == "f6e2aeec7576196241b9afa0b6b22acf2b6873d7" + assert build.build_context == "089df24993c037e10174f3fa7342ab4dc191a4d4" + assert build.runtime_context == "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + assert build.context == "3ee22b28" def test_siblings_property(self): """ Tests that the siblings property returns the ID of all modules with @@ -83,73 +88,75 @@ class TestModels: """ clean_database() yaml_path = os.path.join( - os.path.dirname(__file__), '..', 'staged_data', 'formatted_testmodule.yaml') + os.path.dirname(__file__), "..", "staged_data", "formatted_testmodule.yaml") mmd = Modulemd.Module.new_from_file(yaml_path) mmd.upgrade() with make_session(conf) as session: for i in range(3): build = module_build_from_modulemd(to_text_type(mmd.dumps())) - build.build_context = 'f6e2aeec7576196241b9afa0b6b22acf2b6873d' + str(i) - build.runtime_context = 'bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c' + str(i) + build.build_context = "f6e2aeec7576196241b9afa0b6b22acf2b6873d" + str(i) + build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(i) session.add(build) session.commit() build_one = ModuleBuild.query.get(2) assert build_one.siblings == [3, 4] - @pytest.mark.parametrize('stream,right_pad,expected', [ - ['f27', True, 270000.0], - ['f27.02.30', True, 270230.0], - ['f27', False, 27.0], - ['f27.02.30', False, 270230.0], - ['el8', True, 080000.0], - ['el8.1.0', True, 080100.0], - ['el8.z', True, 080000.2], - ['el8.1.0.z', True, 080100.3], - ]) - @patch.object(conf, 'stream_suffixes', new={ - r'el\d+\.z': 0.2, r'el\d+\.\d+\.\d+\.z': 0.3 - }) + @pytest.mark.parametrize( + "stream,right_pad,expected", + [ + ["f27", True, 270000.0], + ["f27.02.30", True, 270230.0], + ["f27", False, 27.0], + ["f27.02.30", False, 270230.0], + ["el8", True, 080000.0], + ["el8.1.0", True, 080100.0], + ["el8.z", True, 080000.2], + ["el8.1.0.z", True, 080100.3], + ], + ) + @patch.object(conf, "stream_suffixes", new={r"el\d+\.z": 0.2, r"el\d+\.\d+\.\d+\.z": 0.3}) def test_get_stream_version(self, stream, right_pad, expected): assert expected == ModuleBuild.get_stream_version(stream, right_pad) class TestModelsGetStreamsContexts: - def 
test_get_last_build_in_all_streams(self): init_data_contexts(contexts=True) with make_session(conf) as session: - builds = ModuleBuild.get_last_build_in_all_streams( - session, "nginx") - builds = ["%s:%s:%s" % (build.name, build.stream, str(build.version)) - for build in builds] + builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx") + builds = [ + "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds + ] assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)] def test_get_last_build_in_all_stream_last_version(self): init_data_contexts(contexts=False) with make_session(conf) as session: - builds = ModuleBuild.get_last_build_in_all_streams( - session, "nginx") - builds = ["%s:%s:%s" % (build.name, build.stream, str(build.version)) - for build in builds] + builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx") + builds = [ + "%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds + ] assert builds == ["nginx:1:11"] def test_get_last_builds_in_stream(self): init_data_contexts(contexts=True) with make_session(conf) as session: - builds = ModuleBuild.get_last_builds_in_stream( - session, "nginx", "1") - builds = ["%s:%s:%s:%s" % (build.name, build.stream, str(build.version), - build.context) for build in builds] - assert builds == ['nginx:1:3:d5a6c0fa', 'nginx:1:3:795e97c1'] + builds = ModuleBuild.get_last_builds_in_stream(session, "nginx", "1") + builds = [ + "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context) + for build in builds + ] + assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"] def test_get_last_builds_in_stream_version_lte(self): init_data_contexts(1, multiple_stream_versions=True) with make_session(conf) as session: - builds = ModuleBuild.get_last_builds_in_stream_version_lte( - session, "platform", 290100) - builds = set(["%s:%s:%s:%s" % (build.name, build.stream, str(build.version), - build.context) for build in builds]) - assert builds == set(['platform:f29.0.0:3:00000000', 'platform:f29.1.0:3:00000000']) + builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290100) + builds = set([ + "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context) + for build in builds + ]) + assert builds == set(["platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"]) def test_get_last_builds_in_stream_version_lte_different_versions(self): """ @@ -166,12 +173,16 @@ class TestModelsGetStreamsContexts: make_module("platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"]) with make_session(conf) as session: - builds = ModuleBuild.get_last_builds_in_stream_version_lte( - session, "platform", 290200) - builds = set(["%s:%s:%s:%s" % (build.name, build.stream, str(build.version), - build.context) for build in builds]) - assert builds == set(['platform:f29.1.0:15:c11', 'platform:f29.1.0:15:c11.another', - 'platform:f29.2.0:1:c11']) + builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290200) + builds = set([ + "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context) + for build in builds + ]) + assert builds == set([ + "platform:f29.1.0:15:c11", + "platform:f29.1.0:15:c11.another", + "platform:f29.2.0:1:c11", + ]) def test_get_module_count(self): clean_database(False) diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 0d48b26c..8d1139b7 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -41,47 +41,52 @@ class TestViews: init_data(2) def 
test_metrics(self): - rv = self.client.get('/module-build-service/1/monitor/metrics') + rv = self.client.get("/module-build-service/1/monitor/metrics") - assert len([l for l in rv.get_data(as_text=True).splitlines() - if (l.startswith('# TYPE') and '_created ' not in l)]) == num_of_metrics + count = len([ + l for l in rv.get_data(as_text=True).splitlines() + if (l.startswith("# TYPE") and "_created " not in l) + ]) + assert count == num_of_metrics def test_standalone_metrics_server_disabled_by_default(): with pytest.raises(requests.exceptions.ConnectionError): - requests.get('http://127.0.0.1:10040/metrics') + requests.get("http://127.0.0.1:10040/metrics") def test_standalone_metrics_server(): - os.environ['MONITOR_STANDALONE_METRICS_SERVER_ENABLE'] = 'true' + os.environ["MONITOR_STANDALONE_METRICS_SERVER_ENABLE"] = "true" reload_module(module_build_service.monitor) - r = requests.get('http://127.0.0.1:10040/metrics') - - assert len([l for l in r.text.splitlines() - if (l.startswith('# TYPE') and '_created ' not in l)]) == num_of_metrics + r = requests.get("http://127.0.0.1:10040/metrics") + count = len([ + l for l in r.text.splitlines() + if (l.startswith("# TYPE") and "_created " not in l) + ]) + assert count == num_of_metrics -@mock.patch('module_build_service.monitor.builder_failed_counter.labels') -@mock.patch('module_build_service.monitor.builder_success_counter.inc') +@mock.patch("module_build_service.monitor.builder_failed_counter.labels") +@mock.patch("module_build_service.monitor.builder_success_counter.inc") def test_monitor_state_changing_success(succ_cnt, failed_cnt): conf = mbs_config.Config(TestConfiguration) - b = make_module('pkg:0.1:1:c1', requires_list={'platform': 'el8'}) - b.transition(conf, models.BUILD_STATES['wait']) - b.transition(conf, models.BUILD_STATES['build']) - b.transition(conf, models.BUILD_STATES['done']) + b = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"}) + b.transition(conf, models.BUILD_STATES["wait"]) + b.transition(conf, models.BUILD_STATES["build"]) + b.transition(conf, models.BUILD_STATES["done"]) succ_cnt.assert_called_once() failed_cnt.assert_not_called() -@mock.patch('module_build_service.monitor.builder_failed_counter.labels') -@mock.patch('module_build_service.monitor.builder_success_counter.inc') +@mock.patch("module_build_service.monitor.builder_failed_counter.labels") +@mock.patch("module_build_service.monitor.builder_success_counter.inc") def test_monitor_state_changing_failure(succ_cnt, failed_cnt): - failure_type = 'user' + failure_type = "user" conf = mbs_config.Config(TestConfiguration) - b = make_module('pkg:0.1:1:c1', requires_list={'platform': 'el8'}) - b.transition(conf, models.BUILD_STATES['wait']) - b.transition(conf, models.BUILD_STATES['build']) - b.transition(conf, models.BUILD_STATES['failed'], failure_type=failure_type) + b = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"}) + b.transition(conf, models.BUILD_STATES["wait"]) + b.transition(conf, models.BUILD_STATES["build"]) + b.transition(conf, models.BUILD_STATES["failed"], failure_type=failure_type) succ_cnt.assert_not_called() failed_cnt.assert_called_once_with(reason=failure_type) diff --git a/tests/test_resolver/test_db.py b/tests/test_resolver/test_db.py index ade8a414..03a09b59 100644 --- a/tests/test_resolver/test_db.py +++ b/tests/test_resolver/test_db.py @@ -38,64 +38,63 @@ base_dir = os.path.join(os.path.dirname(__file__), "..") class TestDBModule: - def setup_method(self): tests.reuse_component_init_data() def 
test_get_buildrequired_modulemds(self): - mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml')) - mmd.set_stream('f30.1.3') + mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml")) + mmd.set_stream("f30.1.3") import_mmd(db.session, mmd) - platform_f300103 = ModuleBuild.query.filter_by(stream='f30.1.3').one() + platform_f300103 = ModuleBuild.query.filter_by(stream="f30.1.3").one() mmd.set_name("testmodule") mmd.set_stream("master") mmd.set_version(20170109091357) mmd.set_context("123") build = ModuleBuild( - name='testmodule', - stream='master', + name="testmodule", + stream="master", version=20170109091357, state=5, - build_context='dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3', - runtime_context='ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7', - context='7c29193d', - koji_tag='module-testmodule-master-20170109091357-7c29193d', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79', + build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3", + runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7", + context="7c29193d", + koji_tag="module-testmodule-master-20170109091357-7c29193d", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79", batch=3, - owner='Dr. Pepper', + owner="Dr. Pepper", time_submitted=datetime(2018, 11, 15, 16, 8, 18), time_modified=datetime(2018, 11, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', - modulemd=to_text_type(mmd.dumps()) + rebuild_strategy="changed-and-after", + modulemd=to_text_type(mmd.dumps()), ) build.buildrequires.append(platform_f300103) db.session.add(build) db.session.commit() - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") result = resolver.get_buildrequired_modulemds( "testmodule", "master", platform_f300103.mmd().dup_nsvc()) nsvcs = set([m.dup_nsvc() for m in result]) - assert nsvcs == set(['testmodule:master:20170109091357:123']) + assert nsvcs == set(["testmodule:master:20170109091357:123"]) - @pytest.mark.parametrize('stream_versions', [False, True]) + @pytest.mark.parametrize("stream_versions", [False, True]) def test_get_module_modulemds_stream_versions(self, stream_versions): tests.init_data(1, multiple_stream_versions=True) - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") result = resolver.get_module_modulemds( "platform", "f29.1.0", stream_version_lte=stream_versions) nsvcs = set([mmd.dup_nsvc() for mmd in result]) if stream_versions: - assert nsvcs == set(['platform:f29.1.0:3:00000000', 'platform:f29.0.0:3:00000000']) + assert nsvcs == set(["platform:f29.1.0:3:00000000", "platform:f29.0.0:3:00000000"]) else: - assert nsvcs == set(['platform:f29.1.0:3:00000000']) + assert nsvcs == set(["platform:f29.1.0:3:00000000"]) - @pytest.mark.parametrize('empty_buildrequires', [False, True]) + @pytest.mark.parametrize("empty_buildrequires", [False, True]) def test_get_module_build_dependencies(self, empty_buildrequires): """ Tests that the buildrequires of testmodule are returned """ - expected = set(['module-f28-build']) + expected = set(["module-f28-build"]) module = models.ModuleBuild.query.get(2) if empty_buildrequires: expected = set() @@ -104,14 +103,14 @@ class TestDBModule: # Wipe out the dependencies mmd.set_dependencies() xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires'] = {} + xmd["mbs"]["buildrequires"] = {} 
mmd.set_xmd(glib.dict_values(xmd)) module.modulemd = to_text_type(mmd.dumps()) db.session.add(module) db.session.commit() - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") result = resolver.get_module_build_dependencies( - 'testmodule', 'master', '20170109091357', '78e4a6fd').keys() + "testmodule", "master", "20170109091357", "78e4a6fd").keys() assert set(result) == expected def test_get_module_build_dependencies_recursive(self): @@ -121,126 +120,150 @@ class TestDBModule: # Add testmodule2 that requires testmodule module = models.ModuleBuild.query.get(3) mmd = module.mmd() - mmd.set_name('testmodule2') + mmd.set_name("testmodule2") mmd.set_version(20180123171545) requires = mmd.get_dependencies()[0].get_requires() - requires['testmodule'] = Modulemd.SimpleSet() - requires['testmodule'].add('master') + requires["testmodule"] = Modulemd.SimpleSet() + requires["testmodule"].add("master") mmd.get_dependencies()[0].set_requires(requires) xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['requires']['testmodule'] = { - 'filtered_rpms': [], - 'ref': '620ec77321b2ea7b0d67d82992dda3e1d67055b4', - 'stream': 'master', - 'version': '20180205135154' + xmd["mbs"]["requires"]["testmodule"] = { + "filtered_rpms": [], + "ref": "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + "stream": "master", + "version": "20180205135154", } mmd.set_xmd(glib.dict_values(xmd)) module.modulemd = to_text_type(mmd.dumps()) - module.name = 'testmodule2' + module.name = "testmodule2" module.version = str(mmd.get_version()) - module.koji_tag = 'module-ae2adf69caf0e1b6' + module.koji_tag = "module-ae2adf69caf0e1b6" - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") result = resolver.get_module_build_dependencies( - 'testmodule2', 'master', '20180123171545', 'c40c156c').keys() - assert set(result) == set(['module-f28-build']) + "testmodule2", "master", "20180123171545", "c40c156c").keys() + assert set(result) == set(["module-f28-build"]) - @patch("module_build_service.config.Config.system", - new_callable=PropertyMock, return_value="test") - @patch("module_build_service.config.Config.mock_resultsdir", - new_callable=PropertyMock, - return_value=os.path.join(base_dir, 'staged_data', "local_builds")) - def test_get_module_build_dependencies_recursive_requires( - self, resultdir, conf_system): + @patch( + "module_build_service.config.Config.system", new_callable=PropertyMock, return_value="test" + ) + @patch( + "module_build_service.config.Config.mock_resultsdir", + new_callable=PropertyMock, + return_value=os.path.join(base_dir, "staged_data", "local_builds"), + ) + def test_get_module_build_dependencies_recursive_requires(self, resultdir, conf_system): """ Tests that it returns the requires of the buildrequires recursively """ with app.app_context(): utils.load_local_builds(["platform", "parent", "child", "testmodule"]) - build = models.ModuleBuild.local_modules( - db.session, "child", "master") - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + build = models.ModuleBuild.local_modules(db.session, "child", "master") + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys() - local_path = os.path.join(base_dir, 'staged_data', "local_builds") + local_path = os.path.join(base_dir, "staged_data", "local_builds") - 
expected = [ - os.path.join( - local_path, - 'module-parent-master-20170816080815/results'), - ] + expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")] assert set(result) == set(expected) def test_resolve_requires(self): build = models.ModuleBuild.query.get(2) - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') - result = resolver.resolve_requires([":".join([ - build.name, build.stream, build.version, build.context])]) + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") + result = resolver.resolve_requires( + [":".join([build.name, build.stream, build.version, build.context])] + ) assert result == { - 'testmodule': { - 'stream': 'master', 'version': '20170109091357', 'context': u'78e4a6fd', - 'ref': 'ff1ea79fc952143efeed1851aa0aa006559239ba', - 'koji_tag': 'module-testmodule-master-20170109091357-78e4a6fd' - }} + "testmodule": { + "stream": "master", + "version": "20170109091357", + "context": u"78e4a6fd", + "ref": "ff1ea79fc952143efeed1851aa0aa006559239ba", + "koji_tag": "module-testmodule-master-20170109091357-78e4a6fd", + } + } def test_resolve_profiles(self): """ Tests that the profiles get resolved recursively """ mmd = models.ModuleBuild.query.get(2).mmd() - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') - result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot')) + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") + result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot")) expected = { - 'buildroot': - set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed', - 'findutils', 'util-linux', 'bash', 'info', 'bzip2', - 'grep', 'redhat-rpm-config', 'fedora-release', - 'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils', - 'which', 'rpm-build', 'gzip', 'gcc-c++']), - 'srpm-buildroot': - set(['shadow-utils', 'redhat-rpm-config', 'rpm-build', - 'fedora-release', 'fedpkg-minimal', 'gnupg2', - 'bash']) + "buildroot": set([ + "unzip", + "tar", + "cpio", + "gawk", + "gcc", + "xz", + "sed", + "findutils", + "util-linux", + "bash", + "info", + "bzip2", + "grep", + "redhat-rpm-config", + "fedora-release", + "diffutils", + "make", + "patch", + "shadow-utils", + "coreutils", + "which", + "rpm-build", + "gzip", + "gcc-c++", + ]), + "srpm-buildroot": set([ + "shadow-utils", + "redhat-rpm-config", + "rpm-build", + "fedora-release", + "fedpkg-minimal", + "gnupg2", + "bash", + ]), } assert result == expected - @patch("module_build_service.config.Config.system", - new_callable=PropertyMock, return_value="test") - @patch("module_build_service.config.Config.mock_resultsdir", - new_callable=PropertyMock, - return_value=os.path.join(base_dir, 'staged_data', "local_builds")) + @patch( + "module_build_service.config.Config.system", new_callable=PropertyMock, return_value="test" + ) + @patch( + "module_build_service.config.Config.mock_resultsdir", + new_callable=PropertyMock, + return_value=os.path.join(base_dir, "staged_data", "local_builds"), + ) def test_resolve_profiles_local_module(self, local_builds, conf_system): """ Test that profiles get resolved recursively on local builds """ with app.app_context(): - utils.load_local_builds(['platform']) + utils.load_local_builds(["platform"]) mmd = models.ModuleBuild.query.get(2).mmd() - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') - result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot')) - expected = { - 'buildroot': - set(['foo']), - 'srpm-buildroot': - 
set(['bar']) - } + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") + result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot")) + expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])} assert result == expected def test_get_latest_with_virtual_stream(self): tests.init_data(1, multiple_stream_versions=True) - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') - mmd = resolver.get_latest_with_virtual_stream('platform', 'f29') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") + mmd = resolver.get_latest_with_virtual_stream("platform", "f29") assert mmd - assert mmd.get_stream() == 'f29.2.0' + assert mmd.get_stream() == "f29.2.0" def test_get_latest_with_virtual_stream_none(self): - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') - mmd = resolver.get_latest_with_virtual_stream('platform', 'doesnotexist') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") + mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist") assert not mmd def test_get_module_count(self): - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') - count = resolver.get_module_count(name='platform', stream='f28') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db") + count = resolver.get_module_count(name="platform", stream="f28") assert count == 1 diff --git a/tests/test_resolver/test_local.py b/tests/test_resolver/test_local.py index feddd51a..fbcc4479 100644 --- a/tests/test_resolver/test_local.py +++ b/tests/test_resolver/test_local.py @@ -35,42 +35,41 @@ base_dir = os.path.join(os.path.dirname(__file__), "..") class TestLocalResolverModule: - def setup_method(self): tests.reuse_component_init_data() def test_get_buildrequired_modulemds(self): - mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml')) - mmd.set_stream('f8') + mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml")) + mmd.set_stream("f8") import_mmd(db.session, mmd) - platform_f8 = ModuleBuild.query.filter_by(stream='f8').one() + platform_f8 = ModuleBuild.query.filter_by(stream="f8").one() mmd.set_name("testmodule") mmd.set_stream("master") mmd.set_version(20170109091357) mmd.set_context("123") build = ModuleBuild( - name='testmodule', - stream='master', + name="testmodule", + stream="master", version=20170109091357, state=5, - build_context='dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3', - runtime_context='ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7', - context='7c29193d', - koji_tag='module-testmodule-master-20170109091357-7c29193d', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79', + build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3", + runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7", + context="7c29193d", + koji_tag="module-testmodule-master-20170109091357-7c29193d", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79", batch=3, - owner='Dr. Pepper', + owner="Dr. 
Pepper", time_submitted=datetime(2018, 11, 15, 16, 8, 18), time_modified=datetime(2018, 11, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', - modulemd=to_text_type(mmd.dumps()) + rebuild_strategy="changed-and-after", + modulemd=to_text_type(mmd.dumps()), ) db.session.add(build) db.session.commit() - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='local') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="local") result = resolver.get_buildrequired_modulemds( "testmodule", "master", platform_f8.mmd().dup_nsvc()) nsvcs = set([m.dup_nsvc() for m in result]) - assert nsvcs == set(['testmodule:master:20170109091357:9c690d0e', - 'testmodule:master:20170109091357:123']) + assert nsvcs == set( + ["testmodule:master:20170109091357:9c690d0e", "testmodule:master:20170109091357:123"]) diff --git a/tests/test_resolver/test_mbs.py b/tests/test_resolver/test_mbs.py index de7dc6a0..1d7d9a3e 100644 --- a/tests/test_resolver/test_mbs.py +++ b/tests/test_resolver/test_mbs.py @@ -33,7 +33,6 @@ base_dir = os.path.join(os.path.dirname(__file__), "..") class TestMBSModule: - @patch("requests.Session") def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e): """ Tests for querying a module from mbs """ @@ -46,20 +45,20 @@ class TestMBSModule: "stream": "master", "version": "20180205135154", "context": "9c690d0e", - "modulemd": testmodule_mmd_9c690d0e + "modulemd": testmodule_mmd_9c690d0e, } ], - "meta": {"next": None} + "meta": {"next": None}, } mock_session().get.return_value = mock_res - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") module_mmds = resolver.get_module_modulemds( - 'testmodule', 'master', '20180205135154', '9c690d0e', virtual_streams=["f28"]) + "testmodule", "master", "20180205135154", "9c690d0e", virtual_streams=["f28"] + ) nsvcs = set( - '{}:{}:{}:{}'.format(m.peek_name(), m.peek_stream(), - m.peek_version(), m.peek_context()) + "{}:{}:{}:{}".format(m.peek_name(), m.peek_stream(), m.peek_version(), m.peek_context()) for m in module_mmds ) expected = set(["testmodule:master:20180205135154:9c690d0e"]) @@ -80,8 +79,9 @@ class TestMBSModule: assert nsvcs == expected @patch("requests.Session") - def test_get_module_modulemds_partial(self, mock_session, testmodule_mmd_9c690d0e, - testmodule_mmd_c2c572ed): + def test_get_module_modulemds_partial( + self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed + ): """ Test for querying MBS without the context of a module """ version = "20180205135154" @@ -95,29 +95,30 @@ class TestMBSModule: "stream": "master", "version": version, "context": "9c690d0e", - "modulemd": testmodule_mmd_9c690d0e + "modulemd": testmodule_mmd_9c690d0e, }, { "name": "testmodule", "stream": "master", "version": version, "context": "c2c572ed", - "modulemd": testmodule_mmd_c2c572ed - } + "modulemd": testmodule_mmd_c2c572ed, + }, ], - "meta": {"next": None} + "meta": {"next": None}, } mock_session().get.return_value = mock_res - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') - ret = resolver.get_module_modulemds('testmodule', 'master', version) + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") + ret = resolver.get_module_modulemds("testmodule", "master", version) nsvcs = set( - '{}:{}:{}:{}'.format(m.peek_name(), m.peek_stream(), - m.peek_version(), m.peek_context()) + "{}:{}:{}:{}".format(m.peek_name(), m.peek_stream(), m.peek_version(), 
m.peek_context()) for m in ret ) - expected = set(["testmodule:master:20180205135154:9c690d0e", - "testmodule:master:20180205135154:c2c572ed"]) + expected = set([ + "testmodule:master:20180205135154:9c690d0e", + "testmodule:master:20180205135154:c2c572ed", + ]) mbs_url = tests.conf.mbs_url expected_query = { "name": "testmodule", @@ -127,14 +128,15 @@ class TestMBSModule: "order_desc_by": "version", "page": 1, "per_page": 10, - "state": "ready" + "state": "ready", } mock_session().get.assert_called_once_with(mbs_url, params=expected_query) assert nsvcs == expected @patch("requests.Session") - def test_get_module_build_dependencies(self, mock_session, platform_mmd, - testmodule_mmd_9c690d0e): + def test_get_module_build_dependencies( + self, mock_session, platform_mmd, testmodule_mmd_9c690d0e + ): """ Tests that we return just direct build-time dependencies of testmodule. """ @@ -148,11 +150,12 @@ class TestMBSModule: "stream": "master", "version": "20180205135154", "context": "9c690d0e", - "modulemd": testmodule_mmd_9c690d0e + "modulemd": testmodule_mmd_9c690d0e, } ], - "meta": {"next": None} - }, { + "meta": {"next": None}, + }, + { "items": [ { "name": "platform", @@ -160,57 +163,63 @@ class TestMBSModule: "version": "3", "context": "00000000", "modulemd": platform_mmd, - "koji_tag": "module-f28-build" + "koji_tag": "module-f28-build", } ], - "meta": {"next": None} - } + "meta": {"next": None}, + }, ] mock_session().get.return_value = mock_res - expected = set(['module-f28-build']) - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') + expected = set(["module-f28-build"]) + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") result = resolver.get_module_build_dependencies( - 'testmodule', 'master', '20180205135154', '9c690d0e').keys() + "testmodule", "master", "20180205135154", "9c690d0e").keys() - expected_queries = [{ - "name": "testmodule", - "stream": "master", - "version": "20180205135154", - "context": "9c690d0e", - "verbose": True, - "order_desc_by": "version", - "page": 1, - "per_page": 10, - "state": "ready" - }, { - "name": "platform", - "stream": "f28", - "version": "3", - "context": "00000000", - "verbose": True, - "order_desc_by": "version", - "page": 1, - "per_page": 10, - "state": "ready" - }] + expected_queries = [ + { + "name": "testmodule", + "stream": "master", + "version": "20180205135154", + "context": "9c690d0e", + "verbose": True, + "order_desc_by": "version", + "page": 1, + "per_page": 10, + "state": "ready", + }, + { + "name": "platform", + "stream": "f28", + "version": "3", + "context": "00000000", + "verbose": True, + "order_desc_by": "version", + "page": 1, + "per_page": 10, + "state": "ready", + }, + ] mbs_url = tests.conf.mbs_url - expected_calls = [call(mbs_url, params=expected_queries[0]), - call(mbs_url, params=expected_queries[1])] + expected_calls = [ + call(mbs_url, params=expected_queries[0]), + call(mbs_url, params=expected_queries[1]), + ] mock_session().get.mock_calls = expected_calls assert mock_session().get.call_count == 2 assert set(result) == expected @patch("requests.Session") - def test_get_module_build_dependencies_empty_buildrequires(self, mock_session, - testmodule_mmd_9c690d0e): + def test_get_module_build_dependencies_empty_buildrequires( + self, mock_session, testmodule_mmd_9c690d0e + ): mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e) # Wipe out the dependencies mmd.set_dependencies() xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires'] = {} + 
xmd["mbs"]["buildrequires"] = {} mmd.set_xmd(glib.dict_values(xmd)) mock_res = Mock() @@ -224,10 +233,10 @@ class TestMBSModule: "version": "20180205135154", "context": "9c690d0e", "modulemd": mmd.dumps(), - "build_deps": [] + "build_deps": [], } ], - "meta": {"next": None} + "meta": {"next": None}, } ] @@ -235,9 +244,10 @@ class TestMBSModule: expected = set() - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") result = resolver.get_module_build_dependencies( - 'testmodule', 'master', '20180205135154', '9c690d0e').keys() + "testmodule", "master", "20180205135154", "9c690d0e" + ).keys() mbs_url = tests.conf.mbs_url expected_query = { "name": "testmodule", @@ -248,7 +258,7 @@ class TestMBSModule: "order_desc_by": "version", "page": 1, "per_page": 10, - "state": "ready" + "state": "ready", } mock_session().get.assert_called_once_with(mbs_url, params=expected_query) assert set(result) == expected @@ -265,27 +275,53 @@ class TestMBSModule: "stream": "f28", "version": "3", "context": "00000000", - "modulemd": platform_mmd + "modulemd": platform_mmd, } ], - "meta": {"next": None} + "meta": {"next": None}, } mock_session().get.return_value = mock_res - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') - result = resolver.resolve_profiles(formatted_testmodule_mmd, - ('buildroot', 'srpm-buildroot')) + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") + result = resolver.resolve_profiles( + formatted_testmodule_mmd, ("buildroot", "srpm-buildroot") + ) expected = { - 'buildroot': - set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed', - 'findutils', 'util-linux', 'bash', 'info', 'bzip2', - 'grep', 'redhat-rpm-config', 'fedora-release', - 'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils', - 'which', 'rpm-build', 'gzip', 'gcc-c++']), - 'srpm-buildroot': - set(['shadow-utils', 'redhat-rpm-config', 'rpm-build', - 'fedora-release', 'fedpkg-minimal', 'gnupg2', - 'bash']) + "buildroot": set([ + "unzip", + "tar", + "cpio", + "gawk", + "gcc", + "xz", + "sed", + "findutils", + "util-linux", + "bash", + "info", + "bzip2", + "grep", + "redhat-rpm-config", + "fedora-release", + "diffutils", + "make", + "patch", + "shadow-utils", + "coreutils", + "which", + "rpm-build", + "gzip", + "gcc-c++", + ]), + "srpm-buildroot": set([ + "shadow-utils", + "redhat-rpm-config", + "rpm-build", + "fedora-release", + "fedpkg-minimal", + "gnupg2", + "bash", + ]), } mbs_url = tests.conf.mbs_url @@ -298,92 +334,84 @@ class TestMBSModule: "order_desc_by": "version", "page": 1, "per_page": 10, - "state": "ready" + "state": "ready", } mock_session().get.assert_called_once_with(mbs_url, params=expected_query) assert result == expected - @patch("module_build_service.config.Config.system", - new_callable=PropertyMock, return_value="test") - @patch("module_build_service.config.Config.mock_resultsdir", - new_callable=PropertyMock, - return_value=os.path.join(base_dir, 'staged_data', "local_builds")) - def test_resolve_profiles_local_module(self, local_builds, conf_system, - formatted_testmodule_mmd): + @patch( + "module_build_service.config.Config.system", new_callable=PropertyMock, return_value="test" + ) + @patch( + "module_build_service.config.Config.mock_resultsdir", + new_callable=PropertyMock, + return_value=os.path.join(base_dir, "staged_data", "local_builds"), + ) + def test_resolve_profiles_local_module( + self, local_builds, conf_system, formatted_testmodule_mmd + ): 
tests.clean_database() with app.app_context(): - module_build_service.utils.load_local_builds(['platform']) + module_build_service.utils.load_local_builds(["platform"]) - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') - result = resolver.resolve_profiles(formatted_testmodule_mmd, - ('buildroot', 'srpm-buildroot')) - expected = { - 'buildroot': - set(['foo']), - 'srpm-buildroot': - set(['bar']) - } + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") + result = resolver.resolve_profiles( + formatted_testmodule_mmd, ("buildroot", "srpm-buildroot")) + expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])} assert result == expected def test_get_empty_buildrequired_modulemds(self): - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") - with patch.object(resolver, 'session') as session: + with patch.object(resolver, "session") as session: session.get.return_value = Mock(ok=True) - session.get.return_value.json.return_value = { - 'items': [], 'meta': {'next': None} - } + session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}} - result = resolver.get_buildrequired_modulemds( - 'nodejs', '10', 'platform:el8:1:00000000') + result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000") assert [] == result def test_get_buildrequired_modulemds(self): - resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs') + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs") - with patch.object(resolver, 'session') as session: + with patch.object(resolver, "session") as session: session.get.return_value = Mock(ok=True) session.get.return_value.json.return_value = { - 'items': [{ - 'name': 'nodejs', 'stream': '10', - 'version': 1, 'context': 'c1', - 'modulemd': tests.make_module( - 'nodejs:10:1:c1', store_to_db=False).dumps(), - }, { - 'name': 'nodejs', 'stream': '10', - 'version': 2, 'context': 'c1', - 'modulemd': tests.make_module( - 'nodejs:10:2:c1', store_to_db=False).dumps(), - }], 'meta': {'next': None} + "items": [ + { + "name": "nodejs", + "stream": "10", + "version": 1, + "context": "c1", + "modulemd": tests.make_module("nodejs:10:1:c1", store_to_db=False).dumps(), + }, + { + "name": "nodejs", + "stream": "10", + "version": 2, + "context": "c1", + "modulemd": tests.make_module("nodejs:10:2:c1", store_to_db=False).dumps(), + }, + ], + "meta": {"next": None}, } - result = resolver.get_buildrequired_modulemds( - 'nodejs', '10', 'platform:el8:1:00000000') + result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000") assert 1 == len(result) mmd = result[0] - assert 'nodejs' == mmd.get_name() - assert '10' == mmd.get_stream() + assert "nodejs" == mmd.get_name() + assert "10" == mmd.get_stream() assert 1 == mmd.get_version() - assert 'c1' == mmd.get_context() + assert "c1" == mmd.get_context() @patch("requests.Session") def test_get_module_count(self, mock_session): mock_res = Mock() mock_res.ok.return_value = True mock_res.json.return_value = { - "items": [ - { - "name": "platform", - "stream": "f28", - "version": "3", - "context": "00000000", - } - ], - "meta": { - "total": 5 - } + "items": [{"name": "platform", "stream": "f28", "version": "3", "context": "00000000"}], + "meta": {"total": 5}, } mock_session.return_value.get.return_value = mock_res @@ -393,13 +421,7 @@ class TestMBSModule: assert count == 5 
mock_session.return_value.get.assert_called_once_with( "https://mbs.fedoraproject.org/module-build-service/1/module-builds/", - params={ - "name": "platform", - "page": 1, - "per_page": 1, - "short": True, - "stream": "f28", - } + params={"name": "platform", "page": 1, "per_page": 1, "short": True, "stream": "f28"}, ) @patch("requests.Session") @@ -416,9 +438,7 @@ class TestMBSModule: "version": "3", } ], - "meta": { - "total": 5 - } + "meta": {"total": 5}, } mock_session.return_value.get.return_value = mock_res @@ -435,5 +455,5 @@ class TestMBSModule: "per_page": 1, "verbose": True, "virtual_stream": "virtualf28", - } + }, ) diff --git a/tests/test_scheduler/test_consumer.py b/tests/test_scheduler/test_consumer.py index 68eea6f5..3d9bcb6a 100644 --- a/tests/test_scheduler/test_consumer.py +++ b/tests/test_scheduler/test_consumer.py @@ -24,7 +24,6 @@ from module_build_service.messaging import KojiTagChange, KojiRepoChange class TestConsumer: - def test_get_abstracted_msg_fedmsg(self): """ Test the output of get_abstracted_msg() when using the @@ -51,17 +50,17 @@ class TestConsumer: "user": "bodhi", "version": "15.1.0", "owner": "orion", - "release": "1.el7" - } + "release": "1.el7", + }, } msg_obj = consumer.get_abstracted_msg(msg) assert isinstance(msg_obj, KojiTagChange) - assert msg_obj.msg_id == msg['msg_id'] - assert msg_obj.tag == msg['msg']['tag'] - assert msg_obj.artifact == msg['msg']['name'] + assert msg_obj.msg_id == msg["msg_id"] + assert msg_obj.tag == msg["msg"]["tag"] + assert msg_obj.artifact == msg["msg"]["name"] - @patch('module_build_service.scheduler.consumer.models') - @patch.object(MBSConsumer, 'process_message') + @patch("module_build_service.scheduler.consumer.models") + @patch.object(MBSConsumer, "process_message") def test_consume_fedmsg(self, process_message, models): """ Test the MBSConsumer.consume() method when using the @@ -86,13 +85,13 @@ class TestConsumer: "instance": "primary", "repo_id": 400859, "tag": "f22-build", - "tag_id": 278 - } - } + "tag_id": 278, + }, + }, } consumer.consume(msg) assert process_message.call_count == 1 msg_obj = process_message.call_args[0][1] assert isinstance(msg_obj, KojiRepoChange) - assert msg_obj.msg_id == msg['body']['msg_id'] - assert msg_obj.repo_tag == msg['body']['msg']['tag'] + assert msg_obj.msg_id == msg["body"]["msg_id"] + assert msg_obj.repo_tag == msg["body"]["msg"]["tag"] diff --git a/tests/test_scheduler/test_greenwave.py b/tests/test_scheduler/test_greenwave.py index 9deeb4c5..a3e6bc1d 100644 --- a/tests/test_scheduler/test_greenwave.py +++ b/tests/test_scheduler/test_greenwave.py @@ -39,56 +39,50 @@ class TestGetCorrespondingModuleBuild: def setup_method(self, method): clean_database() - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession): ClientSession.return_value.getBuild.return_value = None - assert get_corresponding_module_build('n-v-r') is None + assert get_corresponding_module_build("n-v-r") is None - @pytest.mark.parametrize('build_info', [ - # Build info does not have key extra - {'id': 1000, 'name': 'ed'}, - # Build info contains key extra, but it is not for the module build - { - 'extra': {'submitter': 'osbs', 'image': {}} - }, - # Key module_build_service_id is missing - { - 'extra': {'typeinfo': {'module': {}}} - } - ]) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @pytest.mark.parametrize( + 
"build_info", + [ + # Build info does not have key extra + {"id": 1000, "name": "ed"}, + # Build info contains key extra, but it is not for the module build + {"extra": {"submitter": "osbs", "image": {}}}, + # Key module_build_service_id is missing + {"extra": {"typeinfo": {"module": {}}}}, + ], + ) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_cannot_find_module_build_id_from_build_info(self, ClientSession, build_info): ClientSession.return_value.getBuild.return_value = build_info - assert get_corresponding_module_build('n-v-r') is None + assert get_corresponding_module_build("n-v-r") is None - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession): fake_module_build_id, = db.session.query(func.max(ModuleBuild.id)).first() ClientSession.return_value.getBuild.return_value = { - 'extra': {'typeinfo': {'module': { - 'module_build_service_id': fake_module_build_id + 1 - }}} + "extra": {"typeinfo": {"module": {"module_build_service_id": fake_module_build_id + 1}}} } - assert get_corresponding_module_build('n-v-r') is None + assert get_corresponding_module_build("n-v-r") is None - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_find_the_module_build(self, ClientSession): expected_module_build = ( - db.session.query(ModuleBuild) - .filter(ModuleBuild.name == 'platform').first() + db.session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first() ) ClientSession.return_value.getBuild.return_value = { - 'extra': {'typeinfo': {'module': { - 'module_build_service_id': expected_module_build.id - }}} + "extra": {"typeinfo": {"module": {"module_build_service_id": expected_module_build.id}}} } - build = get_corresponding_module_build('n-v-r') + build = get_corresponding_module_build("n-v-r") assert expected_module_build.id == build.id assert expected_module_build.name == build.name @@ -97,82 +91,80 @@ class TestGetCorrespondingModuleBuild: class TestDecisionUpdateHandler: """Test handler decision_update""" - @patch('module_build_service.scheduler.handlers.greenwave.log') + @patch("module_build_service.scheduler.handlers.greenwave.log") def test_decision_context_is_not_match(self, log): - msg = Mock(msg_id='msg-id-1', - decision_context='bodhi_update_push_testing') + msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing") decision_update(conf, db.session, msg) log.debug.assert_called_once_with( 'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"', - 'msg-id-1', 'osci_compose_gate_modules' + "msg-id-1", + "osci_compose_gate_modules", ) - @patch('module_build_service.scheduler.handlers.greenwave.log') + @patch("module_build_service.scheduler.handlers.greenwave.log") def test_not_satisfy_policies(self, log): - msg = Mock(msg_id='msg-id-1', - decision_context='osci_compose_gate_modules', - policies_satisfied=False, - subject_identifier='pkg-0.1-1.c1') + msg = Mock( + msg_id="msg-id-1", + decision_context="osci_compose_gate_modules", + policies_satisfied=False, + subject_identifier="pkg-0.1-1.c1", + ) decision_update(conf, db.session, msg) log.debug.assert_called_once_with( - 'Skip to handle module build %s because it has not satisfied ' - 'Greenwave policies.', - msg.subject_identifier + "Skip to handle module build %s 
because it has not satisfied Greenwave policies.", + msg.subject_identifier, ) - @patch('module_build_service.messaging.publish') - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.messaging.publish") + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_transform_from_done_to_ready(self, ClientSession, publish): clean_database() # This build should be queried and transformed to ready state - module_build = make_module('pkg:0.1:1:c1', requires_list={'platform': 'el8'}) + module_build = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"}) module_build.transition( - conf, BUILD_STATES['done'], 'Move to done directly for running test.') + conf, BUILD_STATES["done"], "Move to done directly for running test." + ) # Assert this call below first_publish_call = call( - service='mbs', - topic='module.state.change', + service="mbs", + topic="module.state.change", msg=module_build.json(show_tasks=False), - conf=conf + conf=conf, ) db.session.refresh(module_build) ClientSession.return_value.getBuild.return_value = { - 'extra': {'typeinfo': {'module': { - 'module_build_service_id': module_build.id - }}} + "extra": {"typeinfo": {"module": {"module_build_service_id": module_build.id}}} } msg = { - 'msg_id': 'msg-id-1', - 'topic': 'org.fedoraproject.prod.greenwave.decision.update', - 'msg': { - 'decision_context': 'osci_compose_gate_modules', - 'policies_satisfied': True, - 'subject_identifier': 'pkg-0.1-1.c1' - } + "msg_id": "msg-id-1", + "topic": "org.fedoraproject.prod.greenwave.decision.update", + "msg": { + "decision_context": "osci_compose_gate_modules", + "policies_satisfied": True, + "subject_identifier": "pkg-0.1-1.c1", + }, } - hub = Mock(config={ - 'validate_signatures': False - }) + hub = Mock(config={"validate_signatures": False}) consumer = MBSConsumer(hub) consumer.consume(msg) # Load module build again to check its state is moved correctly module_build = ( - db.session.query(ModuleBuild) - .filter(ModuleBuild.id == module_build.id).first() - ) + db.session.query(ModuleBuild).filter(ModuleBuild.id == module_build.id).first()) - assert BUILD_STATES['ready'] == module_build.state + assert BUILD_STATES["ready"] == module_build.state publish.assert_has_calls([ first_publish_call, - call(service='mbs', - topic='module.state.change', - msg=module_build.json(show_tasks=False), - conf=conf), + call( + service="mbs", + topic="module.state.change", + msg=module_build.json(show_tasks=False), + conf=conf, + ), ]) diff --git a/tests/test_scheduler/test_module_init.py b/tests/test_scheduler/test_module_init.py index 229f5b5b..b4a33f67 100644 --- a/tests/test_scheduler/test_module_init.py +++ b/tests/test_scheduler/test_module_init.py @@ -35,20 +35,16 @@ from module_build_service.models import make_session, ModuleBuild, ComponentBuil class TestModuleInit: - def setup_method(self, test_method): self.fn = module_build_service.scheduler.handlers.modules.init - self.staged_data_dir = os.path.join( - os.path.dirname(__file__), '../', 'staged_data') - testmodule_yml_path = os.path.join( - self.staged_data_dir, 'testmodule_init.yaml') - with open(testmodule_yml_path, 'r') as f: + self.staged_data_dir = os.path.join(os.path.dirname(__file__), "../", "staged_data") + testmodule_yml_path = os.path.join(self.staged_data_dir, "testmodule_init.yaml") + with open(testmodule_yml_path, "r") as f: yaml = to_text_type(f.read()) - scmurl = 'git://pkgs.domain.local/modules/testmodule?#620ec77' + scmurl = 
"git://pkgs.domain.local/modules/testmodule?#620ec77" clean_database() with make_session(conf) as session: - ModuleBuild.create( - session, conf, 'testmodule', '1', 3, yaml, scmurl, 'mprahl') + ModuleBuild.create(session, conf, "testmodule", "1", 3, yaml, scmurl, "mprahl") def teardown_method(self, test_method): try: @@ -57,13 +53,19 @@ class TestModuleInit: except Exception: pass - @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder." - "get_built_rpms_in_module_build") - @patch('module_build_service.scm.SCM') - @patch('module_build_service.scheduler.handlers.modules.handle_stream_collision_modules') + @patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder." + "get_built_rpms_in_module_build" + ) + @patch("module_build_service.scm.SCM") + @patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules") def test_init_basic(self, rscm, mocked_scm, built_rpms): - FakeSCM(mocked_scm, 'testmodule', 'testmodule_init.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_init.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) built_rpms.return_value = [ "foo-0:2.4.48-3.el8+1308+551bfa71", @@ -71,7 +73,8 @@ class TestModuleInit: "bar-0:2.5.48-3.el8+1308+551bfa71", "bar-debuginfo-0:2.5.48-3.el8+1308+551bfa71", "x-0:2.5.48-3.el8+1308+551bfa71", - "x-debuginfo-0:2.5.48-3.el8+1308+551bfa71"] + "x-debuginfo-0:2.5.48-3.el8+1308+551bfa71", + ] platform_build = ModuleBuild.query.get(1) mmd = platform_build.mmd() @@ -83,7 +86,8 @@ class TestModuleInit: db.session.commit() msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=2, module_build_state='init') + msg_id=None, module_build_id=2, module_build_state="init" + ) with make_session(conf) as session: self.fn(config=conf, session=session, msg=msg) @@ -91,10 +95,12 @@ class TestModuleInit: # Make sure the module entered the wait state assert build.state == 1, build.state # Make sure format_mmd was run properly - xmd_mbs = build.mmd().get_xmd()['mbs'] + xmd_mbs = build.mmd().get_xmd()["mbs"] assert type(xmd_mbs) is GLib.Variant assert xmd_mbs["buildrequires"]["platform"]["filtered_rpms"] == [ - 'foo-0:2.4.48-3.el8+1308+551bfa71', 'bar-0:2.5.48-3.el8+1308+551bfa71'] + "foo-0:2.4.48-3.el8+1308+551bfa71", + "bar-0:2.5.48-3.el8+1308+551bfa71", + ] return build def test_init_called_twice(self): @@ -113,16 +119,16 @@ class TestModuleInit: new_mmd = yaml.safe_load(build.modulemd) assert old_mmd == new_mmd - @patch('module_build_service.scm.SCM') + @patch("module_build_service.scm.SCM") def test_init_scm_not_available(self, mocked_scm): def mocked_scm_get_latest(): raise RuntimeError("Failed in mocked_scm_get_latest") - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") mocked_scm.return_value.get_latest = mocked_scm_get_latest msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=2, module_build_state='init') + msg_id=None, module_build_id=2, module_build_state="init") with make_session(conf) as session: self.fn(config=conf, session=session, msg=msg) build = ModuleBuild.query.filter_by(id=2).one() @@ -130,51 +136,56 @@ class TestModuleInit: # since the git server is not available assert build.state == 4, build.state - @patch("module_build_service.config.Config.modules_allow_repository", - new_callable=PropertyMock, return_value=True) - 
@patch('module_build_service.scm.SCM') + @patch( + "module_build_service.config.Config.modules_allow_repository", + new_callable=PropertyMock, + return_value=True, + ) + @patch("module_build_service.scm.SCM") def test_init_includedmodule(self, mocked_scm, mocked_mod_allow_repo): - FakeSCM(mocked_scm, "includedmodules", ['testmodule_init.yaml']) - includedmodules_yml_path = os.path.join( - self.staged_data_dir, 'includedmodules.yaml') - with open(includedmodules_yml_path, 'r') as f: + FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"]) + includedmodules_yml_path = os.path.join(self.staged_data_dir, "includedmodules.yaml") + with open(includedmodules_yml_path, "r") as f: yaml = to_text_type(f.read()) - scmurl = 'git://pkgs.domain.local/modules/includedmodule?#da95886' + scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886" with make_session(conf) as session: - ModuleBuild.create( - session, conf, 'includemodule', '1', 3, yaml, scmurl, 'mprahl') + ModuleBuild.create(session, conf, "includemodule", "1", 3, yaml, scmurl, "mprahl") msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=3, module_build_state='init') + msg_id=None, module_build_id=3, module_build_state="init") self.fn(config=conf, session=session, msg=msg) build = ModuleBuild.query.filter_by(id=3).one() assert build.state == 1 - assert build.name == 'includemodule' + assert build.name == "includemodule" batches = {} for comp_build in ComponentBuild.query.filter_by(module_id=3).all(): batches[comp_build.package] = comp_build.batch - assert batches['perl-List-Compare'] == 2 - assert batches['perl-Tangerine'] == 2 - assert batches['foo'] == 2 - assert batches['tangerine'] == 3 - assert batches['file'] == 4 + assert batches["perl-List-Compare"] == 2 + assert batches["perl-Tangerine"] == 2 + assert batches["foo"] == 2 + assert batches["tangerine"] == 3 + assert batches["file"] == 4 # Test that the RPMs are properly merged in xmd xmd_rpms = { - 'perl-List-Compare': {'ref': '4f26aeafdb'}, - 'perl-Tangerine': {'ref': '4f26aeafdb'}, - 'tangerine': {'ref': '4f26aeafdb'}, - 'foo': {'ref': '93dea37599'}, - 'file': {'ref': 'a2740663f8'}, + "perl-List-Compare": {"ref": "4f26aeafdb"}, + "perl-Tangerine": {"ref": "4f26aeafdb"}, + "tangerine": {"ref": "4f26aeafdb"}, + "foo": {"ref": "93dea37599"}, + "file": {"ref": "a2740663f8"}, } - assert build.mmd().get_xmd()['mbs']['rpms'] == xmd_rpms + assert build.mmd().get_xmd()["mbs"]["rpms"] == xmd_rpms - @patch('module_build_service.models.ModuleBuild.from_module_event') - @patch('module_build_service.scm.SCM') + @patch("module_build_service.models.ModuleBuild.from_module_event") + @patch("module_build_service.scm.SCM") def test_init_when_get_latest_raises(self, mocked_scm, mocked_from_module_event): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '7035bd33614972ac66559ac1fdd019ff6027ad22', - get_latest_raise=True) + FakeSCM( + mocked_scm, + "testmodule", + "testmodule.yaml", + "7035bd33614972ac66559ac1fdd019ff6027ad22", + get_latest_raise=True, + ) msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=2, module_build_state='init') + msg_id=None, module_build_id=2, module_build_state="init") with make_session(conf) as session: build = session.query(ModuleBuild).filter_by(id=2).one() mocked_from_module_event.return_value = build @@ -183,4 +194,4 @@ class TestModuleInit: session.refresh(build) # Make sure the module entered the failed state assert build.state == 4, build.state - assert 'Failed to get the latest commit for' 
in build.state_reason + assert "Failed to get the latest commit for" in build.state_reason diff --git a/tests/test_scheduler/test_module_wait.py b/tests/test_scheduler/test_module_wait.py index dd3eedd2..6fa27a0e 100644 --- a/tests/test_scheduler/test_module_wait.py +++ b/tests/test_scheduler/test_module_wait.py @@ -48,31 +48,31 @@ class TestModuleWait: except Exception: pass - @patch('module_build_service.builder.GenericBuilder.create_from_module') - @patch('module_build_service.models.ModuleBuild.from_module_event') + @patch("module_build_service.builder.GenericBuilder.create_from_module") + @patch("module_build_service.models.ModuleBuild.from_module_event") def test_init_basic(self, from_module_event, create_builder): builder = mock.Mock() - builder.get_disttag_srpm.return_value = 'some srpm disttag' + builder.get_disttag_srpm.return_value = "some srpm disttag" builder.build.return_value = 1234, 1, "", None - builder.module_build_tag = {'name': 'some-tag-build'} + builder.module_build_tag = {"name": "some-tag-build"} create_builder.return_value = builder mocked_module_build = mock.Mock() - mocked_module_build.name = 'foo' - mocked_module_build.stream = 'stream' - mocked_module_build.version = '1' - mocked_module_build.context = '1234567' + mocked_module_build.name = "foo" + mocked_module_build.stream = "stream" + mocked_module_build.version = "1" + mocked_module_build.context = "1234567" mocked_module_build.state = 1 mocked_module_build.id = 1 mocked_module_build.json.return_value = { - 'name': 'foo', - 'stream': '1', - 'version': 1, - 'state': 'some state', - 'id': 1 + "name": "foo", + "stream": "1", + "version": 1, + "state": "some state", + "id": 1, } formatted_testmodule_yml_path = os.path.join( - base_dir, 'staged_data', 'formatted_testmodule.yaml') + base_dir, "staged_data", "formatted_testmodule.yaml") mmd = Modulemd.Module().new_from_file(formatted_testmodule_yml_path) mmd.upgrade() mocked_module_build.id = 1 @@ -81,18 +81,21 @@ class TestModuleWait: from_module_event.return_value = mocked_module_build - msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=1, - module_build_state='some state') - with patch.object(module_build_service.resolver, 'system_resolver'): + msg = module_build_service.messaging.MBSModule( + msg_id=None, module_build_id=1, module_build_state="some state") + with patch.object(module_build_service.resolver, "system_resolver"): self.fn(config=self.config, session=self.session, msg=msg) - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.GenericBuilder.create_from_module") - @patch('module_build_service.resolver.DBResolver') - @patch('module_build_service.resolver.GenericResolver') + @patch("module_build_service.resolver.DBResolver") + @patch("module_build_service.resolver.GenericResolver") def test_new_repo_called_when_macros_reused( - self, generic_resolver, resolver, create_builder, dbg): + self, generic_resolver, resolver, create_builder, dbg + ): """ Test that newRepo is called when module-build-macros build is reused. 
""" @@ -104,35 +107,45 @@ class TestModuleWait: builder = mock.MagicMock() builder.koji_session = koji_session builder.module_build_tag = {"name": "module-123-build"} - builder.get_disttag_srpm.return_value = 'some srpm disttag' - builder.build.return_value = (1234, koji.BUILD_STATES['COMPLETE'], "", - "module-build-macros-1-1") + builder.get_disttag_srpm.return_value = "some srpm disttag" + builder.build.return_value = ( + 1234, + koji.BUILD_STATES["COMPLETE"], + "", + "module-build-macros-1-1", + ) create_builder.return_value = builder resolver = mock.MagicMock() - resolver.backend = 'db' + resolver.backend = "db" resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" - with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): - msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2, - module_build_state='some state') + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): + msg = module_build_service.messaging.MBSModule( + msg_id=None, module_build_id=2, module_build_state="some state") module_build_service.scheduler.handlers.modules.wait( config=conf, session=db.session, msg=msg) koji_session.newRepo.assert_called_once_with("module-123-build") # When module-build-macros is reused, it still has to appear only # once in database. - builds_count = db.session.query(ComponentBuild).filter_by( - package="module-build-macros", module_id=2).count() + builds_count = ( + db.session.query(ComponentBuild) + .filter_by(package="module-build-macros", module_id=2) + .count() + ) assert builds_count == 1 - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.GenericBuilder.create_from_module") - @patch('module_build_service.resolver.DBResolver') - @patch('module_build_service.resolver.GenericResolver') + @patch("module_build_service.resolver.DBResolver") + @patch("module_build_service.resolver.GenericResolver") def test_new_repo_not_called_when_macros_not_reused( - self, generic_resolver, resolver, create_builder, dbg): + self, generic_resolver, resolver, create_builder, dbg + ): """ Test that newRepo is called everytime for module-build-macros """ @@ -144,29 +157,36 @@ class TestModuleWait: builder = mock.MagicMock() builder.koji_session = koji_session builder.module_build_tag = {"name": "module-123-build"} - builder.get_disttag_srpm.return_value = 'some srpm disttag' - builder.build.return_value = (1234, koji.BUILD_STATES['BUILDING'], "", - "module-build-macros-1-1") + builder.get_disttag_srpm.return_value = "some srpm disttag" + builder.build.return_value = ( + 1234, + koji.BUILD_STATES["BUILDING"], + "", + "module-build-macros-1-1", + ) create_builder.return_value = builder resolver = mock.MagicMock() - resolver.backend = 'db' + resolver.backend = "db" resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" - with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): - msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2, - module_build_state='some state') + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): + msg = module_build_service.messaging.MBSModule( + msg_id=None, module_build_id=2, module_build_state="some state") 
module_build_service.scheduler.handlers.modules.wait( config=conf, session=db.session, msg=msg) assert koji_session.newRepo.called - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.GenericBuilder.create_from_module") - @patch('module_build_service.resolver.DBResolver') - @patch('module_build_service.resolver.GenericResolver') + @patch("module_build_service.resolver.DBResolver") + @patch("module_build_service.resolver.GenericResolver") def test_set_cg_build_koji_tag_fallback_to_default( - self, generic_resolver, resolver, create_builder, dbg): + self, generic_resolver, resolver, create_builder, dbg + ): """ Test that build.cg_build_koji_tag fallbacks to default tag. """ @@ -182,39 +202,59 @@ class TestModuleWait: builder = mock.MagicMock() builder.koji_session = koji_session builder.module_build_tag = {"name": "module-123-build"} - builder.get_disttag_srpm.return_value = 'some srpm disttag' - builder.build.return_value = (1234, koji.BUILD_STATES['BUILDING'], "", - "module-build-macros-1-1") + builder.get_disttag_srpm.return_value = "some srpm disttag" + builder.build.return_value = ( + 1234, + koji.BUILD_STATES["BUILDING"], + "", + "module-build-macros-1-1", + ) create_builder.return_value = builder resolver = mock.MagicMock() - resolver.backend = 'db' + resolver.backend = "db" resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" resolver.get_module_build_dependencies.return_value = { - "module-bootstrap-tag": [base_mmd]} + "module-bootstrap-tag": [base_mmd] + } - with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): - msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2, - module_build_state='some state') + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): + msg = module_build_service.messaging.MBSModule( + msg_id=None, module_build_id=2, module_build_state="some state") module_build_service.scheduler.handlers.modules.wait( config=conf, session=db.session, msg=msg) module_build = ModuleBuild.query.filter_by(id=2).one() assert module_build.cg_build_koji_tag == "modular-updates-candidate" - @pytest.mark.parametrize('koji_cg_tag_build,expected_cg_koji_build_tag', [ - [True, 'f27-modular-updates-candidate'], - [False, None], - ]) - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @pytest.mark.parametrize( + "koji_cg_tag_build,expected_cg_koji_build_tag", + [ + [True, "f27-modular-updates-candidate"], + [False, None] + ], + ) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.GenericBuilder.create_from_module") - @patch('module_build_service.resolver.DBResolver') - @patch('module_build_service.resolver.GenericResolver') - @patch("module_build_service.config.Config.base_module_names", - new_callable=mock.PropertyMock, return_value=["base-runtime", "platform"]) + @patch("module_build_service.resolver.DBResolver") + @patch("module_build_service.resolver.GenericResolver") + @patch( + "module_build_service.config.Config.base_module_names", + new_callable=mock.PropertyMock, + return_value=["base-runtime", "platform"], + ) def 
test_set_cg_build_koji_tag( - self, cfg, generic_resolver, resolver, create_builder, dbg, - koji_cg_tag_build, expected_cg_koji_build_tag): + self, + cfg, + generic_resolver, + resolver, + create_builder, + dbg, + koji_cg_tag_build, + expected_cg_koji_build_tag, + ): """ Test that build.cg_build_koji_tag is set. """ @@ -230,23 +270,33 @@ class TestModuleWait: builder = mock.MagicMock() builder.koji_session = koji_session builder.module_build_tag = {"name": "module-123-build"} - builder.get_disttag_srpm.return_value = 'some srpm disttag' - builder.build.return_value = (1234, koji.BUILD_STATES['BUILDING'], "", - "module-build-macros-1-1") + builder.get_disttag_srpm.return_value = "some srpm disttag" + builder.build.return_value = ( + 1234, + koji.BUILD_STATES["BUILDING"], + "", + "module-build-macros-1-1", + ) create_builder.return_value = builder resolver = mock.MagicMock() - resolver.backend = 'db' + resolver.backend = "db" resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" resolver.get_module_build_dependencies.return_value = { - "module-bootstrap-tag": [base_mmd]} + "module-bootstrap-tag": [base_mmd] + } - with patch.object(module_build_service.scheduler.handlers.modules.conf, - 'koji_cg_tag_build', new=koji_cg_tag_build): - with patch.object(module_build_service.resolver, 'system_resolver', new=resolver): - msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2, - module_build_state='some state') + with patch.object( + module_build_service.scheduler.handlers.modules.conf, + "koji_cg_tag_build", + new=koji_cg_tag_build, + ): + with patch.object(module_build_service.resolver, "system_resolver", new=resolver): + msg = module_build_service.messaging.MBSModule( + msg_id=None, module_build_id=2, module_build_state="some state" + ) module_build_service.scheduler.handlers.modules.wait( - config=conf, session=db.session, msg=msg) + config=conf, session=db.session, msg=msg + ) module_build = ModuleBuild.query.filter_by(id=2).one() assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag diff --git a/tests/test_scheduler/test_poller.py b/tests/test_scheduler/test_poller.py index 4198b7e4..38dab62a 100644 --- a/tests/test_scheduler/test_poller.py +++ b/tests/test_scheduler/test_poller.py @@ -30,30 +30,35 @@ import six.moves.queue as queue from datetime import datetime, timedelta -@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) +@patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, +) @patch("module_build_service.scheduler.consumer.get_global_consumer") @patch("module_build_service.builder.GenericBuilder.create_from_module") class TestPoller: - def setup_method(self, test_method): reuse_component_init_data() - self.p_read_config = patch('koji.read_config', return_value={ - 'authtype': 'kerberos', - 'timeout': 60, - 'server': 'http://koji.example.com/' - }) + self.p_read_config = patch( + "koji.read_config", + return_value={ + "authtype": "kerberos", + "timeout": 60, + "server": "http://koji.example.com/", + }, + ) self.mock_read_config = self.p_read_config.start() def teardown_method(self, test_method): self.p_read_config.stop() clean_database() - @pytest.mark.parametrize('fresh', [True, False]) - @patch('module_build_service.utils.batches.start_build_component') - def test_process_paused_module_builds(self, start_build_component, create_builder, - global_consumer, dbg, fresh): + 
@pytest.mark.parametrize("fresh", [True, False]) + @patch("module_build_service.utils.batches.start_build_component") + def test_process_paused_module_builds( + self, start_build_component, create_builder, global_consumer, dbg, fresh + ): """ Tests general use-case of process_paused_module_builds. """ @@ -102,7 +107,8 @@ class TestPoller: @patch.dict("sys.modules", krbV=mock.MagicMock()) @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_trigger_new_repo_when_failed( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): """ Tests that we call koji_sesion.newRepo when newRepo task failed. """ @@ -111,8 +117,8 @@ class TestPoller: global_consumer.return_value = consumer koji_session = ClientSession.return_value - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['FAILED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["FAILED"]} koji_session.newRepo.return_value = 123456 builder = mock.MagicMock() @@ -133,10 +139,11 @@ class TestPoller: koji_session.newRepo.assert_called_once_with( "module-testmodule-master-20170219191323-c40c156c-build") - @patch.dict('sys.modules', krbV=mock.MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=mock.MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_trigger_new_repo_when_succeeded( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): """ Tests that we do not call koji_sesion.newRepo when newRepo task succeeded. @@ -146,8 +153,8 @@ class TestPoller: global_consumer.return_value = consumer koji_session = ClientSession.return_value - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]} koji_session.newRepo.return_value = 123456 builder = mock.MagicMock() @@ -173,7 +180,8 @@ class TestPoller: assert module_build.new_repo_task_id == 0 def test_process_paused_module_builds_waiting_for_repo( - self, create_builder, global_consumer, dbg): + self, create_builder, global_consumer, dbg + ): """ Tests that process_paused_module_builds does not start new batch when we are waiting for repo. @@ -206,10 +214,11 @@ class TestPoller: for component in components: assert component.state is None - @patch.dict('sys.modules', krbV=mock.MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=mock.MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_old_build_targets_are_not_associated_with_any_module_builds( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): consumer = mock.MagicMock() consumer.incoming = queue.Queue() global_consumer.return_value = consumer @@ -217,8 +226,8 @@ class TestPoller: koji_session = ClientSession.return_value # No created module build has any of these tags. 
koji_session.getBuildTargets.return_value = [ - {'dest_tag_name': 'module-xxx-1'}, - {'dest_tag_name': 'module-yyy-2'}, + {"dest_tag_name": "module-xxx-1"}, + {"dest_tag_name": "module-yyy-2"}, ] hub = mock.MagicMock() @@ -227,17 +236,16 @@ class TestPoller: koji_session.deleteBuildTarget.assert_not_called() - @patch.dict('sys.modules', krbV=mock.MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=mock.MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_dont_delete_base_module_build_target( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): module_build = models.ModuleBuild.query.filter_by(id=3).one() koji_session = ClientSession.return_value # No created module build has any of these tags. - koji_session.getBuildTargets.return_value = [ - {'dest_tag_name': module_build.koji_tag}, - ] + koji_session.getBuildTargets.return_value = [{"dest_tag_name": module_build.koji_tag}] consumer = mock.MagicMock() consumer.incoming = queue.Queue() @@ -245,7 +253,7 @@ class TestPoller: # If module build's name is one of base module names, build target # should not be deleted. - with patch.object(conf, 'base_module_names', new=[module_build.name]): + with patch.object(conf, "base_module_names", new=[module_build.name]): hub = mock.MagicMock() poller = MBSProducer(hub) @@ -253,17 +261,16 @@ class TestPoller: koji_session.deleteBuildTarget.assert_not_called() - @patch.dict('sys.modules', krbV=mock.MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=mock.MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_dont_delete_build_target_for_unfinished_module_builds( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): module_build = models.ModuleBuild.query.filter_by(id=3).one() koji_session = ClientSession.return_value # No created module build has any of these tags. - koji_session.getBuildTargets.return_value = [ - {'dest_tag_name': module_build.koji_tag}, - ] + koji_session.getBuildTargets.return_value = [{"dest_tag_name": module_build.koji_tag}] consumer = mock.MagicMock() consumer.incoming = queue.Queue() @@ -271,7 +278,7 @@ class TestPoller: # Each time when a module build is in one of these state, build target # should not be deleted. - for state in ['init', 'wait', 'build']: + for state in ["init", "wait", "build"]: module_build.state = state db.session.commit() @@ -281,19 +288,20 @@ class TestPoller: koji_session.deleteBuildTarget.assert_not_called() - @patch.dict('sys.modules', krbV=mock.MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=mock.MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_only_delete_build_target_with_allowed_koji_tag_prefix( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): module_build_2 = models.ModuleBuild.query.filter_by(id=2).one() module_build_3 = models.ModuleBuild.query.filter_by(id=3).one() # Only module build 1's build target should be deleted. 
- module_build_2.koji_tag = 'module-tag1' - module_build_2.state = models.BUILD_STATES['done'] + module_build_2.koji_tag = "module-tag1" + module_build_2.state = models.BUILD_STATES["done"] # Ensure to exceed the koji_target_delete_time easily later for deletion module_build_2.time_completed = datetime.utcnow() - timedelta(hours=24) - module_build_3.koji_tag = 'f28' + module_build_3.koji_tag = "f28" db.session.commit() db.session.refresh(module_build_2) db.session.refresh(module_build_3) @@ -301,25 +309,16 @@ class TestPoller: koji_session = ClientSession.return_value # No created module build has any of these tags. koji_session.getBuildTargets.return_value = [ - { - 'id': 1, - 'dest_tag_name': module_build_2.koji_tag, - 'name': module_build_2.koji_tag - }, - { - 'id': 2, - 'dest_tag_name': module_build_3.koji_tag, - 'name': module_build_3.koji_tag - }, + {"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag}, + {"id": 2, "dest_tag_name": module_build_3.koji_tag, "name": module_build_3.koji_tag}, ] consumer = mock.MagicMock() consumer.incoming = queue.Queue() global_consumer.return_value = consumer - with patch.object(conf, 'koji_tag_prefixes', - new=['module', 'another-prefix']): - with patch.object(conf, 'koji_target_delete_time', new=60): + with patch.object(conf, "koji_tag_prefixes", new=["module", "another-prefix"]): + with patch.object(conf, "koji_target_delete_time", new=60): hub = mock.MagicMock() poller = MBSProducer(hub) poller.delete_old_koji_targets(conf, db.session) @@ -327,15 +326,16 @@ class TestPoller: koji_session.deleteBuildTarget.assert_called_once_with(1) koji_session.krb_login.assert_called_once() - @patch.dict('sys.modules', krbV=mock.MagicMock()) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.dict("sys.modules", krbV=mock.MagicMock()) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_cant_delete_build_target_if_not_reach_delete_time( - self, ClientSession, create_builder, global_consumer, dbg): + self, ClientSession, create_builder, global_consumer, dbg + ): module_build_2 = models.ModuleBuild.query.filter_by(id=2).one() # Only module build 1's build target should be deleted. - module_build_2.koji_tag = 'module-tag1' - module_build_2.state = models.BUILD_STATES['done'] + module_build_2.koji_tag = "module-tag1" + module_build_2.state = models.BUILD_STATES["done"] # Ensure to exceed the koji_target_delete_time easily later for deletion module_build_2.time_completed = datetime.utcnow() - timedelta(minutes=5) db.session.commit() @@ -344,18 +344,14 @@ class TestPoller: koji_session = ClientSession.return_value # No created module build has any of these tags. koji_session.getBuildTargets.return_value = [ - { - 'id': 1, - 'dest_tag_name': module_build_2.koji_tag, - 'name': module_build_2.koji_tag - }, + {"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag} ] consumer = mock.MagicMock() consumer.incoming = queue.Queue() global_consumer.return_value = consumer - with patch.object(conf, 'koji_tag_prefixes', new=['module']): + with patch.object(conf, "koji_tag_prefixes", new=["module"]): # Use default koji_target_delete_time in config. That time is long # enough for test. 
hub = mock.MagicMock() @@ -364,9 +360,8 @@ class TestPoller: koji_session.deleteBuildTarget.assert_not_called() - @pytest.mark.parametrize('state', ['init', 'wait']) - def test_process_waiting_module_build( - self, create_builder, global_consumer, dbg, state): + @pytest.mark.parametrize("state", ["init", "wait"]) + def test_process_waiting_module_build(self, create_builder, global_consumer, dbg, state): """ Test that processing old waiting module builds works. """ consumer = mock.MagicMock() @@ -396,9 +391,10 @@ class TestPoller: # ensure the time_modified was changed. assert module_build.time_modified > original - @pytest.mark.parametrize('state', ['init', 'wait']) + @pytest.mark.parametrize("state", ["init", "wait"]) def test_process_waiting_module_build_not_old_enough( - self, create_builder, global_consumer, dbg, state): + self, create_builder, global_consumer, dbg, state + ): """ Test that we do not process young waiting builds. """ consumer = mock.MagicMock() @@ -426,8 +422,7 @@ class TestPoller: # Ensure we did *not* process the 9 minute-old build. assert consumer.incoming.qsize() == 0 - def test_process_waiting_module_build_none_found( - self, create_builder, global_consumer, dbg): + def test_process_waiting_module_build_none_found(self, create_builder, global_consumer, dbg): """ Test nothing happens when no module builds are waiting. """ consumer = mock.MagicMock() @@ -446,8 +441,7 @@ class TestPoller: # Ensure we did *not* process any of the non-waiting builds. assert consumer.incoming.qsize() == 0 - def test_cleanup_stale_failed_builds( - self, create_builder, global_consumer, dbg): + def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg): """ Test that one of the two module builds gets to the garbage state when running cleanup_stale_failed_builds. 
""" @@ -455,14 +449,14 @@ class TestPoller: create_builder.return_value = builder module_build_one = models.ModuleBuild.query.get(2) module_build_two = models.ModuleBuild.query.get(3) - module_build_one.state = models.BUILD_STATES['failed'] + module_build_one.state = models.BUILD_STATES["failed"] module_build_one.time_modified = datetime.utcnow() - timedelta( days=conf.cleanup_failed_builds_time + 1) module_build_two.time_modified = datetime.utcnow() - module_build_two.state = models.BUILD_STATES['failed'] + module_build_two.state = models.BUILD_STATES["failed"] failed_component = models.ComponentBuild.query.filter_by( - package='tangerine', module_id=3).one() - failed_component.state = koji.BUILD_STATES['FAILED'] + package="tangerine", module_id=3).one() + failed_component.state = koji.BUILD_STATES["FAILED"] failed_component.tagged = False failed_component.tagged_in_final = False db.session.add(failed_component) @@ -481,34 +475,35 @@ class TestPoller: poller.cleanup_stale_failed_builds(conf, db.session) db.session.refresh(module_build_two) # Make sure module_build_one was transitioned to garbage - assert module_build_one.state == models.BUILD_STATES['garbage'] - state_reason = ('The module was garbage collected since it has failed over {0} day(s) ago' - .format(conf.cleanup_failed_builds_time)) + assert module_build_one.state == models.BUILD_STATES["garbage"] + state_reason = ( + "The module was garbage collected since it has failed over {0} day(s) ago" + .format(conf.cleanup_failed_builds_time) + ) assert module_build_one.state_reason == state_reason # Make sure all the components are marked as untagged in the database for component in module_build_one.component_builds: assert not component.tagged assert not component.tagged_in_final # Make sure module_build_two stayed the same - assert module_build_two.state == models.BUILD_STATES['failed'] + assert module_build_two.state == models.BUILD_STATES["failed"] # Make sure the builds were untagged builder.untag_artifacts.assert_called_once_with([ - 'perl-Tangerine-0.23-1.module+0+d027b723', - 'perl-List-Compare-0.53-5.module+0+d027b723', - 'tangerine-0.22-3.module+0+d027b723', - 'module-build-macros-0.1-1.module+0+d027b723' + "perl-Tangerine-0.23-1.module+0+d027b723", + "perl-List-Compare-0.53-5.module+0+d027b723", + "tangerine-0.22-3.module+0+d027b723", + "module-build-macros-0.1-1.module+0+d027b723", ]) - def test_cleanup_stale_failed_builds_no_components( - self, create_builder, global_consumer, dbg): + def test_cleanup_stale_failed_builds_no_components(self, create_builder, global_consumer, dbg): """ Test that a module build without any components built gets to the garbage state when running cleanup_stale_failed_builds. 
""" module_build_one = models.ModuleBuild.query.get(1) module_build_two = models.ModuleBuild.query.get(2) - module_build_one.state = models.BUILD_STATES['failed'] + module_build_one.state = models.BUILD_STATES["failed"] module_build_one.time_modified = datetime.utcnow() - module_build_two.state = models.BUILD_STATES['failed'] + module_build_two.state = models.BUILD_STATES["failed"] module_build_two.time_modified = datetime.utcnow() - timedelta( days=conf.cleanup_failed_builds_time + 1) module_build_two.koji_tag = None @@ -531,19 +526,21 @@ class TestPoller: poller.cleanup_stale_failed_builds(conf, db.session) db.session.refresh(module_build_two) # Make sure module_build_two was transitioned to garbage - assert module_build_two.state == models.BUILD_STATES['garbage'] - state_reason = ('The module was garbage collected since it has failed over {0} day(s) ago' - .format(conf.cleanup_failed_builds_time)) + assert module_build_two.state == models.BUILD_STATES["garbage"] + state_reason = ( + "The module was garbage collected since it has failed over {0} day(s) ago" + .format(conf.cleanup_failed_builds_time) + ) assert module_build_two.state_reason == state_reason # Make sure module_build_one stayed the same - assert module_build_one.state == models.BUILD_STATES['failed'] + assert module_build_one.state == models.BUILD_STATES["failed"] # Make sure that the builder was never instantiated create_builder.assert_not_called() - @pytest.mark.parametrize('test_state', [models.BUILD_STATES[state] - for state in conf.cleanup_stuck_builds_states]) - def test_cancel_stuck_module_builds( - self, create_builder, global_consumer, dbg, test_state): + @pytest.mark.parametrize( + "test_state", [models.BUILD_STATES[state] for state in conf.cleanup_stuck_builds_states] + ) + def test_cancel_stuck_module_builds(self, create_builder, global_consumer, dbg, test_state): module_build1 = models.ModuleBuild.query.get(1) module_build1.state = test_state @@ -576,16 +573,16 @@ class TestPoller: assert len(module) == 1 assert module[0].id == 2 - @pytest.mark.parametrize('tagged, tagged_in_final', ([True, False], [True, False])) + @pytest.mark.parametrize("tagged, tagged_in_final", ([True, False], [True, False])) @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_sync_koji_build_tags( - self, ClientSession, create_builder, global_consumer, dbg, - tagged, tagged_in_final): + self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final + ): module_build_2 = models.ModuleBuild.query.filter_by(id=2).one() # Only module build 1's build target should be deleted. 
- module_build_2.koji_tag = 'module-tag1' - module_build_2.state = models.BUILD_STATES['build'] + module_build_2.koji_tag = "module-tag1" + module_build_2.state = models.BUILD_STATES["build"] c = module_build_2.current_batch()[0] c.state = koji.BUILD_STATES["COMPLETE"] c.tagged_in_final = False @@ -598,19 +595,9 @@ class TestPoller: ret = [] if tagged: - ret.append( - { - 'id': 1, - 'name': module_build_2.koji_tag + "-build" - }, - ) + ret.append({"id": 1, "name": module_build_2.koji_tag + "-build"}) if tagged_in_final: - ret.append( - { - 'id': 2, - 'name': module_build_2.koji_tag - }, - ) + ret.append({"id": 2, "name": module_build_2.koji_tag}) koji_session.listTags.return_value = ret consumer = mock.MagicMock() diff --git a/tests/test_scheduler/test_repo_done.py b/tests/test_scheduler/test_repo_done.py index e24ce3d9..4f4c702d 100644 --- a/tests/test_scheduler/test_repo_done.py +++ b/tests/test_scheduler/test_repo_done.py @@ -29,8 +29,7 @@ from tests import conf, db, app, scheduler_init_data class TestRepoDone: - - @mock.patch('module_build_service.models.ModuleBuild.from_repo_done_event') + @mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event") def test_no_match(self, from_repo_done_event): """ Test that when a repo msg hits us and we have no match, that we do nothing gracefully. @@ -38,68 +37,86 @@ class TestRepoDone: scheduler_init_data() from_repo_done_event.return_value = None msg = module_build_service.messaging.KojiRepoChange( - 'no matches for this...', '2016-some-nonexistent-build') - module_build_service.scheduler.handlers.repos.done( - config=conf, session=db.session, msg=msg) + "no matches for this...", "2016-some-nonexistent-build") + module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.recover_orphaned_artifact', return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_average_build_time', - return_value=0.0) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.list_tasks_for_components', - return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_ready', return_value=True) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_session') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.build') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_connect') - def test_a_single_match(self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, - mock_uea): + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.recover_orphaned_artifact", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.get_average_build_time", + return_value=0.0, + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." 
+ "KojiModuleBuilder.list_tasks_for_components", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready", + return_value=True, + ) + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build") + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect" + ) + def test_a_single_match( + self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea + ): """ Test that when a repo msg hits us and we have a single match. """ scheduler_init_data() - get_session.return_value = mock.Mock(), 'development' - build_fn.return_value = 1234, 1, '', None + get_session.return_value = mock.Mock(), "development" + build_fn.return_value = 1234, 1, "", None msg = module_build_service.messaging.KojiRepoChange( - 'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build') - module_build_service.scheduler.handlers.repos.done( - config=conf, session=db.session, msg=msg) + "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") + module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg) build_fn.assert_called_once_with( - artifact_name='tangerine', - source=('https://src.fedoraproject.org/rpms/tangerine?' - '#fbed359411a1baa08d4a88e0d12d426fbf8f602c')) + artifact_name="tangerine", + source=( + "https://src.fedoraproject.org/rpms/tangerine?" + "#fbed359411a1baa08d4a88e0d12d426fbf8f602c" + ), + ) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.finalize') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.recover_orphaned_artifact', return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_average_build_time', - return_value=0.0) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.list_tasks_for_components', - return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_ready', return_value=True) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_session') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.build') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_connect') - def test_a_single_match_finalize(self, connect, build_fn, get_session, ready, list_tasks_fn, - mock_gabt, mock_uea, finalizer): + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.finalize") + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.recover_orphaned_artifact", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.get_average_build_time", + return_value=0.0, + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." 
+ "KojiModuleBuilder.list_tasks_for_components", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready", + return_value=True, + ) + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build") + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect" + ) + def test_a_single_match_finalize( + self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer + ): """ Test that when a repo msg hits us and we have a single match. """ scheduler_init_data(tangerine_state=1) - get_session.return_value = mock.Mock(), 'development' - build_fn.return_value = 1234, 1, '', None + get_session.return_value = mock.Mock(), "development" + build_fn.return_value = 1234, 1, "", None # Ensure the time_completed is None, so we can test it is set to # some date once the build is finalized. @@ -117,94 +134,109 @@ class TestRepoDone: finalizer.side_effect = mocked_finalizer msg = module_build_service.messaging.KojiRepoChange( - 'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build') - module_build_service.scheduler.handlers.repos.done( - config=conf, session=db.session, msg=msg) + "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") + module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg) finalizer.assert_called_once() - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.recover_orphaned_artifact', return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_average_build_time', - return_value=0.0) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.list_tasks_for_components', - return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_ready', return_value=True) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_session') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.build') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_connect') - def test_a_single_match_build_fail(self, connect, build_fn, config, ready, list_tasks_fn, - mock_gabt, mock_uea): + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.recover_orphaned_artifact", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.get_average_build_time", + return_value=0.0, + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.list_tasks_for_components", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready", + return_value=True, + ) + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build") + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect" + ) + def test_a_single_match_build_fail( + self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea + ): """ Test that when a KojiModuleBuilder.build fails, the build is marked as failed with proper state_reason. 
""" scheduler_init_data() - config.return_value = mock.Mock(), 'development' - build_fn.return_value = None, 4, 'Failed to submit artifact tangerine to Koji', None + config.return_value = mock.Mock(), "development" + build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None msg = module_build_service.messaging.KojiRepoChange( - 'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build') - module_build_service.scheduler.handlers.repos.done( - config=conf, session=db.session, msg=msg) + "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") + module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg) build_fn.assert_called_once_with( - artifact_name='tangerine', - source=('https://src.fedoraproject.org/rpms/tangerine?' - '#fbed359411a1baa08d4a88e0d12d426fbf8f602c')) - component_build = module_build_service.models.ComponentBuild.query\ - .filter_by(package='tangerine').one() - assert component_build.state_reason == 'Failed to submit artifact tangerine to Koji' + artifact_name="tangerine", + source=( + "https://src.fedoraproject.org/rpms/tangerine?" + "#fbed359411a1baa08d4a88e0d12d426fbf8f602c" + ), + ) + component_build = ( + module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one()) + assert component_build.state_reason == "Failed to submit artifact tangerine to Koji" - @mock.patch('module_build_service.scheduler.handlers.repos.log.info') + @mock.patch("module_build_service.scheduler.handlers.repos.log.info") def test_erroneous_regen_repo_received(self, mock_log_info): """ Test that when an unexpected KojiRepoRegen message is received, the module doesn't complete or go to the next build batch. """ scheduler_init_data(1) msg = module_build_service.messaging.KojiRepoChange( - 'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build') - component_build = module_build_service.models.ComponentBuild.query\ - .filter_by(package='tangerine').one() + "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") + component_build = ( + module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one()) component_build.tagged = False db.session.add(component_build) db.session.commit() - module_build_service.scheduler.handlers.repos.done( - config=conf, session=db.session, msg=msg) + module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg) mock_log_info.assert_called_with( - 'Ignoring repo regen, because not all components are tagged.') + "Ignoring repo regen, because not all components are tagged." + ) module_build = module_build_service.models.ModuleBuild.query.get(2) # Make sure the module build didn't transition since all the components weren't tagged - assert module_build.state == module_build_service.models.BUILD_STATES['build'] + assert module_build.state == module_build_service.models.BUILD_STATES["build"] - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.list_tasks_for_components', - return_value=[]) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.buildroot_ready', return_value=True) - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.get_session') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' - 'KojiModuleBuilder.build') - @mock.patch('module_build_service.builder.KojiModuleBuilder.' 
- 'KojiModuleBuilder.buildroot_connect') - @mock.patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder." + "KojiModuleBuilder.list_tasks_for_components", + return_value=[], + ) + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready", + return_value=True, + ) + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") + @mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build") + @mock.patch( + "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect" + ) + @mock.patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) def test_failed_component_build(self, dbg, connect, build_fn, config, ready, list_tasks_fn): """ Test that when a KojiModuleBuilder.build fails, the build is marked as failed with proper state_reason. """ with app.app_context(): scheduler_init_data(3) - config.return_value = mock.Mock(), 'development' - build_fn.return_value = None, 4, 'Failed to submit artifact x to Koji', None + config.return_value = mock.Mock(), "development" + build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None msg = module_build_service.messaging.KojiRepoChange( - 'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build') + "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") module_build_service.scheduler.handlers.repos.done( config=conf, session=db.session, msg=msg) module_build = module_build_service.models.ModuleBuild.query.get(2) diff --git a/tests/test_scheduler/test_tag_tagged.py b/tests/test_scheduler/test_tag_tagged.py index 135bf3d0..0c3484dc 100644 --- a/tests/test_scheduler/test_tag_tagged.py +++ b/tests/test_scheduler/test_tag_tagged.py @@ -34,19 +34,17 @@ import koji class TestTagTagged: - def setup_method(self, test_method): reuse_component_init_data() - @mock.patch('module_build_service.models.ModuleBuild.from_tag_change_event') + @mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event") def test_no_matching_module(self, from_tag_change_event): """ Test that when a tag msg hits us and we have no match, that we do nothing gracefully. """ from_tag_change_event.return_value = None msg = module_build_service.messaging.KojiTagChange( - 'no matches for this...', '2016-some-nonexistent-build', 'artifact', - 'artifact-1.2-1') + "no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1") module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -55,13 +53,18 @@ class TestTagTagged: that we do nothing gracefully. 
""" msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'artifact', 'artifact-1.2-1') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "artifact", + "artifact-1.2-1", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") @patch("module_build_service.builder.GenericBuilder.create_from_module") def test_newrepo(self, create_builder, koji_get_session, dbg): @@ -69,8 +72,8 @@ class TestTagTagged: Test that newRepo is called in the expected times. """ koji_session = mock.MagicMock() - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]} koji_session.newRepo.return_value = 123456 koji_get_session.return_value = koji_session @@ -78,7 +81,8 @@ class TestTagTagged: builder.koji_session = koji_session builder.buildroot_ready.return_value = False builder.module_build_tag = { - "name": "module-testmodule-master-20170219191323-c40c156c-build"} + "name": "module-testmodule-master-20170219191323-c40c156c-build" + } create_builder.return_value = builder module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() @@ -92,25 +96,33 @@ class TestTagTagged: module_build.batch = 2 for c in module_build.current_batch(): - if c.package == 'perl-Tangerine': - c.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723' - elif c.package == 'perl-List-Compare': - c.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723' + if c.package == "perl-Tangerine": + c.nvr = "perl-Tangerine-0.23-1.module+0+d027b723" + elif c.package == "perl-List-Compare": + c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723" c.state = koji.BUILD_STATES["COMPLETE"] db.session.commit() # Tag the first component to the buildroot. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( - config=conf, session=db.session, msg=msg) + config=conf, session=db.session, msg=msg + ) # Tag the first component to the final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( - config=conf, session=db.session, msg=msg) + config=conf, session=db.session, msg=msg + ) # newRepo should not be called, because there are still components # to tag. @@ -118,10 +130,14 @@ class TestTagTagged: # Tag the second component to the buildroot. 
msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( - config=conf, session=db.session, msg=msg) + config=conf, session=db.session, msg=msg + ) # newRepo should not be called, because the component has not been # tagged to final tag so far. @@ -129,8 +145,11 @@ class TestTagTagged: # Tag the first component to the final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -146,8 +165,10 @@ class TestTagTagged: # status later in poller. assert module_build.new_repo_task_id == 123456 - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") @patch("module_build_service.builder.GenericBuilder.create_from_module") def test_newrepo_still_building_components(self, create_builder, koji_get_session, dbg): @@ -155,8 +176,8 @@ class TestTagTagged: Test that newRepo is called in the expected times. """ koji_session = mock.MagicMock() - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]} koji_session.newRepo.return_value = 123456 koji_get_session.return_value = koji_session @@ -164,27 +185,34 @@ class TestTagTagged: builder.koji_session = koji_session builder.buildroot_ready.return_value = False builder.module_build_tag = { - "name": "module-testmodule-master-20170219191323-c40c156c-build"} + "name": "module-testmodule-master-20170219191323-c40c156c-build" + } create_builder.return_value = builder module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 2 - component = module_build_service.models.ComponentBuild.query\ - .filter_by(package='perl-Tangerine', module_id=module_build.id).one() + component = module_build_service.models.ComponentBuild.query.filter_by( + package="perl-Tangerine", module_id=module_build.id).one() component.state = koji.BUILD_STATES["BUILDING"] - component.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723' + component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723" db.session.commit() # Tag the perl-List-Compare component to the buildroot. 
msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) # Tag the perl-List-Compare component to final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -192,8 +220,10 @@ class TestTagTagged: # built yet. assert not koji_session.newRepo.called - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") @patch("module_build_service.builder.GenericBuilder.create_from_module") def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg): @@ -201,8 +231,8 @@ class TestTagTagged: Test that newRepo is called in the expected times. """ koji_session = mock.MagicMock() - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]} koji_session.newRepo.return_value = 123456 koji_get_session.return_value = koji_session @@ -210,7 +240,8 @@ class TestTagTagged: builder.koji_session = koji_session builder.buildroot_ready.return_value = False builder.module_build_tag = { - "name": "module-testmodule-master-20170219191323-c40c156c-build"} + "name": "module-testmodule-master-20170219191323-c40c156c-build" + } create_builder.return_value = builder module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() @@ -223,26 +254,33 @@ class TestTagTagged: c.tagged_in_final = True module_build.batch = 2 - component = module_build_service.models.ComponentBuild.query\ - .filter_by(package='perl-Tangerine', module_id=module_build.id).one() + component = module_build_service.models.ComponentBuild.query.filter_by( + package="perl-Tangerine", module_id=module_build.id).one() component.state = koji.BUILD_STATES["FAILED"] - component.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723' - component = module_build_service.models.ComponentBuild.query\ - .filter_by(package='perl-List-Compare', module_id=module_build.id).one() + component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723" + component = module_build_service.models.ComponentBuild.query.filter_by( + package="perl-List-Compare", module_id=module_build.id).one() component.state = koji.BUILD_STATES["COMPLETE"] - component.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723' + component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723" db.session.commit() # Tag the perl-List-Compare component to the buildroot. 
msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( - config=conf, session=db.session, msg=msg) + config=conf, session=db.session, msg=msg + ) # Tag the perl-List-Compare component to final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -259,20 +297,21 @@ class TestTagTagged: # status later in poller. assert module_build.new_repo_task_id == 123456 - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") @patch("module_build_service.builder.GenericBuilder.create_from_module") - def test_newrepo_multiple_batches_tagged( - self, create_builder, koji_get_session, dbg): + def test_newrepo_multiple_batches_tagged(self, create_builder, koji_get_session, dbg): """ Test that newRepo is called just once and only when all components are tagged even if we tag components from the multiple batches in the same time. """ koji_session = mock.MagicMock() - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]} koji_session.newRepo.return_value = 123456 koji_get_session.return_value = koji_session @@ -280,33 +319,40 @@ class TestTagTagged: builder.koji_session = koji_session builder.buildroot_ready.return_value = False builder.module_build_tag = { - "name": "module-testmodule-master-20170219191323-c40c156c-build"} + "name": "module-testmodule-master-20170219191323-c40c156c-build" + } create_builder.return_value = builder module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 2 mbm = module_build_service.models.ComponentBuild.query.filter_by( - module_id=3, package='module-build-macros').one() + module_id=3, package="module-build-macros").one() mbm.tagged = False db.session.add(mbm) for c in module_build.current_batch(): - if c.package == 'perl-Tangerine': - c.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723' - elif c.package == 'perl-List-Compare': - c.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723' + if c.package == "perl-Tangerine": + c.nvr = "perl-Tangerine-0.23-1.module+0+d027b723" + elif c.package == "perl-List-Compare": + c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723" c.state = koji.BUILD_STATES["COMPLETE"] db.session.commit() # Tag the first component to the buildroot. 
msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) # Tag the first component to the final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -316,14 +362,20 @@ class TestTagTagged: # Tag the second component to the buildroot. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) # Tag the second component to final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -333,14 +385,20 @@ class TestTagTagged: # Tag the component from first batch to final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'module-build-macros', 'module-build-macros-0.1-1.module+0+b0a1d1f7') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "module-build-macros", + "module-build-macros-0.1-1.module+0+b0a1d1f7", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) # Tag the component from first batch to the buildroot. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'module-build-macros', 'module-build-macros-0.1-1.module+0+b0a1d1f7') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "module-build-macros", + "module-build-macros-0.1-1.module+0+b0a1d1f7", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) @@ -356,18 +414,19 @@ class TestTagTagged: # status later in poller. 
assert module_build.new_repo_task_id == 123456 - @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) + @patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, + ) @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session") @patch("module_build_service.builder.GenericBuilder.create_from_module") - def test_newrepo_build_time_only( - self, create_builder, koji_get_session, dbg): + def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg): """ Test the component.build_time_only is respected in tag handler. """ koji_session = mock.MagicMock() - koji_session.getTag = lambda tag_name: {'name': tag_name} - koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']} + koji_session.getTag = lambda tag_name: {"name": tag_name} + koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]} koji_session.newRepo.return_value = 123456 koji_get_session.return_value = koji_session @@ -375,7 +434,8 @@ class TestTagTagged: builder.koji_session = koji_session builder.buildroot_ready.return_value = False builder.module_build_tag = { - "name": "module-testmodule-master-20170219191323-c40c156c-build"} + "name": "module-testmodule-master-20170219191323-c40c156c-build" + } create_builder.return_value = builder module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() @@ -383,43 +443,52 @@ class TestTagTagged: # Set previous components as COMPLETE and tagged. module_build.batch = 1 for c in module_build.up_to_current_batch(): - if c.package == 'module-build-macros': - c.nvr = 'module-build-macros-0.1-1.module+0+b0a1d1f7' + if c.package == "module-build-macros": + c.nvr = "module-build-macros-0.1-1.module+0+b0a1d1f7" c.state = koji.BUILD_STATES["COMPLETE"] c.tagged = True c.tagged_in_final = True module_build.batch = 2 - component = module_build_service.models.ComponentBuild.query\ - .filter_by(package='perl-Tangerine', module_id=module_build.id).one() + component = module_build_service.models.ComponentBuild.query.filter_by( + package="perl-Tangerine", module_id=module_build.id).one() component.state = koji.BUILD_STATES["COMPLETE"] component.build_time_only = True component.tagged = False component.tagged_in_final = False - component.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723' - component = module_build_service.models.ComponentBuild.query\ - .filter_by(package='perl-List-Compare', module_id=module_build.id).one() + component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723" + component = module_build_service.models.ComponentBuild.query.filter_by( + package="perl-List-Compare", module_id=module_build.id).one() component.state = koji.BUILD_STATES["COMPLETE"] - component.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723' + component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723" db.session.commit() # Tag the perl-Tangerine component to the buildroot. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-Tangerine", + "perl-Tangerine-0.23-1.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) assert not koji_session.newRepo.called # Tag the perl-List-Compare component to the buildroot. 
msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c-build', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c-build", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) # Tag the perl-List-Compare component to final tag. msg = module_build_service.messaging.KojiTagChange( - 'id', 'module-testmodule-master-20170219191323-c40c156c', - 'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723') + "id", + "module-testmodule-master-20170219191323-c40c156c", + "perl-List-Compare", + "perl-List-Compare-0.53-5.module+0+d027b723", + ) module_build_service.scheduler.handlers.tags.tagged( config=conf, session=db.session, msg=msg) diff --git a/tests/test_scm.py b/tests/test_scm.py index 8fd83f1a..a3f1bd80 100644 --- a/tests/test_scm.py +++ b/tests/test_scm.py @@ -29,15 +29,14 @@ import pytest import module_build_service.scm from module_build_service.errors import ValidationError, UnprocessableEntity -base_dir = os.path.join(os.path.dirname(__file__), 'scm_data') -repo_url = 'file://' + base_dir + '/testrepo' +base_dir = os.path.join(os.path.dirname(__file__), "scm_data") +repo_url = "file://" + base_dir + "/testrepo" class TestSCMModule: - def setup_method(self, test_method): self.tempdir = tempfile.mkdtemp() - self.repodir = self.tempdir + '/testrepo' + self.repodir = self.tempdir + "/testrepo" def teardown_method(self, test_method): if os.path.exists(self.tempdir): @@ -48,20 +47,20 @@ class TestSCMModule: scm = module_build_service.scm.SCM(repo_url) scm.checkout(self.tempdir) files = os.listdir(self.repodir) - assert 'foo' in files, "foo not in %r" % files + assert "foo" in files, "foo not in %r" % files def test_local_get_latest_is_sane(self): """ See that a hash is returned by scm.get_latest. """ scm = module_build_service.scm.SCM(repo_url) - latest = scm.get_latest('master') - target = '5481faa232d66589e660cc301179867fb00842c9' + latest = scm.get_latest("master") + target = "5481faa232d66589e660cc301179867fb00842c9" assert latest == target, "%r != %r" % (latest, target) def test_local_get_latest_commit_hash_is_sane(self): """ See that a hash is returned by scm.get_latest. """ scm = module_build_service.scm.SCM(repo_url) - latest = scm.get_latest('5481f') - target = '5481faa232d66589e660cc301179867fb00842c9' + latest = scm.get_latest("5481f") + target = "5481faa232d66589e660cc301179867fb00842c9" assert latest == target, "%r != %r" % (latest, target) def test_local_get_latest_unclean_input(self): @@ -70,22 +69,22 @@ class TestSCMModule: https://pagure.io/fm-orchestrator/issue/329 """ scm = module_build_service.scm.SCM(repo_url) - assert scm.scheme == 'git', scm.scheme - fname = tempfile.mktemp(suffix='mbs-scm-test') + assert scm.scheme == "git", scm.scheme + fname = tempfile.mktemp(suffix="mbs-scm-test") try: - scm.get_latest('master; touch %s' % fname) + scm.get_latest("master; touch %s" % fname) except UnprocessableEntity: assert not os.path.exists(fname), "%r exists! Vulnerable." 
% fname def test_local_extract_name(self): scm = module_build_service.scm.SCM(repo_url) - target = 'testrepo' - assert scm.name == target, '%r != %r' % (scm.name, target) + target = "testrepo" + assert scm.name == target, "%r != %r" % (scm.name, target) def test_local_extract_name_trailing_slash(self): - scm = module_build_service.scm.SCM(repo_url + '/') - target = 'testrepo' - assert scm.name == target, '%r != %r' % (scm.name, target) + scm = module_build_service.scm.SCM(repo_url + "/") + target = "testrepo" + assert scm.name == target, "%r != %r" % (scm.name, target) def test_verify(self): scm = module_build_service.scm.SCM(repo_url) @@ -97,20 +96,20 @@ class TestSCMModule: module_build_service.scm.SCM(repo_url, "unknown") def test_verify_commit_in_branch(self): - target = '7035bd33614972ac66559ac1fdd019ff6027ad21' + target = "7035bd33614972ac66559ac1fdd019ff6027ad21" scm = module_build_service.scm.SCM(repo_url + "?#" + target, "dev") scm.checkout(self.tempdir) scm.verify() def test_verify_commit_not_in_branch(self): - target = '7035bd33614972ac66559ac1fdd019ff6027ad21' + target = "7035bd33614972ac66559ac1fdd019ff6027ad21" scm = module_build_service.scm.SCM(repo_url + "?#" + target, "master") scm.checkout(self.tempdir) with pytest.raises(ValidationError): scm.verify() def test_verify_unknown_hash(self): - target = '7035bd33614972ac66559ac1fdd019ff6027ad22' + target = "7035bd33614972ac66559ac1fdd019ff6027ad22" scm = module_build_service.scm.SCM(repo_url + "?#" + target, "master") with pytest.raises(UnprocessableEntity): scm.checkout(self.tempdir) @@ -125,7 +124,7 @@ class TestSCMModule: def test_get_latest_incorrect_component_branch(self): scm = module_build_service.scm.SCM(repo_url) with pytest.raises(UnprocessableEntity): - scm.get_latest('foobar') + scm.get_latest("foobar") def test_get_latest_component_branch(self): ref = "5481faa232d66589e660cc301179867fb00842c9" @@ -143,4 +142,4 @@ class TestSCMModule: def test_get_latest_incorrect_component_ref(self): scm = module_build_service.scm.SCM(repo_url) with pytest.raises(UnprocessableEntity): - scm.get_latest('15481faa232d66589e660cc301179867fb00842c9') + scm.get_latest("15481faa232d66589e660cc301179867fb00842c9") diff --git a/tests/test_utils/test_ursine.py b/tests/test_utils/test_ursine.py index 2eff7541..5b3aa16c 100644 --- a/tests/test_utils/test_ursine.py +++ b/tests/test_utils/test_ursine.py @@ -30,28 +30,26 @@ from tests import make_module, clean_database class TestFindModuleKojiTags: """Test ursine.find_module_koji_tags""" - @patch.object(conf, 'koji_tag_prefixes', new=['module']) + @patch.object(conf, "koji_tag_prefixes", new=["module"]) def test_find_out_all_module_koji_tags(self): session = Mock() session.getFullInheritance.return_value = [ - {'name': 'module-tag1-s-v-c'}, - {'name': 'module-tag2-s-v-c'}, - {'name': 'tag-1'}, + {"name": "module-tag1-s-v-c"}, + {"name": "module-tag2-s-v-c"}, + {"name": "tag-1"}, ] - expected_tags = ['module-tag1-s-v-c', 'module-tag2-s-v-c'] + expected_tags = ["module-tag1-s-v-c", "module-tag2-s-v-c"] - tags = ursine.find_module_koji_tags(session, 'tag-a-build') + tags = ursine.find_module_koji_tags(session, "tag-a-build") assert expected_tags == tags - @patch.object(conf, 'koji_tag_prefixes', new=['module']) + @patch.object(conf, "koji_tag_prefixes", new=["module"]) def test_return_empty_if_no_module_koji_tags(self): session = Mock() - session.getFullInheritance.return_value = [ - {'name': 'tag-1'}, {'name': 'tag-2'}, - ] + session.getFullInheritance.return_value = [{"name": "tag-1"}, {"name": 
"tag-2"}] - tags = ursine.find_module_koji_tags(session, 'tag-a-build') + tags = ursine.find_module_koji_tags(session, "tag-a-build") assert [] == tags @@ -60,56 +58,68 @@ class TestFindUrsineRootTags: def setup_method(self): self.koji_session = Mock() - self.koji_session.getTag.side_effect = lambda name: \ - None if name == 'X-build' else {'name': name} + self.koji_session.getTag.side_effect = \ + lambda name: None if name == "X-build" else {"name": name} def test_find_build_tags(self): - with patch.object(conf, 'koji_external_repo_url_prefix', - new='http://example.com/brewroot/'): - tags = ursine.find_build_tags_from_external_repos(self.koji_session, [ - { - 'external_repo_name': 'tag-1-external-repo', - 'url': 'http://example.com/brewroot/repos/tag-1-build/latest/$arch/' - }, - { - 'external_repo_name': 'tag-2-external-repo', - 'url': 'http://example.com/brewroot/repos/tag-2-build/latest/$arch/' - }, - ]) + with patch.object( + conf, "koji_external_repo_url_prefix", new="http://example.com/brewroot/" + ): + tags = ursine.find_build_tags_from_external_repos( + self.koji_session, + [ + { + "external_repo_name": "tag-1-external-repo", + "url": "http://example.com/brewroot/repos/tag-1-build/latest/$arch/", + }, + { + "external_repo_name": "tag-2-external-repo", + "url": "http://example.com/brewroot/repos/tag-2-build/latest/$arch/", + }, + ], + ) - assert ['tag-1-build', 'tag-2-build'] == tags + assert ["tag-1-build", "tag-2-build"] == tags def test_return_emtpy_if_no_match_external_repo_url(self): - with patch.object(conf, 'koji_external_repo_url_prefix', - new='http://example.com/brewroot/'): - tags = ursine.find_build_tags_from_external_repos(self.koji_session, [ - { - 'external_repo_name': 'tag-1-external-repo', - 'url': 'https://another-site.org/repos/tag-1-build/latest/$arch/' - }, - { - 'external_repo_name': 'tag-2-external-repo', - 'url': 'https://another-site.org/repos/tag-2-build/latest/$arch/' - }, - ]) + with patch.object( + conf, "koji_external_repo_url_prefix", new="http://example.com/brewroot/" + ): + tags = ursine.find_build_tags_from_external_repos( + self.koji_session, + [ + { + "external_repo_name": "tag-1-external-repo", + "url": "https://another-site.org/repos/tag-1-build/latest/$arch/", + }, + { + "external_repo_name": "tag-2-external-repo", + "url": "https://another-site.org/repos/tag-2-build/latest/$arch/", + }, + ], + ) assert [] == tags def test_some_tag_is_not_koji_tag(self): - with patch.object(conf, 'koji_external_repo_url_prefix', - new='http://example.com/brewroot/'): - tags = ursine.find_build_tags_from_external_repos(self.koji_session, [ - { - 'external_repo_name': 'tag-1-external-repo', - 'url': 'http://example.com/brewroot/repos/tag-1-build/latest/$arch/' - }, - { - 'external_repo_name': 'tag-2-external-repo', - 'url': 'http://example.com/brewroot/repos/X-build/latest/$arch/' - }, - ]) + with patch.object( + conf, "koji_external_repo_url_prefix", new="http://example.com/brewroot/" + ): + tags = ursine.find_build_tags_from_external_repos( + self.koji_session, + [ + { + "external_repo_name": "tag-1-external-repo", + "url": "http://example.com/brewroot/repos/tag-1-build/latest/$arch/", + }, + { + "external_repo_name": "tag-2-external-repo", + "url": "http://example.com/brewroot/repos/X-build/latest/$arch/", + }, + ], + ) - assert ['tag-1-build'] == tags + assert ["tag-1-build"] == tags class TestGetModulemdsFromUrsineContent: @@ -121,71 +131,63 @@ class TestGetModulemdsFromUrsineContent: def teardown_method(self, test_method): clean_database() - 
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession): session = ClientSession.return_value # No module koji_tag in ursine content yet. This will result in empty # ursine modulemds is returned. - session.getFullInheritance.return_value = [ - {'name': 'tag-1.0-build'}, - ] - session.getExternalRepoList.return_value = [ - { - 'external_repo_name': 'tag-1.0-external-repo', - 'url': 'http://example.com/repos/tag-4-build/latest/$arch/' - } - ] + session.getFullInheritance.return_value = [{"name": "tag-1.0-build"}] + session.getExternalRepoList.return_value = [{ + "external_repo_name": "tag-1.0-external-repo", + "url": "http://example.com/repos/tag-4-build/latest/$arch/", + }] - modulemds = ursine.get_modulemds_from_ursine_content('tag') + modulemds = ursine.get_modulemds_from_ursine_content("tag") assert [] == modulemds - @patch.object(conf, 'koji_tag_prefixes', new=['module']) - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') + @patch.object(conf, "koji_tag_prefixes", new=["module"]) + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") def test_get_modulemds(self, ClientSession): session = ClientSession.return_value # Ensure to to get build tag for further query of ursine content. # For this test, the build tag is tag-4-build - session.getExternalRepoList.return_value = [ - { - 'external_repo_name': 'tag-1.0-external-repo', - 'url': 'http://example.com/repos/tag-4-build/latest/$arch/' - } - ] + session.getExternalRepoList.return_value = [{ + "external_repo_name": "tag-1.0-external-repo", + "url": "http://example.com/repos/tag-4-build/latest/$arch/", + }] # Ensure to return module tags from ursine content of fake build tag # specified in above external repo's url. def mock_getFullInheritance(tag): - if tag == 'tag-4-build': + if tag == "tag-4-build": return [ - {'name': 'tag-1.0-build'}, + {"name": "tag-1.0-build"}, # Below two modules should be returned and whose modulemd # should be also queried from database. - {'name': 'module-name1-s-2020-c'}, - {'name': 'module-name2-s-2021-c'}, + {"name": "module-name1-s-2020-c"}, + {"name": "module-name2-s-2021-c"}, ] - raise ValueError('{} is not handled by test.'.format(tag)) + raise ValueError("{} is not handled by test.".format(tag)) + session.getFullInheritance.side_effect = mock_getFullInheritance # Defaults to DB resolver, so create fake module builds and store them # into database to ensure they can be queried. mmd_name1s2020c = make_module( - 'name1:s:2020:c', - xmd={'mbs': {'koji_tag': 'module-name1-s-2020-c'}}) + "name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}}) mmd_name2s2021c = make_module( - 'name2:s:2021:c', - xmd={'mbs': {'koji_tag': 'module-name2-s-2021-c'}}) + "name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}}) - koji_tag = 'tag' # It's ok to use arbitrary tag name. - with patch.object(conf, 'koji_external_repo_url_prefix', new='http://example.com/'): + koji_tag = "tag" # It's ok to use arbitrary tag name. 
+ with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"): modulemds = ursine.get_modulemds_from_ursine_content(koji_tag) test_nsvcs = [item.dup_nsvc() for item in modulemds] test_nsvcs.sort() - expected_nsvcs = [mmd_name1s2020c.mmd().dup_nsvc(), - mmd_name2s2021c.mmd().dup_nsvc()] + expected_nsvcs = [mmd_name1s2020c.mmd().dup_nsvc(), mmd_name2s2021c.mmd().dup_nsvc()] expected_nsvcs.sort() session.getExternalRepoList.assert_called_once_with(koji_tag) @@ -195,92 +197,85 @@ class TestGetModulemdsFromUrsineContent: class TestRecordStreamCollisionModules: """Test ursine.record_stream_collision_modules""" - @patch.object(conf, 'base_module_names', new=['platform']) - @patch.object(ursine, 'find_stream_collision_modules') + @patch.object(conf, "base_module_names", new=["platform"]) + @patch.object(ursine, "find_stream_collision_modules") def test_nothing_changed_if_no_base_module_is_in_buildrequires( - self, find_stream_collision_modules): - xmd = { - 'mbs': { - 'buildrequires': { - 'modulea': {'stream': 'master'} - } - } - } - fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False) + self, find_stream_collision_modules + ): + xmd = {"mbs": {"buildrequires": {"modulea": {"stream": "master"}}}} + fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False) original_xmd = glib.from_variant_dict(fake_mmd.get_xmd()) - with patch.object(ursine, 'log') as log: + with patch.object(ursine, "log") as log: ursine.handle_stream_collision_modules(fake_mmd) assert 2 == log.info.call_count find_stream_collision_modules.assert_not_called() assert original_xmd == glib.from_variant_dict(fake_mmd.get_xmd()) - @patch.object(conf, 'base_module_names', new=['platform']) - @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content') + @patch.object(conf, "base_module_names", new=["platform"]) + @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content") def test_mark_handled_even_if_no_modules_in_ursine_content( - self, get_modulemds_from_ursine_content): + self, get_modulemds_from_ursine_content + ): xmd = { - 'mbs': { - 'buildrequires': { - 'modulea': {'stream': 'master'}, - 'platform': {'stream': 'master', 'koji_tag': 'module-rhel-8.0-build'}, + "mbs": { + "buildrequires": { + "modulea": {"stream": "master"}, + "platform": {"stream": "master", "koji_tag": "module-rhel-8.0-build"}, } } } - fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False) + fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False) original_xmd = glib.from_variant_dict(fake_mmd.get_xmd()) get_modulemds_from_ursine_content.return_value = [] - with patch.object(ursine, 'log') as log: + with patch.object(ursine, "log") as log: ursine.handle_stream_collision_modules(fake_mmd) assert 2 == log.info.call_count expected_xmd = copy.deepcopy(original_xmd) # Ensure stream_collision_modules is set. 
- expected_xmd['mbs']['buildrequires']['platform']['stream_collision_modules'] = '' - expected_xmd['mbs']['buildrequires']['platform']['ursine_rpms'] = '' + expected_xmd["mbs"]["buildrequires"]["platform"]["stream_collision_modules"] = "" + expected_xmd["mbs"]["buildrequires"]["platform"]["ursine_rpms"] = "" assert expected_xmd == glib.from_variant_dict(fake_mmd.get_xmd()) - @patch.object(conf, 'base_module_names', new=['platform', 'project-platform']) - @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content') - @patch('module_build_service.resolver.GenericResolver.create') - @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession') - def test_add_collision_modules(self, ClientSession, resolver_create, - get_modulemds_from_ursine_content): + @patch.object(conf, "base_module_names", new=["platform", "project-platform"]) + @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content") + @patch("module_build_service.resolver.GenericResolver.create") + @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession") + def test_add_collision_modules( + self, ClientSession, resolver_create, get_modulemds_from_ursine_content + ): xmd = { - 'mbs': { - 'buildrequires': { - 'modulea': {'stream': 'master'}, - 'foo': {'stream': '1'}, - 'bar': {'stream': '2'}, - 'platform': {'stream': 'master', 'koji_tag': 'module-rhel-8.0-build'}, - 'project-platform': { - 'stream': 'master', 'koji_tag': 'module-project-1.0-build' + "mbs": { + "buildrequires": { + "modulea": {"stream": "master"}, + "foo": {"stream": "1"}, + "bar": {"stream": "2"}, + "platform": {"stream": "master", "koji_tag": "module-rhel-8.0-build"}, + "project-platform": { + "stream": "master", + "koji_tag": "module-project-1.0-build", }, } } } - fake_mmd = make_module('name1:s:2020:c', - xmd=xmd, store_to_db=False) + fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False) def mock_get_ursine_modulemds(koji_tag): - if koji_tag == 'module-rhel-8.0-build': + if koji_tag == "module-rhel-8.0-build": return [ # This is the one - make_module('modulea:10:20180813041838:5ea3b708', - store_to_db=False), - make_module('moduleb:1.0:20180113042038:6ea3b105', - store_to_db=False), + make_module("modulea:10:20180813041838:5ea3b708", store_to_db=False), + make_module("moduleb:1.0:20180113042038:6ea3b105", store_to_db=False), ] - if koji_tag == 'module-project-1.0-build': + if koji_tag == "module-project-1.0-build": return [ # Both of them are the collided modules - make_module('bar:6:20181013041838:817fa3a8', - store_to_db=False), - make_module('foo:2:20180113041838:95f078a1', - store_to_db=False), + make_module("bar:6:20181013041838:817fa3a8", store_to_db=False), + make_module("foo:2:20180113041838:95f078a1", store_to_db=False), ] get_modulemds_from_ursine_content.side_effect = mock_get_ursine_modulemds @@ -288,34 +283,31 @@ class TestRecordStreamCollisionModules: # Mock for finding out built rpms def mock_get_module(name, stream, version, context, strict=True): return { - 'modulea:10:20180813041838:5ea3b708': { - 'koji_tag': 'module-modulea-10-20180813041838-5ea3b708', + "modulea:10:20180813041838:5ea3b708": { + "koji_tag": "module-modulea-10-20180813041838-5ea3b708" }, - 'bar:6:20181013041838:817fa3a8': { - 'koji_tag': 'module-bar-6-20181013041838-817fa3a8', + "bar:6:20181013041838:817fa3a8": { + "koji_tag": "module-bar-6-20181013041838-817fa3a8" }, - 'foo:2:20180113041838:95f078a1': { - 'koji_tag': 'module-foo-2-20180113041838-95f078a1', + "foo:2:20180113041838:95f078a1": { + 
"koji_tag": "module-foo-2-20180113041838-95f078a1" }, - }['{}:{}:{}:{}'.format(name, stream, version, context)] + }["{}:{}:{}:{}".format(name, stream, version, context)] resolver = resolver_create.return_value resolver._get_module.side_effect = mock_get_module def mock_listTaggedRPMS(tag, latest): return { - 'module-modulea-10-20180813041838-5ea3b708': [[ - {'name': 'pkg1', 'version': '1.0', 'release': '1.fc28', - 'epoch': None}, - ]], - 'module-bar-6-20181013041838-817fa3a8': [[ - {'name': 'pkg2', 'version': '2.0', 'release': '1.fc28', - 'epoch': None}, - ]], - 'module-foo-2-20180113041838-95f078a1': [[ - {'name': 'pkg3', 'version': '3.0', 'release': '1.fc28', - 'epoch': None}, - ]], + "module-modulea-10-20180813041838-5ea3b708": [ + [{"name": "pkg1", "version": "1.0", "release": "1.fc28", "epoch": None}] + ], + "module-bar-6-20181013041838-817fa3a8": [ + [{"name": "pkg2", "version": "2.0", "release": "1.fc28", "epoch": None}] + ], + "module-foo-2-20180113041838-95f078a1": [ + [{"name": "pkg3", "version": "3.0", "release": "1.fc28", "epoch": None}] + ], }[tag] koji_session = ClientSession.return_value @@ -324,59 +316,47 @@ class TestRecordStreamCollisionModules: ursine.handle_stream_collision_modules(fake_mmd) xmd = glib.from_variant_dict(fake_mmd.get_xmd()) - buildrequires = xmd['mbs']['buildrequires'] + buildrequires = xmd["mbs"]["buildrequires"] - assert (['modulea:10:20180813041838:5ea3b708'] == - buildrequires['platform']['stream_collision_modules']) - assert (['pkg1-0:1.0-1.fc28'] == - buildrequires['platform']['ursine_rpms']) + modules = buildrequires["platform"]["stream_collision_modules"] + assert ["modulea:10:20180813041838:5ea3b708"] == modules + assert ["pkg1-0:1.0-1.fc28"] == buildrequires["platform"]["ursine_rpms"] - modules = sorted( - buildrequires['project-platform']['stream_collision_modules']) - expected_modules = ['bar:6:20181013041838:817fa3a8', - 'foo:2:20180113041838:95f078a1'] + modules = sorted(buildrequires["project-platform"]["stream_collision_modules"]) + expected_modules = ["bar:6:20181013041838:817fa3a8", "foo:2:20180113041838:95f078a1"] assert expected_modules == modules - assert (['pkg2-0:2.0-1.fc28', 'pkg3-0:3.0-1.fc28'] == - sorted(buildrequires['project-platform']['ursine_rpms'])) + rpms = sorted(buildrequires["project-platform"]["ursine_rpms"]) + assert ["pkg2-0:2.0-1.fc28", "pkg3-0:3.0-1.fc28"] == rpms class TestFindStreamCollisionModules: """Test ursine.find_stream_collision_modules""" - @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content') - def test_no_modulemds_found_from_ursine_content( - self, get_modulemds_from_ursine_content): + @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content") + def test_no_modulemds_found_from_ursine_content(self, get_modulemds_from_ursine_content): get_modulemds_from_ursine_content.return_value = [] - assert not ursine.find_stream_collision_modules({}, 'koji_tag') + assert not ursine.find_stream_collision_modules({}, "koji_tag") - @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content') + @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content") def test_no_collisions_found(self, get_modulemds_from_ursine_content): - xmd_mbs_buildrequires = { - 'modulea': {'stream': 'master'}, - 'moduleb': {'stream': '10'}, - } + xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}} get_modulemds_from_ursine_content.return_value = [ - make_module('moduler:1:1:c1', store_to_db=False), - make_module('modules:2:1:c2', 
store_to_db=False), - make_module('modulet:3:1:c3', store_to_db=False), + make_module("moduler:1:1:c1", store_to_db=False), + make_module("modules:2:1:c2", store_to_db=False), + make_module("modulet:3:1:c3", store_to_db=False), ] - assert [] == ursine.find_stream_collision_modules( - xmd_mbs_buildrequires, 'koji_tag') + assert [] == ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag") - @patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content') + @patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content") def test_collision_modules_are_found(self, get_modulemds_from_ursine_content): - xmd_mbs_buildrequires = { - 'modulea': {'stream': 'master'}, - 'moduleb': {'stream': '10'}, - } + xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}} fake_modules = [ - make_module('moduler:1:1:c1', store_to_db=False), - make_module('moduleb:6:1:c2', store_to_db=False), - make_module('modulet:3:1:c3', store_to_db=False), + make_module("moduler:1:1:c1", store_to_db=False), + make_module("moduleb:6:1:c2", store_to_db=False), + make_module("modulet:3:1:c3", store_to_db=False), ] get_modulemds_from_ursine_content.return_value = fake_modules - assert [fake_modules[1].dup_nsvc()] == \ - ursine.find_stream_collision_modules( - xmd_mbs_buildrequires, 'koji_tag') + modules = ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag") + assert [fake_modules[1].dup_nsvc()] == modules diff --git a/tests/test_utils/test_utils.py b/tests/test_utils/test_utils.py index 3fb04b5c..f7919ffa 100644 --- a/tests/test_utils/test_utils.py +++ b/tests/test_utils/test_utils.py @@ -31,8 +31,14 @@ import module_build_service.scm from module_build_service import models, conf from module_build_service.errors import ProgrammingError, ValidationError, UnprocessableEntity from tests import ( - reuse_component_init_data, db, reuse_shared_userspace_init_data, clean_database, init_data, - scheduler_init_data, make_module) + reuse_component_init_data, + db, + reuse_shared_userspace_init_data, + clean_database, + init_data, + scheduler_init_data, + make_module, +) import mock import koji import pytest @@ -55,7 +61,7 @@ class FakeSCM(object): self.mocked_scm.return_value.checkout = self.checkout self.mocked_scm.return_value.name = self.name - self.mocked_scm.return_value.branch = 'master' + self.mocked_scm.return_value.branch = "master" self.mocked_scm.return_value.get_latest = self.get_latest self.mocked_scm.return_value.commit = self.commit self.mocked_scm.return_value.repository_root = "https://src.stg.fedoraproject.org/modules/" @@ -66,12 +72,12 @@ class FakeSCM(object): self.sourcedir = path.join(temp_dir, self.name) mkdir(self.sourcedir) base_dir = path.abspath(path.dirname(__file__)) - copyfile(path.join(base_dir, '..', 'staged_data', self.mmd_filename), - self.get_module_yaml()) + copyfile( + path.join(base_dir, "..", "staged_data", self.mmd_filename), self.get_module_yaml()) return self.sourcedir - def get_latest(self, ref='master'): + def get_latest(self, ref="master"): return self.commit if self.commit else ref def get_module_yaml(self): @@ -79,119 +85,118 @@ class FakeSCM(object): class TestUtilsComponentReuse: - def setup_method(self, test_method): reuse_component_init_data() def teardown_method(self, test_method): clean_database() - @pytest.mark.parametrize('changed_component', [ - 'perl-List-Compare', 'perl-Tangerine', 'tangerine', None - ]) + @pytest.mark.parametrize( + "changed_component", ["perl-List-Compare", "perl-Tangerine", 
"tangerine", None] + ) def test_get_reusable_component_different_component(self, changed_component): second_module_build = models.ModuleBuild.query.filter_by(id=3).one() if changed_component: mmd = second_module_build.mmd() - mmd.get_rpm_components()['tangerine'].set_ref( - '00ea1da4192a2030f9ae023de3b3143ed647bbab') + mmd.get_rpm_components()["tangerine"].set_ref( + "00ea1da4192a2030f9ae023de3b3143ed647bbab" + ) second_module_build.modulemd = to_text_type(mmd.dumps()) second_module_changed_component = models.ComponentBuild.query.filter_by( package=changed_component, module_id=3).one() - second_module_changed_component.ref = '00ea1da4192a2030f9ae023de3b3143ed647bbab' + second_module_changed_component.ref = "00ea1da4192a2030f9ae023de3b3143ed647bbab" db.session.add(second_module_changed_component) db.session.commit() plc_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-List-Compare') + db.session, second_module_build, "perl-List-Compare") pt_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-Tangerine') + db.session, second_module_build, "perl-Tangerine") tangerine_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'tangerine') + db.session, second_module_build, "tangerine") - if changed_component == 'perl-List-Compare': + if changed_component == "perl-List-Compare": # perl-Tangerine can be reused even though a component in its batch has changed assert plc_rv is None - assert pt_rv.package == 'perl-Tangerine' + assert pt_rv.package == "perl-Tangerine" assert tangerine_rv is None - elif changed_component == 'perl-Tangerine': + elif changed_component == "perl-Tangerine": # perl-List-Compare can be reused even though a component in its batch has changed - assert plc_rv.package == 'perl-List-Compare' + assert plc_rv.package == "perl-List-Compare" assert pt_rv is None assert tangerine_rv is None - elif changed_component == 'tangerine': + elif changed_component == "tangerine": # perl-List-Compare and perl-Tangerine can be reused since they are in an earlier # buildorder than tangerine - assert plc_rv.package == 'perl-List-Compare' - assert pt_rv.package == 'perl-Tangerine' + assert plc_rv.package == "perl-List-Compare" + assert pt_rv.package == "perl-Tangerine" assert tangerine_rv is None elif changed_component is None: # Nothing has changed so everthing can be used - assert plc_rv.package == 'perl-List-Compare' - assert pt_rv.package == 'perl-Tangerine' - assert tangerine_rv.package == 'tangerine' + assert plc_rv.package == "perl-List-Compare" + assert pt_rv.package == "perl-Tangerine" + assert tangerine_rv.package == "tangerine" def test_get_reusable_component_different_rpm_macros(self): second_module_build = models.ModuleBuild.query.filter_by(id=3).one() mmd = second_module_build.mmd() - mmd.set_rpm_buildopts({'macros': '%my_macro 1'}) + mmd.set_rpm_buildopts({"macros": "%my_macro 1"}) second_module_build.modulemd = to_text_type(mmd.dumps()) db.session.commit() plc_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-List-Compare') + db.session, second_module_build, "perl-List-Compare") assert plc_rv is None pt_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-Tangerine') + db.session, second_module_build, "perl-Tangerine") assert pt_rv is None - @pytest.mark.parametrize('set_current_arch', [True, False]) - @pytest.mark.parametrize('set_database_arch', [True, False]) + 
@pytest.mark.parametrize("set_current_arch", [True, False]) + @pytest.mark.parametrize("set_database_arch", [True, False]) def test_get_reusable_component_different_arches(self, set_database_arch, set_current_arch): second_module_build = models.ModuleBuild.query.filter_by(id=3).one() - if set_current_arch: # set architecture for current build + if set_current_arch: # set architecture for current build mmd = second_module_build.mmd() arches = Modulemd.SimpleSet() - arches.set(['i686']) - mmd.get_rpm_components()['tangerine'].set_arches(arches) + arches.set(["i686"]) + mmd.get_rpm_components()["tangerine"].set_arches(arches) second_module_build.modulemd = to_text_type(mmd.dumps()) - if set_database_arch: # set architecture for build in database + if set_database_arch: # set architecture for build in database second_module_changed_component = models.ComponentBuild.query.filter_by( - package='tangerine', module_id=2).one() + package="tangerine", module_id=2).one() mmd = second_module_changed_component.module_build.mmd() arches = Modulemd.SimpleSet() - arches.set(['i686']) - mmd.get_rpm_components()['tangerine'].set_arches(arches) + arches.set(["i686"]) + mmd.get_rpm_components()["tangerine"].set_arches(arches) second_module_changed_component.module_build.modulemd = to_text_type(mmd.dumps()) db.session.add(second_module_changed_component) db.session.commit() tangerine = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'tangerine') + db.session, second_module_build, "tangerine") assert bool(tangerine is None) != bool(set_current_arch == set_database_arch) - @pytest.mark.parametrize('rebuild_strategy', models.ModuleBuild.rebuild_strategies.keys()) + @pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys()) def test_get_reusable_component_different_buildrequires_hash(self, rebuild_strategy): first_module_build = models.ModuleBuild.query.filter_by(id=2).one() first_module_build.rebuild_strategy = rebuild_strategy second_module_build = models.ModuleBuild.query.filter_by(id=3).one() mmd = second_module_build.mmd() xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires']['platform']['ref'] = \ - 'da39a3ee5e6b4b0d3255bfef95601890afd80709' + xmd["mbs"]["buildrequires"]["platform"]["ref"] = "da39a3ee5e6b4b0d3255bfef95601890afd80709" mmd.set_xmd(glib.dict_values(xmd)) second_module_build.modulemd = to_text_type(mmd.dumps()) - second_module_build.ref_build_context = '37c6c57bedf4305ef41249c1794760b5cb8fad17' + second_module_build.ref_build_context = "37c6c57bedf4305ef41249c1794760b5cb8fad17" second_module_build.rebuild_strategy = rebuild_strategy db.session.commit() plc_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-List-Compare') + db.session, second_module_build, "perl-List-Compare") pt_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-Tangerine') + db.session, second_module_build, "perl-Tangerine") tangerine_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'tangerine') + db.session, second_module_build, "tangerine") if rebuild_strategy == "only-changed": assert plc_rv is not None @@ -202,26 +207,26 @@ class TestUtilsComponentReuse: assert pt_rv is None assert tangerine_rv is None - @pytest.mark.parametrize('rebuild_strategy', models.ModuleBuild.rebuild_strategies.keys()) + @pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys()) def 
test_get_reusable_component_different_buildrequires_stream(self, rebuild_strategy): first_module_build = models.ModuleBuild.query.filter_by(id=2).one() first_module_build.rebuild_strategy = rebuild_strategy second_module_build = models.ModuleBuild.query.filter_by(id=3).one() mmd = second_module_build.mmd() xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires']['platform']['stream'] = 'different' + xmd["mbs"]["buildrequires"]["platform"]["stream"] = "different" mmd.set_xmd(glib.dict_values(xmd)) second_module_build.modulemd = to_text_type(mmd.dumps()) - second_module_build.build_context = '37c6c57bedf4305ef41249c1794760b5cb8fad17' + second_module_build.build_context = "37c6c57bedf4305ef41249c1794760b5cb8fad17" second_module_build.rebuild_strategy = rebuild_strategy db.session.commit() plc_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-List-Compare') + db.session, second_module_build, "perl-List-Compare") pt_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-Tangerine') + db.session, second_module_build, "perl-Tangerine") tangerine_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'tangerine') + db.session, second_module_build, "tangerine") assert plc_rv is None assert pt_rv is None @@ -231,31 +236,31 @@ class TestUtilsComponentReuse: second_module_build = models.ModuleBuild.query.filter_by(id=3).one() mmd = second_module_build.mmd() br_list = Modulemd.SimpleSet() - br_list.add('master') - mmd.get_dependencies()[0].set_buildrequires({'some_module': br_list}) + br_list.add("master") + mmd.get_dependencies()[0].set_buildrequires({"some_module": br_list}) xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires'] = { - 'some_module': { - 'ref': 'da39a3ee5e6b4b0d3255bfef95601890afd80709', - 'stream': 'master', - 'version': '20170123140147' + xmd["mbs"]["buildrequires"] = { + "some_module": { + "ref": "da39a3ee5e6b4b0d3255bfef95601890afd80709", + "stream": "master", + "version": "20170123140147", } } mmd.set_xmd(glib.dict_values(xmd)) second_module_build.modulemd = to_text_type(mmd.dumps()) - second_module_build.ref_build_context = '37c6c57bedf4305ef41249c1794760b5cb8fad17' + second_module_build.ref_build_context = "37c6c57bedf4305ef41249c1794760b5cb8fad17" db.session.commit() plc_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-List-Compare') + db.session, second_module_build, "perl-List-Compare") assert plc_rv is None pt_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'perl-Tangerine') + db.session, second_module_build, "perl-Tangerine") assert pt_rv is None tangerine_rv = module_build_service.utils.get_reusable_component( - db.session, second_module_build, 'tangerine') + db.session, second_module_build, "tangerine") assert tangerine_rv is None @patch("module_build_service.utils.submit.submit_module_build") @@ -276,7 +281,7 @@ class TestUtilsComponentReuse: username = "test" stream = "dev" - with io.open(modulemd_file_path, "w", encoding='utf-8') as fd: + with io.open(modulemd_file_path, "w", encoding="utf-8") as fd: fd.write(modulemd_yaml) with open(modulemd_file_path, "rb") as fd: @@ -287,27 +292,25 @@ class TestUtilsComponentReuse: username_arg = mock_submit_args[0] mmd_arg = mock_submit_args[1] assert mmd_arg.get_stream() == stream - assert "\n\n%__spec_check_pre exit 0\n" in mmd_arg.get_rpm_buildopts()['macros'] + assert 
"\n\n%__spec_check_pre exit 0\n" in mmd_arg.get_rpm_buildopts()["macros"] assert username_arg == username rmtree(module_dir) class TestUtils: - def setup_method(self, test_method): clean_database() def teardown_method(self, test_method): clean_database() - @pytest.mark.parametrize('context', ["c1", None]) + @pytest.mark.parametrize("context", ["c1", None]) def test_import_mmd_contexts(self, context): - mmd = load_mmd_file(path.join( - BASE_DIR, '..', 'staged_data', 'formatted_testmodule.yaml')) + mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml")) mmd.set_context(context) xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['koji_tag'] = 'foo' + xmd["mbs"]["koji_tag"] = "foo" mmd.set_xmd(glib.dict_values(xmd)) build, msgs = module_build_service.utils.import_mmd(db.session, mmd) @@ -322,33 +325,34 @@ class TestUtils: def test_import_mmd_multiple_dependencies(self): mmd = Modulemd.Module().new_from_file( - path.join(BASE_DIR, '..', 'staged_data', 'formatted_testmodule.yaml')) + path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml")) mmd.upgrade() mmd.add_dependencies(mmd.get_dependencies()[0]) - expected_error = 'The imported module\'s dependencies list should contain just one element' + expected_error = "The imported module's dependencies list should contain just one element" with pytest.raises(UnprocessableEntity) as e: module_build_service.utils.import_mmd(db.session, mmd) assert str(e.value) == expected_error def test_import_mmd_no_xmd_buildrequires(self): mmd = Modulemd.Module().new_from_file( - path.join(BASE_DIR, '..', 'staged_data', 'formatted_testmodule.yaml')) + path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml")) mmd.upgrade() xmd = glib.from_variant_dict(mmd.get_xmd()) - del xmd['mbs']['buildrequires'] + del xmd["mbs"]["buildrequires"] mmd.set_xmd(glib.dict_values(xmd)) expected_error = ( - 'The imported module buildrequires other modules, but the metadata in the ' - 'xmd["mbs"]["buildrequires"] dictionary is missing entries') + "The imported module buildrequires other modules, but the metadata in the " + 'xmd["mbs"]["buildrequires"] dictionary is missing entries' + ) with pytest.raises(UnprocessableEntity) as e: module_build_service.utils.import_mmd(db.session, mmd) assert str(e.value) == expected_error def test_import_mmd_minimal_xmd_from_local_repository(self): mmd = Modulemd.Module().new_from_file( - path.join(BASE_DIR, '..', 'staged_data', 'formatted_testmodule.yaml')) + path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml")) mmd.upgrade() xmd = glib.from_variant_dict(mmd.get_xmd()) xmd["mbs"] = {} @@ -360,24 +364,27 @@ class TestUtils: build, msgs = module_build_service.utils.import_mmd(db.session, mmd, False) assert build.name == mmd.get_name() - @pytest.mark.parametrize('stream, disttag_marking, error_msg', ( - ('f28', None, None), - ('f28', 'fedora28', None), - ('f-28', 'f28', None), - ('f-28', None, 'The stream cannot contain a dash unless disttag_marking is set'), - ('f28', 'f-28', 'The disttag_marking cannot contain a dash'), - ('f-28', 'fedora-28', 'The disttag_marking cannot contain a dash') - )) + @pytest.mark.parametrize( + "stream, disttag_marking, error_msg", + ( + ("f28", None, None), + ("f28", "fedora28", None), + ("f-28", "f28", None), + ("f-28", None, "The stream cannot contain a dash unless disttag_marking is set"), + ("f28", "f-28", "The disttag_marking cannot contain a dash"), + ("f-28", "fedora-28", "The disttag_marking cannot contain a dash"), + ), + ) def 
test_import_mmd_base_module(self, stream, disttag_marking, error_msg): clean_database(add_platform_module=False) mmd = Modulemd.Module().new_from_file( - path.join(BASE_DIR, '..', 'staged_data', 'platform.yaml')) + path.join(BASE_DIR, "..", "staged_data", "platform.yaml")) mmd.upgrade() mmd.set_stream(stream) if disttag_marking: xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['disttag_marking'] = disttag_marking + xmd["mbs"]["disttag_marking"] = disttag_marking mmd.set_xmd(glib.dict_values(xmd)) if error_msg: @@ -399,16 +406,16 @@ class TestUtils: scheduler_init_data(1) build_one = models.ModuleBuild.query.get(2) release = module_build_service.utils.get_rpm_release(build_one) - assert release == 'module+f28+2+814cfa39' + assert release == "module+f28+2+814cfa39" def test_get_rpm_release_platform_stream_override(self): scheduler_init_data(1) # Set the disttag_marking override on the platform - platform = models.ModuleBuild.query.filter_by(name='platform', stream='f28').first() + platform = models.ModuleBuild.query.filter_by(name="platform", stream="f28").first() platform_mmd = platform.mmd() platform_xmd = glib.from_variant_dict(platform_mmd.get_xmd()) - platform_xmd['mbs']['disttag_marking'] = 'fedora28' + platform_xmd["mbs"]["disttag_marking"] = "fedora28" platform_mmd.set_xmd(glib.dict_values(platform_xmd)) platform.modulemd = to_text_type(platform_mmd.dumps()) db.session.add(platform) @@ -416,33 +423,38 @@ class TestUtils: build_one = models.ModuleBuild.query.get(2) release = module_build_service.utils.get_rpm_release(build_one) - assert release == 'module+fedora28+2+814cfa39' + assert release == "module+fedora28+2+814cfa39" - @patch('module_build_service.config.Config.allowed_disttag_marking_module_names', - new_callable=mock.PropertyMock, return_value=['build']) + @patch( + "module_build_service.config.Config.allowed_disttag_marking_module_names", + new_callable=mock.PropertyMock, + return_value=["build"], + ) def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn): """ Test that when a module buildrequires a module in conf.allowed_disttag_marking_module_names, and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag. 
""" scheduler_init_data(1) - mmd_path = path.abspath(path.join( - __file__, path.pardir, path.pardir, 'staged_data', 'build_metadata_module.yaml')) + mmd_path = path.abspath( + path.join( + __file__, path.pardir, path.pardir, "staged_data", "build_metadata_module.yaml") + ) metadata_mmd = module_build_service.utils.load_mmd_file(mmd_path) module_build_service.utils.import_mmd(db.session, metadata_mmd) build_one = models.ModuleBuild.query.get(2) mmd = build_one.mmd() dep = mmd.get_dependencies()[0] - dep.add_buildrequires('build', ['product1.2']) - mmd.set_dependencies((dep, )) + dep.add_buildrequires("build", ["product1.2"]) + mmd.set_dependencies((dep,)) xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires']['build'] = { - 'filtered_rpms': [], - 'ref': 'virtual', - 'stream': 'product1.2', - 'version': '1', - 'context': '00000000', + xmd["mbs"]["buildrequires"]["build"] = { + "filtered_rpms": [], + "ref": "virtual", + "stream": "product1.2", + "version": "1", + "context": "00000000", } mmd.set_xmd(glib.dict_values(xmd)) build_one.modulemd = to_text_type(mmd.dumps()) @@ -450,7 +462,7 @@ class TestUtils: db.session.commit() release = module_build_service.utils.get_rpm_release(build_one) - assert release == 'module+product12+2+814cfa39' + assert release == "module+product12+2+814cfa39" def test_get_rpm_release_mse_scratch(self): init_data(contexts=True, scratch=True) @@ -465,57 +477,64 @@ class TestUtils: scheduler_init_data(1, scratch=True) build_one = models.ModuleBuild.query.get(2) release = module_build_service.utils.get_rpm_release(build_one) - assert release == 'scrmod+f28+2+814cfa39' + assert release == "scrmod+f28+2+814cfa39" - @pytest.mark.parametrize('scmurl', [ - ('https://src.stg.fedoraproject.org/modules/testmodule.git' - '?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'), - None - ]) - @patch('module_build_service.scm.SCM') + @pytest.mark.parametrize( + "scmurl", + [ + ( + "https://src.stg.fedoraproject.org/modules/testmodule.git" + "?#620ec77321b2ea7b0d67d82992dda3e1d67055b4" + ), + None, + ], + ) + @patch("module_build_service.scm.SCM") def test_format_mmd(self, mocked_scm, scmurl): - mocked_scm.return_value.commit = \ - '620ec77321b2ea7b0d67d82992dda3e1d67055b4' + mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4" # For all the RPMs in testmodule, get_latest is called mocked_scm.return_value.get_latest.side_effect = [ - '4ceea43add2366d8b8c5a622a2fb563b625b9abf', - 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'] + "4ceea43add2366d8b8c5a622a2fb563b625b9abf", + "fbed359411a1baa08d4a88e0d12d426fbf8f602c", + ] hashes_returned = { - 'master': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c', - 'f28': '4ceea43add2366d8b8c5a622a2fb563b625b9abf', - 'f27': '5deef23acd2367d8b8d5a621a2fc568b695bc3bd'} + "master": "fbed359411a1baa08d4a88e0d12d426fbf8f602c", + "f28": "4ceea43add2366d8b8c5a622a2fb563b625b9abf", + "f27": "5deef23acd2367d8b8d5a621a2fc568b695bc3bd", + } def mocked_get_latest(ref="master"): return hashes_returned[ref] mocked_scm.return_value.get_latest = mocked_get_latest mmd = Modulemd.Module().new_from_file( - path.join(BASE_DIR, '..', 'staged_data', 'testmodule.yaml')) + path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml")) mmd.upgrade() # Modify the component branches so we can identify them later on - mmd.get_rpm_components()['perl-Tangerine'].set_ref('f28') - mmd.get_rpm_components()['tangerine'].set_ref('f27') + mmd.get_rpm_components()["perl-Tangerine"].set_ref("f28") + mmd.get_rpm_components()["tangerine"].set_ref("f27") 
module_build_service.utils.format_mmd(mmd, scmurl) # Make sure that original refs are not changed. mmd_pkg_refs = [pkg.get_ref() for pkg in mmd.get_rpm_components().values()] assert set(mmd_pkg_refs) == set(hashes_returned.keys()) br = mmd.get_dependencies()[0].get_buildrequires() - assert list(br.keys()) == ['platform'] - assert list(br.values())[0].get() == ['f28'] + assert list(br.keys()) == ["platform"] + assert list(br.values())[0].get() == ["f28"] xmd = { - 'mbs': { - 'commit': '', - 'rpms': { - 'perl-List-Compare': {'ref': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'}, - 'perl-Tangerine': {'ref': '4ceea43add2366d8b8c5a622a2fb563b625b9abf'}, - 'tangerine': {'ref': '5deef23acd2367d8b8d5a621a2fc568b695bc3bd'}}, - 'scmurl': '' + "mbs": { + "commit": "", + "rpms": { + "perl-List-Compare": {"ref": "fbed359411a1baa08d4a88e0d12d426fbf8f602c"}, + "perl-Tangerine": {"ref": "4ceea43add2366d8b8c5a622a2fb563b625b9abf"}, + "tangerine": {"ref": "5deef23acd2367d8b8d5a621a2fc568b695bc3bd"}, + }, + "scmurl": "", } } if scmurl: - xmd['mbs']['commit'] = '620ec77321b2ea7b0d67d82992dda3e1d67055b4' - xmd['mbs']['scmurl'] = scmurl + xmd["mbs"]["commit"] = "620ec77321b2ea7b0d67d82992dda3e1d67055b4" + xmd["mbs"]["scmurl"] = scmurl mmd_xmd = glib.from_variant_dict(mmd.get_xmd()) assert mmd_xmd == xmd @@ -528,217 +547,215 @@ class TestUtils: """ reuse_shared_userspace_init_data() new_module = models.ModuleBuild.query.get(3) - rv = module_build_service.utils.get_reusable_component( - db.session, new_module, 'llvm') - assert rv.package == 'llvm' + rv = module_build_service.utils.get_reusable_component(db.session, new_module, "llvm") + assert rv.package == "llvm" def test_validate_koji_tag_wrong_tag_arg_during_programming(self): """ Test that we fail on a wrong param name (non-existing one) due to programming error. """ - @module_build_service.utils.validate_koji_tag('wrong_tag_arg') + @module_build_service.utils.validate_koji_tag("wrong_tag_arg") def validate_koji_tag_programming_error(good_tag_arg, other_arg): pass with pytest.raises(ProgrammingError): - validate_koji_tag_programming_error('dummy', 'other_val') + validate_koji_tag_programming_error("dummy", "other_val") def test_validate_koji_tag_bad_tag_value(self): """ Test that we fail on a bad tag value. """ - @module_build_service.utils.validate_koji_tag('tag_arg') + @module_build_service.utils.validate_koji_tag("tag_arg") def validate_koji_tag_bad_tag_value(tag_arg): pass with pytest.raises(ValidationError): - validate_koji_tag_bad_tag_value('forbiddentagprefix-foo') + validate_koji_tag_bad_tag_value("forbiddentagprefix-foo") def test_validate_koji_tag_bad_tag_value_in_list(self): """ Test that we fail on a list containing bad tag value. """ - @module_build_service.utils.validate_koji_tag('tag_arg') + @module_build_service.utils.validate_koji_tag("tag_arg") def validate_koji_tag_bad_tag_value_in_list(tag_arg): pass with pytest.raises(ValidationError): - validate_koji_tag_bad_tag_value_in_list([ - 'module-foo', 'forbiddentagprefix-bar']) + validate_koji_tag_bad_tag_value_in_list(["module-foo", "forbiddentagprefix-bar"]) def test_validate_koji_tag_good_tag_value(self): """ Test that we pass on a good tag value. 
""" - @module_build_service.utils.validate_koji_tag('tag_arg') + @module_build_service.utils.validate_koji_tag("tag_arg") def validate_koji_tag_good_tag_value(tag_arg): return True - assert validate_koji_tag_good_tag_value('module-foo') is True + assert validate_koji_tag_good_tag_value("module-foo") is True def test_validate_koji_tag_good_tag_values_in_list(self): """ Test that we pass on a list of good tag values. """ - @module_build_service.utils.validate_koji_tag('tag_arg') + @module_build_service.utils.validate_koji_tag("tag_arg") def validate_koji_tag_good_tag_values_in_list(tag_arg): return True - assert validate_koji_tag_good_tag_values_in_list(['module-foo', 'module-bar']) is True + assert validate_koji_tag_good_tag_values_in_list(["module-foo", "module-bar"]) is True def test_validate_koji_tag_good_tag_value_in_dict(self): """ Test that we pass on a dict arg with default key and a good value. """ - @module_build_service.utils.validate_koji_tag('tag_arg') + @module_build_service.utils.validate_koji_tag("tag_arg") def validate_koji_tag_good_tag_value_in_dict(tag_arg): return True - assert validate_koji_tag_good_tag_value_in_dict({'name': 'module-foo'}) is True + assert validate_koji_tag_good_tag_value_in_dict({"name": "module-foo"}) is True def test_validate_koji_tag_good_tag_value_in_dict_nondefault_key(self): """ Test that we pass on a dict arg with non-default key and a good value. """ - @module_build_service.utils.validate_koji_tag('tag_arg', - dict_key='nondefault') + @module_build_service.utils.validate_koji_tag("tag_arg", dict_key="nondefault") def validate_koji_tag_good_tag_value_in_dict_nondefault_key(tag_arg): return True - assert validate_koji_tag_good_tag_value_in_dict_nondefault_key( - {'nondefault': 'module-foo'}) is True + assert ( + validate_koji_tag_good_tag_value_in_dict_nondefault_key({"nondefault": "module-foo"}) + is True + ) def test_validate_koji_tag_double_trouble_good(self): """ Test that we pass on a list of tags that are good. """ - expected = 'foo' + expected = "foo" - @module_build_service.utils.validate_koji_tag(['tag_arg1', 'tag_arg2']) + @module_build_service.utils.validate_koji_tag(["tag_arg1", "tag_arg2"]) def validate_koji_tag_double_trouble(tag_arg1, tag_arg2): return expected - actual = validate_koji_tag_double_trouble('module-1', 'module-2') + actual = validate_koji_tag_double_trouble("module-1", "module-2") assert actual == expected def test_validate_koji_tag_double_trouble_bad(self): """ Test that we fail on a list of tags that are bad. """ - @module_build_service.utils.validate_koji_tag(['tag_arg1', 'tag_arg2']) + @module_build_service.utils.validate_koji_tag(["tag_arg1", "tag_arg2"]) def validate_koji_tag_double_trouble(tag_arg1, tag_arg2): pass with pytest.raises(ValidationError): - validate_koji_tag_double_trouble('module-1', 'BADNEWS-2') + validate_koji_tag_double_trouble("module-1", "BADNEWS-2") def test_validate_koji_tag_is_None(self): """ Test that we fail on a tag which is None. 
""" - @module_build_service.utils.validate_koji_tag('tag_arg') + @module_build_service.utils.validate_koji_tag("tag_arg") def validate_koji_tag_is_None(tag_arg): pass with pytest.raises(ValidationError) as cm: validate_koji_tag_is_None(None) - assert str(cm.value).endswith(' No value provided.') is True + assert str(cm.value).endswith(" No value provided.") is True - @patch('module_build_service.scm.SCM') + @patch("module_build_service.scm.SCM") def test_record_component_builds_duplicate_components(self, mocked_scm): with app.app_context(): clean_database() - mocked_scm.return_value.commit = \ - '620ec77321b2ea7b0d67d82992dda3e1d67055b4' + mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4" mocked_scm.return_value.get_latest.side_effect = [ - '4ceea43add2366d8b8c5a622a2fb563b625b9abf', - 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'] + "4ceea43add2366d8b8c5a622a2fb563b625b9abf", + "fbed359411a1baa08d4a88e0d12d426fbf8f602c", + ] - testmodule_mmd_path = path.join(BASE_DIR, '..', 'staged_data', 'testmodule.yaml') + testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml") mmd = Modulemd.Module().new_from_file(testmodule_mmd_path) mmd.upgrade() - mmd.set_name('testmodule-variant') + mmd.set_name("testmodule-variant") module_build = module_build_service.models.ModuleBuild() - module_build.name = 'testmodule-variant' - module_build.stream = 'master' + module_build.name = "testmodule-variant" + module_build.stream = "master" module_build.version = 20170109091357 - module_build.state = models.BUILD_STATES['init'] + module_build.state = models.BUILD_STATES["init"] module_build.scmurl = \ - 'https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79' + "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79" module_build.batch = 1 - module_build.owner = 'Tom Brady' + module_build.owner = "Tom Brady" module_build.time_submitted = datetime(2017, 2, 15, 16, 8, 18) module_build.time_modified = datetime(2017, 2, 15, 16, 19, 35) - module_build.rebuild_strategy = 'changed-and-after' + module_build.rebuild_strategy = "changed-and-after" module_build.modulemd = to_text_type(mmd.dumps()) db.session.add(module_build) db.session.commit() # Rename the the modulemd to include - mmd.set_name('testmodule') + mmd.set_name("testmodule") # Remove perl-Tangerine and tangerine from the modulemd to include so only one # component conflicts comps = mmd.get_rpm_components() - del comps['perl-Tangerine'] - del comps['tangerine'] + del comps["perl-Tangerine"] + del comps["tangerine"] mmd.set_rpm_components(comps) error_msg = ( 'The included module "testmodule" in "testmodule-variant" have ' - 'the following conflicting components: perl-List-Compare') + "the following conflicting components: perl-List-Compare" + ) with pytest.raises(UnprocessableEntity) as e: module_build_service.utils.record_component_builds( mmd, module_build, main_mmd=module_build.mmd()) assert str(e.value) == error_msg - @patch('module_build_service.scm.SCM') + @patch("module_build_service.scm.SCM") def test_record_component_builds_set_weight(self, mocked_scm): with app.app_context(): clean_database() - mocked_scm.return_value.commit = \ - '620ec77321b2ea7b0d67d82992dda3e1d67055b4' + mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4" mocked_scm.return_value.get_latest.side_effect = [ - '4ceea43add2366d8b8c5a622a2fb563b625b9abf', - 'fbed359411a1baa08d4a88e0d12d426fbf8f602c', - 'dbed259411a1baa08d4a88e0d12d426fbf8f6037'] + "4ceea43add2366d8b8c5a622a2fb563b625b9abf", + 
"fbed359411a1baa08d4a88e0d12d426fbf8f602c", + "dbed259411a1baa08d4a88e0d12d426fbf8f6037", + ] - testmodule_mmd_path = path.join( - BASE_DIR, '..', 'staged_data', 'testmodule.yaml') + testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml") mmd = Modulemd.Module().new_from_file(testmodule_mmd_path) mmd.upgrade() module_build = module_build_service.models.ModuleBuild() - module_build.name = 'testmodule' - module_build.stream = 'master' + module_build.name = "testmodule" + module_build.stream = "master" module_build.version = 20170109091357 - module_build.state = models.BUILD_STATES['init'] + module_build.state = models.BUILD_STATES["init"] module_build.scmurl = \ - 'https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79' + "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79" module_build.batch = 1 - module_build.owner = 'Tom Brady' + module_build.owner = "Tom Brady" module_build.time_submitted = datetime(2017, 2, 15, 16, 8, 18) module_build.time_modified = datetime(2017, 2, 15, 16, 19, 35) - module_build.rebuild_strategy = 'changed-and-after' + module_build.rebuild_strategy = "changed-and-after" module_build.modulemd = to_text_type(mmd.dumps()) db.session.add(module_build) db.session.commit() module_build_service.utils.record_component_builds(mmd, module_build) - assert module_build.state == models.BUILD_STATES['init'] + assert module_build.state == models.BUILD_STATES["init"] db.session.refresh(module_build) for c in module_build.component_builds: assert c.weight == 1.5 - @patch('module_build_service.scm.SCM') + @patch("module_build_service.scm.SCM") def test_format_mmd_arches(self, mocked_scm): with app.app_context(): clean_database() - mocked_scm.return_value.commit = \ - '620ec77321b2ea7b0d67d82992dda3e1d67055b4' + mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4" mocked_scm.return_value.get_latest.side_effect = [ - '4ceea43add2366d8b8c5a622a2fb563b625b9abf', - 'fbed359411a1baa08d4a88e0d12d426fbf8f602c', - 'dbed259411a1baa08d4a88e0d12d426fbf8f6037', - '4ceea43add2366d8b8c5a622a2fb563b625b9abf', - 'fbed359411a1baa08d4a88e0d12d426fbf8f602c', - 'dbed259411a1baa08d4a88e0d12d426fbf8f6037'] + "4ceea43add2366d8b8c5a622a2fb563b625b9abf", + "fbed359411a1baa08d4a88e0d12d426fbf8f602c", + "dbed259411a1baa08d4a88e0d12d426fbf8f6037", + "4ceea43add2366d8b8c5a622a2fb563b625b9abf", + "fbed359411a1baa08d4a88e0d12d426fbf8f602c", + "dbed259411a1baa08d4a88e0d12d426fbf8f6037", + ] - testmodule_mmd_path = path.join( - BASE_DIR, '..', 'staged_data', 'testmodule.yaml') - test_archs = ['powerpc', 'i486'] + testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml") + test_archs = ["powerpc", "i486"] mmd1 = Modulemd.Module().new_from_file(testmodule_mmd_path) mmd1.upgrade() @@ -761,8 +778,8 @@ class TestUtils: for pkg in mmd2.get_rpm_components().values(): assert set(pkg.get_arches().get()) == set(test_archs) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.utils.submit.ThreadPool') + @patch("module_build_service.scm.SCM") + @patch("module_build_service.utils.submit.ThreadPool") def test_format_mmd_update_time_modified(self, tp, mocked_scm): with app.app_context(): init_data() @@ -774,32 +791,31 @@ class TestUtils: test_datetime = datetime(2019, 2, 14, 11, 11, 45, 42968) - testmodule_mmd_path = path.join( - BASE_DIR, '..', 'staged_data', 'testmodule.yaml') + testmodule_mmd_path = path.join(BASE_DIR, "..", "staged_data", "testmodule.yaml") mmd1 = 
Modulemd.Module().new_from_file(testmodule_mmd_path) mmd1.upgrade() - with patch('module_build_service.utils.submit.datetime') as dt: + with patch("module_build_service.utils.submit.datetime") as dt: dt.utcnow.return_value = test_datetime module_build_service.utils.format_mmd(mmd1, None, build, db.session) assert build.time_modified == test_datetime def test_generate_koji_tag_in_nsvc_format(self): - name, stream, version, context = ('testmodule', 'master', '20170816080815', '37c6c57') + name, stream, version, context = ("testmodule", "master", "20170816080815", "37c6c57") tag = module_build_service.utils.generate_koji_tag(name, stream, version, context) - assert tag == 'module-testmodule-master-20170816080815-37c6c57' + assert tag == "module-testmodule-master-20170816080815-37c6c57" def test_generate_koji_tag_in_hash_format(self): - name, version, context = ('testmodule', '20170816080815', '37c6c57') - stream = 'this-is-a-stream-with-very-looooong-name' + '-blah' * 50 + name, version, context = ("testmodule", "20170816080815", "37c6c57") + stream = "this-is-a-stream-with-very-looooong-name" + "-blah" * 50 nsvc_list = [name, stream, version, context] tag = module_build_service.utils.generate_koji_tag(*nsvc_list) - expected_tag = 'module-1cf457d452e54dda' + expected_tag = "module-1cf457d452e54dda" assert tag == expected_tag @patch("module_build_service.utils.submit.requests") @@ -807,27 +823,25 @@ class TestUtils: """ Push mock pdc responses through the eol check function. """ response = mock.Mock() - response.json.return_value = {"results": [{ - "id": 347907, - "global_component": "mariadb", - "name": "10.1", - "slas": [{ - "id": 694207, - "sla": "security_fixes", - "eol": "2019-12-01", - }], - "type": "module", - "active": True, - "critical_path": False, - }]} + response.json.return_value = { + "results": [{ + "id": 347907, + "global_component": "mariadb", + "name": "10.1", + "slas": [{"id": 694207, "sla": "security_fixes", "eol": "2019-12-01"}], + "type": "module", + "active": True, + "critical_path": False, + }] + } requests.get.return_value = response - is_eol = module_build_service.utils.submit._is_eol_in_pdc('mariadb', '10.1') + is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1") assert not is_eol response.json.return_value["results"][0]["active"] = False - is_eol = module_build_service.utils.submit._is_eol_in_pdc('mariadb', '10.1') + is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1") assert is_eol def test_get_prefixed_version_f28(self): @@ -841,12 +855,12 @@ class TestUtils: build_one = models.ModuleBuild.query.get(2) mmd = build_one.mmd() xmd = glib.from_variant_dict(mmd.get_xmd()) - xmd['mbs']['buildrequires']['platform']['stream'] = 'fl7.0.1-beta' + xmd["mbs"]["buildrequires"]["platform"]["stream"] = "fl7.0.1-beta" mmd.set_xmd(glib.dict_values(xmd)) v = module_build_service.utils.submit.get_prefixed_version(mmd) assert v == 7000120180205135154 - @patch('module_build_service.utils.mse.generate_expanded_mmds') + @patch("module_build_service.utils.mse.generate_expanded_mmds") def test_submit_build_new_mse_build(self, generate_expanded_mmds): """ Tests that finished build can be resubmitted in case the resubmitted @@ -854,7 +868,7 @@ class TestUtils: buildrequires). 
""" build = make_module("foo:stream:0:c1", {}, {}) - assert build.state == models.BUILD_STATES['ready'] + assert build.state == models.BUILD_STATES["ready"] mmd1 = build.mmd() mmd2 = build.mmd() @@ -867,9 +881,7 @@ class TestUtils: mmd1_copy.set_xmd({}) builds = module_build_service.utils.submit_module_build("foo", mmd1_copy, {}) ret = {b.mmd().get_context(): b.state for b in builds} - assert ret == { - "c1": models.BUILD_STATES['ready'], - "c2": models.BUILD_STATES['init']} + assert ret == {"c1": models.BUILD_STATES["ready"], "c2": models.BUILD_STATES["init"]} assert builds[0].siblings == [builds[1].id] assert builds[1].siblings == [builds[0].id] @@ -885,7 +897,7 @@ class DummyModuleBuilder(GenericBuilder): TAGGED_COMPONENTS = [] - @module_build_service.utils.validate_koji_tag('tag_name') + @module_build_service.utils.validate_koji_tag("tag_name") def __init__(self, owner, module, config, tag_name, components): self.module_str = module self.tag_name = tag_name @@ -924,7 +936,7 @@ class DummyModuleBuilder(GenericBuilder): def build(self, artifact_name, source): DummyModuleBuilder._build_id += 1 - state = koji.BUILD_STATES['COMPLETE'] + state = koji.BUILD_STATES["COMPLETE"] reason = "Submitted %s to Koji" % (artifact_name) return DummyModuleBuilder._build_id, state, reason, None @@ -936,7 +948,7 @@ class DummyModuleBuilder(GenericBuilder): def cancel_build(self, task_id): pass - def list_tasks_for_components(self, component_builds=None, state='active'): + def list_tasks_for_components(self, component_builds=None, state="active"): pass def repo_from_tag(self, config, tag_name, arch): @@ -946,10 +958,11 @@ class DummyModuleBuilder(GenericBuilder): pass -@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", - return_value={'build': [], 'srpm-build': []}) +@patch( + "module_build_service.builder.GenericBuilder.default_buildroot_groups", + return_value={"build": [], "srpm-build": []}, +) class TestBatches: - def setup_method(self, test_method): reuse_component_init_data() GenericBuilder.register_backend_class(DummyModuleBuilder) @@ -985,16 +998,15 @@ class TestBatches: # properly. for msg in further_work: if type(msg) == module_build_service.messaging.KojiBuildChange: - assert msg.build_new_state == koji.BUILD_STATES['COMPLETE'] + assert msg.build_new_state == koji.BUILD_STATES["COMPLETE"] component_build = models.ComponentBuild.from_component_event(db.session, msg) - assert component_build.state == koji.BUILD_STATES['BUILDING'] + assert component_build.state == koji.BUILD_STATES["BUILDING"] # When we handle these KojiBuildChange messages, MBS should tag all # the components just once. for msg in further_work: if type(msg) == module_build_service.messaging.KojiBuildChange: - module_build_service.scheduler.handlers.components.complete( - conf, db.session, msg) + module_build_service.scheduler.handlers.components.complete(conf, db.session, msg) # Since we have reused all the components in the batch, there should # be fake KojiRepoChange message. @@ -1003,7 +1015,7 @@ class TestBatches: # Check that packages have been tagged just once. 
assert len(DummyModuleBuilder.TAGGED_COMPONENTS) == 2 - @patch('module_build_service.utils.batches.start_build_component') + @patch("module_build_service.utils.batches.start_build_component") def test_start_next_batch_build_reuse_some(self, mock_sbc, default_buildroot_groups): """ Tests that start_next_batch_build: @@ -1017,8 +1029,8 @@ class TestBatches: module_build = models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 1 plc_component = models.ComponentBuild.query.filter_by( - module_id=3, package='perl-List-Compare').one() - plc_component.ref = '5ceea46add2366d8b8c5a623a2fb563b625b9abd' + module_id=3, package="perl-List-Compare").one() + plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd" builder = mock.MagicMock() builder.recover_orphaned_artifact.return_value = [] @@ -1034,27 +1046,31 @@ class TestBatches: # set to COMPLETE, but the current component build state in the DB should be set # to BUILDING, so KojiBuildChange message handler handles the change # properly. - assert further_work[0].build_new_state == koji.BUILD_STATES['COMPLETE'] + assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"] component_build = models.ComponentBuild.from_component_event(db.session, further_work[0]) - assert component_build.state == koji.BUILD_STATES['BUILDING'] - assert component_build.package == 'perl-Tangerine' + assert component_build.state == koji.BUILD_STATES["BUILDING"] + assert component_build.package == "perl-Tangerine" assert component_build.reused_component_id is not None # Make sure perl-List-Compare is set to the build state as well but not reused - assert plc_component.state == koji.BUILD_STATES['BUILDING'] + assert plc_component.state == koji.BUILD_STATES["BUILDING"] assert plc_component.reused_component_id is None mock_sbc.assert_called_once() - @patch('module_build_service.utils.batches.start_build_component') - @patch('module_build_service.config.Config.rebuild_strategy', - new_callable=mock.PropertyMock, return_value='all') + @patch("module_build_service.utils.batches.start_build_component") + @patch( + "module_build_service.config.Config.rebuild_strategy", + new_callable=mock.PropertyMock, + return_value="all", + ) def test_start_next_batch_build_rebuild_strategy_all( - self, mock_rm, mock_sbc, default_buildroot_groups): + self, mock_rm, mock_sbc, default_buildroot_groups + ): """ Tests that start_next_batch_build can't reuse any components in the batch because the rebuild method is set to "all". 
""" module_build = models.ModuleBuild.query.filter_by(id=3).one() - module_build.rebuild_strategy = 'all' + module_build.rebuild_strategy = "all" module_build.batch = 1 builder = mock.MagicMock() @@ -1074,18 +1090,22 @@ class TestBatches: Tests whether exception occured while building sets the state to failed """ builder = mock.MagicMock() - builder.build.side_effect = Exception('Something have gone terribly wrong') + builder.build.side_effect = Exception("Something have gone terribly wrong") component = mock.MagicMock() module_build_service.utils.batches.start_build_component(builder, component) - assert component.state == koji.BUILD_STATES['FAILED'] + assert component.state == koji.BUILD_STATES["FAILED"] - @patch('module_build_service.utils.batches.start_build_component') - @patch('module_build_service.config.Config.rebuild_strategy', - new_callable=mock.PropertyMock, return_value='only-changed') + @patch("module_build_service.utils.batches.start_build_component") + @patch( + "module_build_service.config.Config.rebuild_strategy", + new_callable=mock.PropertyMock, + return_value="only-changed", + ) def test_start_next_batch_build_rebuild_strategy_only_changed( - self, mock_rm, mock_sbc, default_buildroot_groups): + self, mock_rm, mock_sbc, default_buildroot_groups + ): """ Tests that start_next_batch_build reuses all unchanged components in the batch because the rebuild method is set to "only-changed". This means that one component is reused in batch @@ -1093,12 +1113,12 @@ class TestBatches: in batch 3 can be reused. """ module_build = models.ModuleBuild.query.filter_by(id=3).one() - module_build.rebuild_strategy = 'only-changed' + module_build.rebuild_strategy = "only-changed" module_build.batch = 1 # perl-List-Compare changed plc_component = models.ComponentBuild.query.filter_by( - module_id=3, package='perl-List-Compare').one() - plc_component.ref = '5ceea46add2366d8b8c5a623a2fb563b625b9abd' + module_id=3, package="perl-List-Compare").one() + plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd" builder = mock.MagicMock() builder.recover_orphaned_artifact.return_value = [] @@ -1114,22 +1134,22 @@ class TestBatches: # set to COMPLETE, but the current component build state in the DB should be set # to BUILDING, so KojiBuildChange message handler handles the change # properly. 
- assert further_work[0].build_new_state == koji.BUILD_STATES['COMPLETE'] + assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"] component_build = models.ComponentBuild.from_component_event(db.session, further_work[0]) - assert component_build.state == koji.BUILD_STATES['BUILDING'] - assert component_build.package == 'perl-Tangerine' + assert component_build.state == koji.BUILD_STATES["BUILDING"] + assert component_build.package == "perl-Tangerine" assert component_build.reused_component_id is not None # Make sure perl-List-Compare is set to the build state as well but not reused - assert plc_component.state == koji.BUILD_STATES['BUILDING'] + assert plc_component.state == koji.BUILD_STATES["BUILDING"] assert plc_component.reused_component_id is None mock_sbc.assert_called_once() mock_sbc.reset_mock() # Complete the build - plc_component.state = koji.BUILD_STATES['COMPLETE'] + plc_component.state = koji.BUILD_STATES["COMPLETE"] pt_component = models.ComponentBuild.query.filter_by( - module_id=3, package='perl-Tangerine').one() - pt_component.state = koji.BUILD_STATES['COMPLETE'] + module_id=3, package="perl-Tangerine").one() + pt_component.state = koji.BUILD_STATES["COMPLETE"] # Start the next build batch further_work = module_build_service.utils.start_next_batch_build( @@ -1138,14 +1158,14 @@ class TestBatches: assert module_build.batch == 3 # Verify that tangerine was reused even though perl-Tangerine was rebuilt in the previous # batch - assert further_work[0].build_new_state == koji.BUILD_STATES['COMPLETE'] + assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"] component_build = models.ComponentBuild.from_component_event(db.session, further_work[0]) - assert component_build.state == koji.BUILD_STATES['BUILDING'] - assert component_build.package == 'tangerine' + assert component_build.state == koji.BUILD_STATES["BUILDING"] + assert component_build.package == "tangerine" assert component_build.reused_component_id is not None mock_sbc.assert_not_called() - @patch('module_build_service.utils.batches.start_build_component') + @patch("module_build_service.utils.batches.start_build_component") def test_start_next_batch_build_smart_scheduling(self, mock_sbc, default_buildroot_groups): """ Tests that components with the longest build time will be scheduled first @@ -1153,11 +1173,11 @@ class TestBatches: module_build = models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 1 pt_component = models.ComponentBuild.query.filter_by( - module_id=3, package='perl-Tangerine').one() - pt_component.ref = '6ceea46add2366d8b8c5a623b2fb563b625bfabe' + module_id=3, package="perl-Tangerine").one() + pt_component.ref = "6ceea46add2366d8b8c5a623b2fb563b625bfabe" plc_component = models.ComponentBuild.query.filter_by( - module_id=3, package='perl-List-Compare').one() - plc_component.ref = '5ceea46add2366d8b8c5a623a2fb563b625b9abd' + module_id=3, package="perl-List-Compare").one() + plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd" # Components are by default built by component id. To find out that weight is respected, # we have to set bigger weight to component with lower id. 
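Note: the "smart scheduling" the next hunk asserts on amounts to dispatching the unbuilt
components of a batch in descending weight order, so the longest-running builds start
first. A minimal sketch, assuming each component row exposes a numeric weight attribute;
the names mirror the test above, while the actual selection logic lives in
module_build_service.utils.batches and may differ in detail:

    # Illustrative sketch only, not part of this patch.
    def schedule_batch(builder, unbuilt_components):
        # Heaviest (longest-running) components are submitted first.
        for component in sorted(unbuilt_components, key=lambda c: c.weight, reverse=True):
            start_build_component(builder, component)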
@@ -1175,16 +1195,16 @@ class TestBatches: # Make sure we don't have any messages returned since no components should be reused assert len(further_work) == 0 # Make sure both components are set to the build state but not reused - assert pt_component.state == koji.BUILD_STATES['BUILDING'] + assert pt_component.state == koji.BUILD_STATES["BUILDING"] assert pt_component.reused_component_id is None - assert plc_component.state == koji.BUILD_STATES['BUILDING'] + assert plc_component.state == koji.BUILD_STATES["BUILDING"] assert plc_component.reused_component_id is None # Test the order of the scheduling expected_calls = [mock.call(builder, plc_component), mock.call(builder, pt_component)] assert mock_sbc.mock_calls == expected_calls - @patch('module_build_service.utils.batches.start_build_component') + @patch("module_build_service.utils.batches.start_build_component") def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups): """ Tests that start_next_batch_build does not start new batch when @@ -1195,7 +1215,7 @@ class TestBatches: # The component was reused when the batch first started building_component = module_build.current_batch()[0] - building_component.state = koji.BUILD_STATES['BUILDING'] + building_component.state = koji.BUILD_STATES["BUILDING"] building_component.reused_component_id = 123 db.session.commit() @@ -1225,14 +1245,15 @@ class TestBatches: assert module_build.batch == 1 -@patch("module_build_service.config.Config.mock_resultsdir", - new_callable=mock.PropertyMock, - return_value=path.join( - BASE_DIR, '..', 'staged_data', "local_builds")) -@patch("module_build_service.config.Config.system", - new_callable=mock.PropertyMock, return_value="mock") +@patch( + "module_build_service.config.Config.mock_resultsdir", + new_callable=mock.PropertyMock, + return_value=path.join(BASE_DIR, "..", "staged_data", "local_builds"), +) +@patch( + "module_build_service.config.Config.system", new_callable=mock.PropertyMock, return_value="mock" +) class TestLocalBuilds: - def setup_method(self): clean_database() @@ -1248,8 +1269,7 @@ class TestLocalBuilds: assert local_modules[0].koji_tag.endswith( "/module-testmodule-master-20170816080816/results") - def test_load_local_builds_name_stream( - self, conf_system, conf_resultsdir): + def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir): with app.app_context(): module_build_service.utils.load_local_builds("testmodule:master") local_modules = models.ModuleBuild.local_modules(db.session) @@ -1258,15 +1278,13 @@ class TestLocalBuilds: assert local_modules[0].koji_tag.endswith( "/module-testmodule-master-20170816080816/results") - def test_load_local_builds_name_stream_non_existing( - self, conf_system, conf_resultsdir): + def test_load_local_builds_name_stream_non_existing(self, conf_system, conf_resultsdir): with app.app_context(): with pytest.raises(RuntimeError): module_build_service.utils.load_local_builds("testmodule:x") models.ModuleBuild.local_modules(db.session) - def test_load_local_builds_name_stream_version( - self, conf_system, conf_resultsdir): + def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir): with app.app_context(): module_build_service.utils.load_local_builds("testmodule:master:20170816080815") local_modules = models.ModuleBuild.local_modules(db.session) @@ -1275,36 +1293,30 @@ class TestLocalBuilds: assert local_modules[0].koji_tag.endswith( "/module-testmodule-master-20170816080815/results") - def test_load_local_builds_name_stream_version_non_existing( - 
self, conf_system, conf_resultsdir): + def test_load_local_builds_name_stream_version_non_existing(self, conf_system, conf_resultsdir): with app.app_context(): with pytest.raises(RuntimeError): module_build_service.utils.load_local_builds("testmodule:master:123") models.ModuleBuild.local_modules(db.session) - def test_load_local_builds_platform( - self, conf_system, conf_resultsdir): + def test_load_local_builds_platform(self, conf_system, conf_resultsdir): with app.app_context(): module_build_service.utils.load_local_builds("platform") local_modules = models.ModuleBuild.local_modules(db.session) assert len(local_modules) == 1 - assert local_modules[0].koji_tag.endswith( - "/module-platform-f28-3/results") + assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results") - def test_load_local_builds_platform_f28( - self, conf_system, conf_resultsdir): + def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir): with app.app_context(): module_build_service.utils.load_local_builds("platform:f28") local_modules = models.ModuleBuild.local_modules(db.session) assert len(local_modules) == 1 - assert local_modules[0].koji_tag.endswith( - "/module-platform-f28-3/results") + assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results") class TestOfflineLocalBuilds: - def setup_method(self): clean_database() @@ -1320,12 +1332,14 @@ class TestOfflineLocalBuilds: mmd = module_build.mmd() xmd = glib.from_variant_dict(mmd.get_xmd()) assert xmd == { - 'mbs': { - 'buildrequires': {}, - 'commit': 'ref_000000', - 'koji_tag': 'repofile://', - 'mse': 'true', - 'requires': {}}} + "mbs": { + "buildrequires": {}, + "commit": "ref_000000", + "koji_tag": "repofile://", + "mse": "true", + "requires": {}, + } + } profiles = mmd.get_profiles() assert set(profiles.keys()) == set(["buildroot", "srpm-buildroot"]) @@ -1335,11 +1349,11 @@ class TestOfflineLocalBuilds: with patch("dnf.Base") as dnf_base: repo = mock.MagicMock() repo.repofile = "/etc/yum.repos.d/foo.repo" - with open(path.join(BASE_DIR, '..', 'staged_data', 'formatted_testmodule.yaml')) as f: + with open(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml")) as f: repo.get_metadata_content.return_value = f.read() base = dnf_base.return_value base.repos = {"reponame": repo} - patched_open.return_value.readlines.return_value = ('FOO=bar', 'PLATFORM_ID=platform:x') + patched_open.return_value.readlines.return_value = ("FOO=bar", "PLATFORM_ID=platform:x") module_build_service.utils.import_builds_from_local_dnf_repos() diff --git a/tests/test_utils/test_utils_mse.py b/tests/test_utils/test_utils_mse.py index 32e29936..c5131ab2 100644 --- a/tests/test_utils/test_utils_mse.py +++ b/tests/test_utils/test_utils_mse.py @@ -29,7 +29,6 @@ from tests import db, clean_database, make_module, init_data, base_dir class TestUtilsModuleStreamExpansion: - def setup_method(self, test_method): clean_database(False) @@ -45,8 +44,10 @@ class TestUtilsModuleStreamExpansion: mmd = module_build.mmd() module_build_service.utils.expand_mse_streams(db.session, mmd) modules = module_build_service.utils.get_mmds_required_by_module_recursively(mmd) - nsvcs = [":".join([m.get_name(), m.get_stream(), str(m.get_version()), m.get_context()]) - for m in modules] + nsvcs = [ + ":".join([m.get_name(), m.get_stream(), str(m.get_version()), m.get_context()]) + for m in modules + ] return nsvcs def _generate_default_modules(self): @@ -70,99 +71,96 @@ class TestUtilsModuleStreamExpansion: self._generate_default_modules() module_build = 
make_module( "app:1:0:c1", {"gtk": ["1", "2"]}, {"platform": ["f28"], "gtk": ["1", "2"]}) - mmds = module_build_service.utils.generate_expanded_mmds( - db.session, module_build.mmd()) + mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd()) contexts = set([mmd.get_context() for mmd in mmds]) - assert set(['e1e005fb', 'ce132a1e']) == contexts + assert set(["e1e005fb", "ce132a1e"]) == contexts @pytest.mark.parametrize( - 'requires,build_requires,stream_ambigous,expected_xmd,expected_buildrequires', [ - ({"gtk": ["1", "2"]}, - {"platform": ["f28"], "gtk": ["1", "2"]}, True, - set([ - frozenset(['platform:f28:0:c10', 'gtk:2:0:c4']), - frozenset(['platform:f28:0:c10', 'gtk:1:0:c2']) - ]), - set([ - frozenset(['gtk:1', 'platform:f28']), - frozenset(['gtk:2', 'platform:f28']), - ])), - - ({"foo": ["1"]}, - {"platform": ["f28"], "foo": ["1"], "gtk": ["1", "2"]}, True, - set([ - frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']), - frozenset(['foo:1:0:c2', 'gtk:2:0:c4', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['foo:1', 'gtk:1', 'platform:f28']), - frozenset(['foo:1', 'gtk:2', 'platform:f28']) - ])), - - ({"gtk": ["1"], "foo": ["1"]}, - {"platform": ["f28"], "gtk": ["1"], "foo": ["1"]}, False, - set([ - frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['foo:1', 'gtk:1', 'platform:f28']) - ])), - - ({"gtk": ["1"], "foo": ["1"]}, - {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, False, - set([ - frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['foo:1', 'gtk:1', 'platform:f28']) - ])), - - ({"gtk": ["-2"], "foo": ["-2"]}, - {"platform": ["f28"], "gtk": ["-2"], "foo": ["-2"]}, True, - set([ - frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['foo:1', 'gtk:1', 'platform:f28']) - ])), - - ({"gtk": ["1"], "foo": ["1"]}, - {"platform": ["f28"], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, False, - set([ - frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['foo:1', 'gtk:1', 'platform:f28']) - ])), - - ({"gtk": ["1"], "foo": ["1"]}, - {"platform": ["f28"], "gtk": ["1"]}, False, - set([ - frozenset(['gtk:1:0:c2', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['gtk:1', 'platform:f28']) - ])), - - ({"gtk": []}, {"platform": ["f28"], "gtk": ["1"]}, True, - set([ - frozenset(['gtk:1:0:c2', 'platform:f28:0:c10']) - ]), - set([ - frozenset(['gtk:1', 'platform:f28']) - ])), - - ({}, {"platform": ["f29"], "app": ["1"]}, False, - set([ - frozenset(['app:1:0:c6', 'platform:f29:0:c11']) - ]), - set([ - frozenset(['app:1', 'platform:f29']) - ])), - ]) + "requires,build_requires,stream_ambigous,expected_xmd,expected_buildrequires", + [ + ( + {"gtk": ["1", "2"]}, + {"platform": ["f28"], "gtk": ["1", "2"]}, + True, + set( + [ + frozenset(["platform:f28:0:c10", "gtk:2:0:c4"]), + frozenset(["platform:f28:0:c10", "gtk:1:0:c2"]), + ] + ), + set([frozenset(["gtk:1", "platform:f28"]), frozenset(["gtk:2", "platform:f28"])]), + ), + ( + {"foo": ["1"]}, + {"platform": ["f28"], "foo": ["1"], "gtk": ["1", "2"]}, + True, + set( + [ + frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"]), + frozenset(["foo:1:0:c2", "gtk:2:0:c4", "platform:f28:0:c10"]), + ] + ), + set( + [ + frozenset(["foo:1", "gtk:1", "platform:f28"]), + frozenset(["foo:1", "gtk:2", "platform:f28"]), + ] + ), + ), + ( + {"gtk": ["1"], "foo": ["1"]}, + {"platform": ["f28"], "gtk": ["1"], "foo": ["1"]}, + False, + set([frozenset(["foo:1:0:c2", 
"gtk:1:0:c2", "platform:f28:0:c10"])]), + set([frozenset(["foo:1", "gtk:1", "platform:f28"])]), + ), + ( + {"gtk": ["1"], "foo": ["1"]}, + {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, + False, + set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]), + set([frozenset(["foo:1", "gtk:1", "platform:f28"])]), + ), + ( + {"gtk": ["-2"], "foo": ["-2"]}, + {"platform": ["f28"], "gtk": ["-2"], "foo": ["-2"]}, + True, + set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]), + set([frozenset(["foo:1", "gtk:1", "platform:f28"])]), + ), + ( + {"gtk": ["1"], "foo": ["1"]}, + {"platform": ["f28"], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, + False, + set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]), + set([frozenset(["foo:1", "gtk:1", "platform:f28"])]), + ), + ( + {"gtk": ["1"], "foo": ["1"]}, + {"platform": ["f28"], "gtk": ["1"]}, + False, + set([frozenset(["gtk:1:0:c2", "platform:f28:0:c10"])]), + set([frozenset(["gtk:1", "platform:f28"])]), + ), + ( + {"gtk": []}, + {"platform": ["f28"], "gtk": ["1"]}, + True, + set([frozenset(["gtk:1:0:c2", "platform:f28:0:c10"])]), + set([frozenset(["gtk:1", "platform:f28"])]), + ), + ( + {}, + {"platform": ["f29"], "app": ["1"]}, + False, + set([frozenset(["app:1:0:c6", "platform:f29:0:c11"])]), + set([frozenset(["app:1", "platform:f29"])]), + ), + ], + ) def test_generate_expanded_mmds_buildrequires( - self, requires, build_requires, stream_ambigous, expected_xmd, - expected_buildrequires): + self, requires, build_requires, stream_ambigous, expected_xmd, expected_buildrequires + ): self._generate_default_modules() module_build = make_module("app:1:0:c1", requires, build_requires) @@ -184,20 +182,22 @@ class TestUtilsModuleStreamExpansion: name, stream = ns.split(":") default_streams[name] = stream module_build_service.utils.generate_expanded_mmds( - db.session, module_build.mmd(), raise_if_stream_ambigous=True, - default_streams=default_streams) + db.session, + module_build.mmd(), + raise_if_stream_ambigous=True, + default_streams=default_streams, + ) - mmds = module_build_service.utils.generate_expanded_mmds( - db.session, module_build.mmd()) + mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd()) buildrequires_per_mmd_xmd = set() buildrequires_per_mmd_buildrequires = set() for mmd in mmds: xmd = glib.from_variant_dict(mmd.get_xmd()) br_nsvcs = [] - for name, detail in xmd['mbs']['buildrequires'].items(): - br_nsvcs.append(":".join([ - name, detail["stream"], detail["version"], detail["context"]])) + for name, detail in xmd["mbs"]["buildrequires"].items(): + br_nsvcs.append( + ":".join([name, detail["stream"], detail["version"], detail["context"]])) buildrequires_per_mmd_xmd.add(frozenset(br_nsvcs)) assert len(mmd.get_dependencies()) == 1 @@ -212,47 +212,45 @@ class TestUtilsModuleStreamExpansion: assert buildrequires_per_mmd_xmd == expected_xmd assert buildrequires_per_mmd_buildrequires == expected_buildrequires - @pytest.mark.parametrize('requires,build_requires,expected', [ - ({"gtk": ["1", "2"]}, {"platform": [], "gtk": ["1", "2"]}, - set([ - frozenset(['gtk:1']), - frozenset(['gtk:2']), - ])), - - ({"gtk": ["1", "2"]}, {"platform": [], "gtk": ["1"]}, - set([ - frozenset(['gtk:1', 'gtk:2']), - ])), - - ({"gtk": ["1"], "foo": ["1"]}, - {"platform": [], "gtk": ["1"], "foo": ["1"]}, - set([ - frozenset(['foo:1', 'gtk:1']), - ])), - - ({"gtk": ["-2"], "foo": ["-2"]}, - {"platform": [], "gtk": ["-2"], "foo": ["-2"]}, - set([ - frozenset(['foo:1', 'gtk:1']), - ])), - - 
({"gtk": ["-1", "1"], "foo": ["-2", "1"]}, - {"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, - set([ - frozenset(['foo:1', 'gtk:1']), - ])), - - ({"gtk": [], "foo": []}, {"platform": [], "gtk": ["1"], "foo": ["1"]}, - set([ - frozenset([]), - ])), - - ]) + @pytest.mark.parametrize( + "requires,build_requires,expected", + [ + ( + {"gtk": ["1", "2"]}, + {"platform": [], "gtk": ["1", "2"]}, + set([frozenset(["gtk:1"]), frozenset(["gtk:2"])]), + ), + ( + {"gtk": ["1", "2"]}, + {"platform": [], "gtk": ["1"]}, + set([frozenset(["gtk:1", "gtk:2"])]), + ), + ( + {"gtk": ["1"], "foo": ["1"]}, + {"platform": [], "gtk": ["1"], "foo": ["1"]}, + set([frozenset(["foo:1", "gtk:1"])]), + ), + ( + {"gtk": ["-2"], "foo": ["-2"]}, + {"platform": [], "gtk": ["-2"], "foo": ["-2"]}, + set([frozenset(["foo:1", "gtk:1"])]), + ), + ( + {"gtk": ["-1", "1"], "foo": ["-2", "1"]}, + {"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, + set([frozenset(["foo:1", "gtk:1"])]), + ), + ( + {"gtk": [], "foo": []}, + {"platform": [], "gtk": ["1"], "foo": ["1"]}, + set([frozenset([])]), + ), + ], + ) def test_generate_expanded_mmds_requires(self, requires, build_requires, expected): self._generate_default_modules() module_build = make_module("app:1:0:c1", requires, build_requires) - mmds = module_build_service.utils.generate_expanded_mmds( - db.session, module_build.mmd()) + mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd()) requires_per_mmd = set() for mmd in mmds: @@ -266,33 +264,83 @@ class TestUtilsModuleStreamExpansion: assert requires_per_mmd == expected - @pytest.mark.parametrize('requires,build_requires,expected', [ - ({}, {"platform": [], "gtk": ["1", "2"]}, - ['platform:f29:0:c11', 'gtk:2:0:c4', 'gtk:2:0:c5', - 'platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3']), - - ({}, {"platform": [], "gtk": ["1"], "foo": ["1"]}, - ['platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3', - 'foo:1:0:c2', 'foo:1:0:c3', 'platform:f29:0:c11']), - - ({}, {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, - ['platform:f28:0:c10', 'gtk:1:0:c2', - 'foo:1:0:c2']), - - ([{}, {}], [{"platform": [], "gtk": ["1"], "foo": ["1"]}, - {"platform": [], "gtk": ["2"], "foo": ["2"]}], - ['foo:1:0:c2', 'foo:1:0:c3', 'foo:2:0:c4', 'foo:2:0:c5', - 'platform:f28:0:c10', 'platform:f29:0:c11', 'gtk:1:0:c2', - 'gtk:1:0:c3', 'gtk:2:0:c4', 'gtk:2:0:c5']), - - ({}, {"platform": [], "gtk": ["-2"], "foo": ["-2"]}, - ['foo:1:0:c2', 'foo:1:0:c3', 'platform:f29:0:c11', - 'platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3']), - - ({}, {"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, - ['foo:1:0:c2', 'foo:1:0:c3', 'platform:f29:0:c11', - 'platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3']), - ]) + @pytest.mark.parametrize( + "requires,build_requires,expected", + [ + ( + {}, + {"platform": [], "gtk": ["1", "2"]}, + [ + "platform:f29:0:c11", + "gtk:2:0:c4", + "gtk:2:0:c5", + "platform:f28:0:c10", + "gtk:1:0:c2", + "gtk:1:0:c3", + ], + ), + ( + {}, + {"platform": [], "gtk": ["1"], "foo": ["1"]}, + [ + "platform:f28:0:c10", + "gtk:1:0:c2", + "gtk:1:0:c3", + "foo:1:0:c2", + "foo:1:0:c3", + "platform:f29:0:c11", + ], + ), + ( + {}, + {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, + ["platform:f28:0:c10", "gtk:1:0:c2", "foo:1:0:c2"], + ), + ( + [{}, {}], + [ + {"platform": [], "gtk": ["1"], "foo": ["1"]}, + {"platform": [], "gtk": ["2"], "foo": ["2"]}, + ], + [ + "foo:1:0:c2", + "foo:1:0:c3", + "foo:2:0:c4", + "foo:2:0:c5", + "platform:f28:0:c10", + "platform:f29:0:c11", + "gtk:1:0:c2", + "gtk:1:0:c3", + "gtk:2:0:c4", + 
"gtk:2:0:c5", + ], + ), + ( + {}, + {"platform": [], "gtk": ["-2"], "foo": ["-2"]}, + [ + "foo:1:0:c2", + "foo:1:0:c3", + "platform:f29:0:c11", + "platform:f28:0:c10", + "gtk:1:0:c2", + "gtk:1:0:c3", + ], + ), + ( + {}, + {"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, + [ + "foo:1:0:c2", + "foo:1:0:c3", + "platform:f29:0:c11", + "platform:f28:0:c10", + "gtk:1:0:c2", + "gtk:1:0:c3", + ], + ), + ], + ) def test_get_required_modules_simple(self, requires, build_requires, expected): module_build = make_module("app:1:0:c1", requires, build_requires) self._generate_default_modules() @@ -316,15 +364,28 @@ class TestUtilsModuleStreamExpansion: make_module("lorem:1:1:c2", {"base": ["f29"]}, {}, base_module) make_module("base:f29:0:c3", {"platform": ["f29"]}, {}, base_module) - @pytest.mark.parametrize('requires,build_requires,expected', [ - ({}, {"platform": [], "gtk": ["1"]}, - ['foo:1:1:c2', 'base:f29:0:c3', 'platform:f29:0:c11', - 'bar:1:1:c2', 'gtk:1:1:c2', 'lorem:1:1:c2']), - - ({}, {"platform": [], "foo": ["1"]}, - ['foo:1:1:c2', 'base:f29:0:c3', 'platform:f29:0:c11', - 'bar:1:1:c2', 'lorem:1:1:c2']), - ]) + @pytest.mark.parametrize( + "requires,build_requires,expected", + [ + ( + {}, + {"platform": [], "gtk": ["1"]}, + [ + "foo:1:1:c2", + "base:f29:0:c3", + "platform:f29:0:c11", + "bar:1:1:c2", + "gtk:1:1:c2", + "lorem:1:1:c2", + ], + ), + ( + {}, + {"platform": [], "foo": ["1"]}, + ["foo:1:1:c2", "base:f29:0:c3", "platform:f29:0:c11", "bar:1:1:c2", "lorem:1:1:c2"], + ), + ], + ) def test_get_required_modules_recursion(self, requires, build_requires, expected): module_build = make_module("app:1:0:c1", requires, build_requires) self._generate_default_modules_recursion() @@ -345,10 +406,16 @@ class TestUtilsModuleStreamExpansion: make_module("gtk:1:2:c2", {"platform": ["f29"]}, {}, f290100) make_module("gtk:1:3:c2", {"platform": ["f29"]}, {}, f290200) - @pytest.mark.parametrize('requires,build_requires,expected', [ - ({}, {"platform": ["f29.1.0"], "gtk": ["1"]}, - ['platform:f29.0.0:0:c11', 'gtk:1:0:c2', 'gtk:1:2:c2', 'platform:f29.1.0:0:c11']), - ]) + @pytest.mark.parametrize( + "requires,build_requires,expected", + [ + ( + {}, + {"platform": ["f29.1.0"], "gtk": ["1"]}, + ["platform:f29.0.0:0:c11", "gtk:1:0:c2", "gtk:1:2:c2", "platform:f29.1.0:0:c11"], + ) + ], + ) def test_get_required_modules_stream_versions(self, requires, build_requires, expected): module_build = make_module("app:1:0:c1", requires, build_requires) self._generate_default_modules_modules_multiple_stream_versions() @@ -359,32 +426,32 @@ class TestUtilsModuleStreamExpansion: """Ensure the correct results are returned without duplicates.""" init_data(data_size=1, multiple_stream_versions=True) mmd = module_build_service.utils.load_mmd_file( - os.path.join(base_dir, 'staged_data', 'testmodule_v2.yaml')) + os.path.join(base_dir, "staged_data", "testmodule_v2.yaml")) deps = mmd.get_dependencies() brs = deps[0].get_buildrequires() - brs['platform'].set(['f29.1.0', 'f29.2.0']) + brs["platform"].set(["f29.1.0", "f29.2.0"]) deps[0].set_buildrequires(brs) mmd.set_dependencies(deps) mmds = module_build_service.utils.mse._get_base_module_mmds(mmd) - expected = set(['platform:f29.0.0', 'platform:f29.1.0', 'platform:f29.2.0']) + expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"]) # Verify no duplicates were returned before doing set operations assert len(mmds) == len(expected) # Verify the expected ones were returned actual = set() for mmd_ in mmds: - actual.add('{}:{}'.format(mmd_.get_name(), 
mmd_.get_stream())) + actual.add("{}:{}".format(mmd_.get_name(), mmd_.get_stream())) assert actual == expected - @pytest.mark.parametrize('virtual_streams', (None, ["f29"], ["lp29"])) + @pytest.mark.parametrize("virtual_streams", (None, ["f29"], ["lp29"])) def test__get_base_module_mmds_virtual_streams(self, virtual_streams): """Ensure the correct results are returned without duplicates.""" init_data(data_size=1, multiple_stream_versions=True) mmd = module_build_service.utils.load_mmd_file( - os.path.join(base_dir, 'staged_data', 'testmodule_v2.yaml')) + os.path.join(base_dir, "staged_data", "testmodule_v2.yaml")) deps = mmd.get_dependencies() brs = deps[0].get_buildrequires() - brs['platform'].set(['f29.2.0']) + brs["platform"].set(["f29.2.0"]) deps[0].set_buildrequires(brs) mmd.set_dependencies(deps) @@ -392,14 +459,14 @@ class TestUtilsModuleStreamExpansion: mmds = module_build_service.utils.mse._get_base_module_mmds(mmd) if virtual_streams == ["f29"]: - expected = set(['platform:f29.0.0', 'platform:f29.1.0', 'platform:f29.2.0', - 'platform:lp29.1.1']) + expected = set( + ["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0", "platform:lp29.1.1"]) else: - expected = set(['platform:f29.0.0', 'platform:f29.1.0', 'platform:f29.2.0']) + expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"]) # Verify no duplicates were returned before doing set operations assert len(mmds) == len(expected) # Verify the expected ones were returned actual = set() for mmd_ in mmds: - actual.add('{}:{}'.format(mmd_.get_name(), mmd_.get_stream())) + actual.add("{}:{}".format(mmd_.get_name(), mmd_.get_stream())) assert actual == expected diff --git a/tests/test_views/test_views.py b/tests/test_views/test_views.py index 1ecb3b6f..b483b81d 100644 --- a/tests/test_views/test_views.py +++ b/tests/test_views/test_views.py @@ -47,16 +47,24 @@ from module_build_service.utils import import_mmd from module_build_service.glib import dict_values, from_variant_dict -user = ('Homer J. Simpson', set(['packager'])) -other_user = ('some_other_user', set(['packager'])) -anonymous_user = ('anonymous', set(['packager'])) -import_module_user = ('Import M. King', set(['mbs-import-module'])) +user = ("Homer J. Simpson", set(["packager"])) +other_user = ("some_other_user", set(["packager"])) +anonymous_user = ("anonymous", set(["packager"])) +import_module_user = ("Import M. King", set(["mbs-import-module"])) base_dir = dirname(dirname(__file__)) class FakeSCM(object): - def __init__(self, mocked_scm, name, mmd_filenames, commit=None, checkout_raise=False, - get_latest_raise=False, branch='master'): + def __init__( + self, + mocked_scm, + name, + mmd_filenames, + commit=None, + checkout_raise=False, + get_latest_raise=False, + branch="master", + ): """ Adds default testing checkout, get_latest and name methods to mocked_scm SCM class. @@ -75,19 +83,19 @@ class FakeSCM(object): self.sourcedir = None if checkout_raise: - self.mocked_scm.return_value.checkout.side_effect = \ - UnprocessableEntity( - "checkout: The requested commit hash was not found within " - "the repository. Perhaps you forgot to push. The original " - "message was: ") + self.mocked_scm.return_value.checkout.side_effect = UnprocessableEntity( + "checkout: The requested commit hash was not found within " + "the repository. Perhaps you forgot to push. 
The original " + "message was: " + ) else: self.mocked_scm.return_value.checkout = self.checkout self.mocked_scm.return_value.name = self.name self.mocked_scm.return_value.commit = self.commit if get_latest_raise: - self.mocked_scm.return_value.get_latest.side_effect = \ - UnprocessableEntity("Failed to get_latest commit") + self.mocked_scm.return_value.get_latest.side_effect = UnprocessableEntity( + "Failed to get_latest commit") else: self.mocked_scm.return_value.get_latest = self.get_latest self.mocked_scm.return_value.repository_root = "https://src.stg.fedoraproject.org/modules/" @@ -104,15 +112,14 @@ class FakeSCM(object): self.sourcedir = path.join(temp_dir, self.name) mkdir(self.sourcedir) base_dir = path.abspath(path.dirname(__file__)) - copyfile(path.join(base_dir, '..', 'staged_data', mmd_filename), - self.get_module_yaml()) + copyfile(path.join(base_dir, "..", "staged_data", mmd_filename), self.get_module_yaml()) self.checkout_id += 1 return self.sourcedir - def get_latest(self, ref='master'): - return hashlib.sha1(ref.encode('utf-8')).hexdigest()[:10] + def get_latest(self, ref="master"): + return hashlib.sha1(ref.encode("utf-8")).hexdigest()[:10] def get_module_yaml(self): return path.join(self.sourcedir, self.name + ".yaml") @@ -124,151 +131,151 @@ class TestViews: init_data(2) def test_query_build(self): - rv = self.client.get('/module-build-service/1/module-builds/2') + rv = self.client.get("/module-build-service/1/module-builds/2") data = json.loads(rv.data) - assert data['id'] == 2 - assert data['context'] == '00000000' - assert data['name'] == 'nginx' - assert data['owner'] == 'Moe Szyslak' - assert data['scratch'] is False - assert data['srpms'] == [] - assert data['stream'] == '1' - assert data['siblings'] == [] - assert data['state'] == 5 - assert data['state_reason'] is None - assert data['tasks'] == { - 'rpms': { - 'module-build-macros': { - 'task_id': 12312321, - 'state': 1, - 'state_reason': None, - 'nvr': 'module-build-macros-01-1.module+2+b8661ee4', + assert data["id"] == 2 + assert data["context"] == "00000000" + assert data["name"] == "nginx" + assert data["owner"] == "Moe Szyslak" + assert data["scratch"] is False + assert data["srpms"] == [] + assert data["stream"] == "1" + assert data["siblings"] == [] + assert data["state"] == 5 + assert data["state_reason"] is None + assert data["tasks"] == { + "rpms": { + "module-build-macros": { + "task_id": 12312321, + "state": 1, + "state_reason": None, + "nvr": "module-build-macros-01-1.module+2+b8661ee4", }, - 'nginx': { - 'task_id': 12312345, - 'state': 1, - 'state_reason': None, - 'nvr': 'nginx-1.10.1-2.module+2+b8661ee4', + "nginx": { + "task_id": 12312345, + "state": 1, + "state_reason": None, + "nvr": "nginx-1.10.1-2.module+2+b8661ee4", }, - }, + } } - assert data['time_completed'] == '2016-09-03T11:25:32Z' - assert data['time_modified'] == '2016-09-03T11:25:32Z' - assert data['time_submitted'] == '2016-09-03T11:23:20Z' - assert data['rebuild_strategy'] == 'changed-and-after' - assert data['version'] == '2' + assert data["time_completed"] == "2016-09-03T11:25:32Z" + assert data["time_modified"] == "2016-09-03T11:25:32Z" + assert data["time_submitted"] == "2016-09-03T11:23:20Z" + assert data["rebuild_strategy"] == "changed-and-after" + assert data["version"] == "2" - @pytest.mark.parametrize('api_version', [0, 99]) + @pytest.mark.parametrize("api_version", [0, 99]) def test_query_builds_invalid_api_version(self, api_version): - rv = self.client.get('/module-build-service/{0}/module-builds/'.format(api_version)) 
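# NOTE (illustrative aside, not part of this change; assumes "client" is a Flask
# test client for the MBS app): versions 0 and 99 are taken to sit outside the
# supported API range on either end, so both requests fall through to the JSON
# 404 handler asserted below. A minimal sketch of the same probe:
#
#     import json
#
#     for bad_version in (0, 99):
#         rv = client.get("/module-build-service/{0}/module-builds/".format(bad_version))
#         body = json.loads(rv.data)
#         assert body["status"] == 404
#         assert body["message"] == "The requested API version is not available"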
+ rv = self.client.get("/module-build-service/{0}/module-builds/".format(api_version)) data = json.loads(rv.data) - assert data['error'] == 'Not Found' - assert data['message'] == 'The requested API version is not available' - assert data['status'] == 404 + assert data["error"] == "Not Found" + assert data["message"] == "The requested API version is not available" + assert data["status"] == 404 def test_query_build_short(self): - rv = self.client.get('/module-build-service/1/module-builds/2?short=True') + rv = self.client.get("/module-build-service/1/module-builds/2?short=True") data = json.loads(rv.data) - assert data['id'] == 2 - assert data['context'] == '00000000' - assert data['name'] == 'nginx' - assert data['state'] == 5 - assert data['state_name'] == 'ready' - assert data['stream'] == '1' - assert data['version'] == '2' + assert data["id"] == 2 + assert data["context"] == "00000000" + assert data["name"] == "nginx" + assert data["state"] == 5 + assert data["state_name"] == "ready" + assert data["stream"] == "1" + assert data["version"] == "2" def test_query_build_with_verbose_mode(self): - rv = self.client.get('/module-build-service/1/module-builds/2?verbose=true') + rv = self.client.get("/module-build-service/1/module-builds/2?verbose=true") data = json.loads(rv.data) - assert data['base_module_buildrequires'] == [] - assert data['component_builds'] == [1, 2] - assert data['context'] == '00000000' + assert data["base_module_buildrequires"] == [] + assert data["component_builds"] == [1, 2] + assert data["context"] == "00000000" # There is no xmd information on this module, so these values should be None - assert data['build_context'] is None - assert data['runtime_context'] is None - assert data['id'] == 2 + assert data["build_context"] is None + assert data["runtime_context"] is None + assert data["id"] == 2 with open(path.join(base_dir, "staged_data", "nginx_mmd.yaml")) as mmd: - assert data['modulemd'] == to_text_type(mmd.read()) - assert data['name'] == 'nginx' - assert data['owner'] == 'Moe Szyslak' - assert data['rebuild_strategy'] == 'changed-and-after' - assert data['scmurl'] == ('git://pkgs.domain.local/modules/nginx' - '?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9') - assert data['scratch'] is False - assert data['srpms'] == [] - assert data['siblings'] == [] - assert data['state'] == 5 - assert data['state_name'] == 'ready' - assert data['state_reason'] is None + assert data["modulemd"] == to_text_type(mmd.read()) + assert data["name"] == "nginx" + assert data["owner"] == "Moe Szyslak" + assert data["rebuild_strategy"] == "changed-and-after" + assert data["scmurl"] == \ + "git://pkgs.domain.local/modules/nginx?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9" + assert data["scratch"] is False + assert data["srpms"] == [] + assert data["siblings"] == [] + assert data["state"] == 5 + assert data["state_name"] == "ready" + assert data["state_reason"] is None # State trace is empty because we directly created these builds and didn't have them # transition, which creates these entries - assert data['state_trace'] == [] - assert data['state_url'] == '/module-build-service/1/module-builds/2' - assert data['stream'] == '1' - assert data['stream_version'] is None - assert data['tasks'] == { - 'rpms': { - 'module-build-macros': { - 'task_id': 12312321, - 'state': 1, - 'state_reason': None, - 'nvr': 'module-build-macros-01-1.module+2+b8661ee4', + assert data["state_trace"] == [] + assert data["state_url"] == "/module-build-service/1/module-builds/2" + assert data["stream"] == "1" + assert 
data["stream_version"] is None + assert data["tasks"] == { + "rpms": { + "module-build-macros": { + "task_id": 12312321, + "state": 1, + "state_reason": None, + "nvr": "module-build-macros-01-1.module+2+b8661ee4", }, - 'nginx': { - 'task_id': 12312345, - 'state': 1, - 'state_reason': None, - 'nvr': 'nginx-1.10.1-2.module+2+b8661ee4', + "nginx": { + "task_id": 12312345, + "state": 1, + "state_reason": None, + "nvr": "nginx-1.10.1-2.module+2+b8661ee4", }, - }, + } } - assert data['time_completed'] == u'2016-09-03T11:25:32Z' - assert data['time_modified'] == u'2016-09-03T11:25:32Z' - assert data['time_submitted'] == u'2016-09-03T11:23:20Z' - assert data['version'] == '2' - assert data['virtual_streams'] == [] + assert data["time_completed"] == u"2016-09-03T11:25:32Z" + assert data["time_modified"] == u"2016-09-03T11:25:32Z" + assert data["time_submitted"] == u"2016-09-03T11:23:20Z" + assert data["version"] == "2" + assert data["virtual_streams"] == [] def test_query_build_with_br_verbose_mode(self): reuse_component_init_data() - rv = self.client.get('/module-build-service/1/module-builds/2?verbose=true') + rv = self.client.get("/module-build-service/1/module-builds/2?verbose=true") data = json.loads(rv.data) - assert data['base_module_buildrequires'] == [{ - 'context': '00000000', - 'id': 1, - 'name': 'platform', - 'state': 5, - 'state_name': 'ready', - 'stream': 'f28', - 'stream_version': 280000, - 'version': '3' + assert data["base_module_buildrequires"] == [{ + "context": "00000000", + "id": 1, + "name": "platform", + "state": 5, + "state_name": "ready", + "stream": "f28", + "stream_version": 280000, + "version": "3", }] def test_pagination_metadata(self): - rv = self.client.get('/module-build-service/1/module-builds/?per_page=2&page=2') - meta_data = json.loads(rv.data)['meta'] - assert meta_data['prev'].split('?', 1)[1] in ['per_page=2&page=1', 'page=1&per_page=2'] - assert meta_data['next'].split('?', 1)[1] in ['per_page=2&page=3', 'page=3&per_page=2'] - assert meta_data['last'].split('?', 1)[1] in ['per_page=2&page=4', 'page=4&per_page=2'] - assert meta_data['first'].split('?', 1)[1] in ['per_page=2&page=1', 'page=1&per_page=2'] - assert meta_data['total'] == 7 - assert meta_data['per_page'] == 2 - assert meta_data['pages'] == 4 - assert meta_data['page'] == 2 + rv = self.client.get("/module-build-service/1/module-builds/?per_page=2&page=2") + meta_data = json.loads(rv.data)["meta"] + assert meta_data["prev"].split("?", 1)[1] in ["per_page=2&page=1", "page=1&per_page=2"] + assert meta_data["next"].split("?", 1)[1] in ["per_page=2&page=3", "page=3&per_page=2"] + assert meta_data["last"].split("?", 1)[1] in ["per_page=2&page=4", "page=4&per_page=2"] + assert meta_data["first"].split("?", 1)[1] in ["per_page=2&page=1", "page=1&per_page=2"] + assert meta_data["total"] == 7 + assert meta_data["per_page"] == 2 + assert meta_data["pages"] == 4 + assert meta_data["page"] == 2 def test_pagination_metadata_with_args(self): - rv = self.client.get('/module-build-service/1/module-builds/?per_page=2&page=2&order_by=id') - meta_data = json.loads(rv.data)['meta'] - for link in [meta_data['prev'], meta_data['next'], meta_data['last'], meta_data['first']]: - assert 'order_by=id' in link - assert 'per_page=2' in link - assert meta_data['total'] == 7 - assert meta_data['per_page'] == 2 - assert meta_data['pages'] == 4 - assert meta_data['page'] == 2 + rv = self.client.get("/module-build-service/1/module-builds/?per_page=2&page=2&order_by=id") + meta_data = json.loads(rv.data)["meta"] + for link in 
[meta_data["prev"], meta_data["next"], meta_data["last"], meta_data["first"]]: + assert "order_by=id" in link + assert "per_page=2" in link + assert meta_data["total"] == 7 + assert meta_data["per_page"] == 2 + assert meta_data["pages"] == 4 + assert meta_data["page"] == 2 def test_query_builds(self): - rv = self.client.get('/module-build-service/1/module-builds/?per_page=2') - items = json.loads(rv.data)['items'] + rv = self.client.get("/module-build-service/1/module-builds/?per_page=2") + items = json.loads(rv.data)["items"] expected = [ { "component_builds": [11, 12], @@ -278,8 +285,10 @@ class TestViews: "name": "testmodule", "owner": "some_other_user", "rebuild_strategy": "changed-and-after", - "scmurl": ("git://pkgs.domain.local/modules/testmodule" - "?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9"), + "scmurl": ( + "git://pkgs.domain.local/modules/testmodule" + "?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9" + ), "scratch": False, "siblings": [], "srpms": [], @@ -293,14 +302,14 @@ class TestViews: "nvr": "module-build-macros-01-1.module+7+f95651e2", "state": 1, "state_reason": None, - "task_id": 47383994 + "task_id": 47383994, }, "rubygem-rails": { "nvr": "postgresql-9.5.3-4.module+7+f95651e2", "state": 3, "state_reason": None, - "task_id": 2433434 - } + "task_id": 2433434, + }, } }, "time_completed": None, @@ -317,8 +326,10 @@ class TestViews: "name": "postgressql", "owner": "some_user", "rebuild_strategy": "changed-and-after", - "scmurl": ("git://pkgs.domain.local/modules/postgressql" - "?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9"), + "scmurl": ( + "git://pkgs.domain.local/modules/postgressql" + "?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9" + ), "scratch": False, "siblings": [], "srpms": [], @@ -332,14 +343,14 @@ class TestViews: "nvr": "module-build-macros-01-1.module+6+fa947d31", "state": 1, "state_reason": None, - "task_id": 47383994 + "task_id": 47383994, }, "postgresql": { "nvr": "postgresql-9.5.3-4.module+6+fa947d31", "state": 1, "state_reason": None, - "task_id": 2433434 - } + "task_id": 2433434, + }, } }, "time_completed": "2016-09-03T11:37:19Z", @@ -347,7 +358,7 @@ class TestViews: "time_submitted": "2016-09-03T12:35:33Z", "version": "3", "buildrequires": {}, - } + }, ] assert items == expected @@ -355,8 +366,8 @@ class TestViews: def test_query_builds_with_context(self): clean_database() init_data(2, contexts=True) - rv = self.client.get('/module-build-service/1/module-builds/?context=3a4057d2') - items = json.loads(rv.data)['items'] + rv = self.client.get("/module-build-service/1/module-builds/?context=3a4057d2") + items = json.loads(rv.data)["items"] expected = [ { "component_builds": [3, 4], @@ -366,8 +377,10 @@ class TestViews: "name": "nginx", "owner": "Moe Szyslak", "rebuild_strategy": "changed-and-after", - "scmurl": ("git://pkgs.domain.local/modules/nginx" - "?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9"), + "scmurl": ( + "git://pkgs.domain.local/modules/nginx" + "?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9" + ), "scratch": False, "siblings": [2], "srpms": [], @@ -381,14 +394,14 @@ class TestViews: "nvr": "module-build-macros-01-1.module+4+0557c87d", "state": 1, "state_reason": None, - "task_id": 47383993 + "task_id": 47383993, }, "postgresql": { "nvr": "postgresql-9.5.3-4.module+4+0557c87d", "state": 1, "state_reason": None, - "task_id": 2433433 - } + "task_id": 2433433, + }, } }, "time_completed": "2016-09-03T11:25:32Z", @@ -401,27 +414,27 @@ class TestViews: assert items == expected def test_query_builds_with_id_error(self): - rv = 
self.client.get('/module-build-service/1/module-builds/?id=1') + rv = self.client.get("/module-build-service/1/module-builds/?id=1") actual = json.loads(rv.data) - msg = ('The "id" query option is invalid. Did you mean to go to ' - '"/module-build-service/1/module-builds/1"?') - expected = { - 'error': 'Bad Request', - 'message': msg, - "status": 400 - } + msg = ( + 'The "id" query option is invalid. Did you mean to go to ' + '"/module-build-service/1/module-builds/1"?' + ) + expected = {"error": "Bad Request", "message": msg, "status": 400} assert actual == expected def test_query_builds_with_nsvc(self): - nsvcs = ["testmodule:4.3.43:7:00000000", - "testmodule:4.3.43:7", - "testmodule:4.3.43", - "testmodule"] + nsvcs = [ + "testmodule:4.3.43:7:00000000", + "testmodule:4.3.43:7", + "testmodule:4.3.43", + "testmodule", + ] results = [] for nsvc in nsvcs: - rv = self.client.get('/module-build-service/1/module-builds/?nsvc=%s&per_page=2' % nsvc) - results.append(json.loads(rv.data)['items']) + rv = self.client.get("/module-build-service/1/module-builds/?nsvc=%s&per_page=2" % nsvc) + results.append(json.loads(rv.data)["items"]) nsvc_keys = ["name", "stream", "version", "context"] @@ -440,23 +453,28 @@ class TestViews: # update database with builds which contain koji tags. reuse_component_init_data() mock_rpm_md = {"build_id": 1065871} - mock_tags = [{"name": "module-testmodule-master-20170219191323-c40c156c"}, - {"name": "module-testmodule-master-20170219191323-c40c156c-build"}, - {"name": "non-module-tag"}, - {"name": "module-testmodule-master-20170109091357-78e4a6fd"}] + mock_tags = [ + {"name": "module-testmodule-master-20170219191323-c40c156c"}, + {"name": "module-testmodule-master-20170219191323-c40c156c-build"}, + {"name": "non-module-tag"}, + {"name": "module-testmodule-master-20170109091357-78e4a6fd"}, + ] mock_session = ClientSession.return_value mock_session.getRPM.return_value = mock_rpm_md mock_session.listTags.return_value = mock_tags - rpm = quote('module-build-macros-0.1-1.testmodule_master_20170303190726.src.rpm') - with patch('koji.read_config', return_value={ - 'authtype': 'kerberos', - 'timeout': 60, - 'server': 'http://koji.example.com/' - }): - rv = self.client.get('/module-build-service/1/module-builds/?rpm=%s' % rpm) - results = json.loads(rv.data)['items'] + rpm = quote("module-build-macros-0.1-1.testmodule_master_20170303190726.src.rpm") + with patch( + "koji.read_config", + return_value={ + "authtype": "kerberos", + "timeout": 60, + "server": "http://koji.example.com/", + }, + ): + rv = self.client.get("/module-build-service/1/module-builds/?rpm=%s" % rpm) + results = json.loads(rv.data)["items"] assert len(results) == 2 assert results[0]["koji_tag"] == "module-testmodule-master-20170219191323-c40c156c" @@ -468,267 +486,274 @@ class TestViews: mock_session.krb_login.assert_not_called() - @patch('module_build_service.config.Config.system', - new_callable=PropertyMock, return_value="invalid_builder") + @patch( + "module_build_service.config.Config.system", + new_callable=PropertyMock, + return_value="invalid_builder", + ) def test_query_builds_with_binary_rpm_not_koji(self, mock_builder): - rpm = quote('module-build-macros-0.1-1.testmodule_master_20170303190726.src.rpm') - rv = self.client.get('/module-build-service/1/module-builds/?rpm=%s' % rpm) + rpm = quote("module-build-macros-0.1-1.testmodule_master_20170303190726.src.rpm") + rv = self.client.get("/module-build-service/1/module-builds/?rpm=%s" % rpm) results = json.loads(rv.data) expected_error = { - 'error': 'Bad 
Request', - 'message': 'Configured builder does not allow to search by rpm binary name!', - 'status': 400 + "error": "Bad Request", + "message": "Configured builder does not allow to search by rpm binary name!", + "status": 400, } assert rv.status_code == 400 assert results == expected_error def test_query_component_build(self): - rv = self.client.get('/module-build-service/1/component-builds/1') + rv = self.client.get("/module-build-service/1/component-builds/1") data = json.loads(rv.data) - assert data['id'] == 1 - assert data['format'] == 'rpms' - assert data['module_build'] == 2 - assert data['nvr'] == 'nginx-1.10.1-2.module+2+b8661ee4' - assert data['package'] == 'nginx' - assert data['state'] == 1 - assert data['state_name'] == 'COMPLETE' - assert data['state_reason'] is None - assert data['task_id'] == 12312345 + assert data["id"] == 1 + assert data["format"] == "rpms" + assert data["module_build"] == 2 + assert data["nvr"] == "nginx-1.10.1-2.module+2+b8661ee4" + assert data["package"] == "nginx" + assert data["state"] == 1 + assert data["state_name"] == "COMPLETE" + assert data["state_reason"] is None + assert data["task_id"] == 12312345 def test_query_component_build_short(self): - rv = self.client.get('/module-build-service/1/component-builds/1?short=True') + rv = self.client.get("/module-build-service/1/component-builds/1?short=True") data = json.loads(rv.data) - assert data['id'] == 1 - assert data['format'] == 'rpms' - assert data['module_build'] == 2 - assert data['nvr'] == 'nginx-1.10.1-2.module+2+b8661ee4' - assert data['package'] == 'nginx' - assert data['state'] == 1 - assert data['state_name'] == 'COMPLETE' - assert data['state_reason'] is None - assert data['task_id'] == 12312345 + assert data["id"] == 1 + assert data["format"] == "rpms" + assert data["module_build"] == 2 + assert data["nvr"] == "nginx-1.10.1-2.module+2+b8661ee4" + assert data["package"] == "nginx" + assert data["state"] == 1 + assert data["state_name"] == "COMPLETE" + assert data["state_reason"] is None + assert data["task_id"] == 12312345 def test_query_component_build_verbose(self): - rv = self.client.get('/module-build-service/1/component-builds/3?verbose=true') + rv = self.client.get("/module-build-service/1/component-builds/3?verbose=true") data = json.loads(rv.data) - assert data['id'] == 3 - assert data['format'] == 'rpms' - assert data['module_build'] == 3 - assert data['nvr'] == 'postgresql-9.5.3-4.module+3+0557c87d' - assert data['package'] == 'postgresql' - assert data['state'] == 1 - assert data['state_name'] == 'COMPLETE' - assert data['state_reason'] is None - assert data['task_id'] == 2433433 - assert data['state_trace'][0]['reason'] is None - assert data['state_trace'][0]['time'] is not None - assert data['state_trace'][0]['state'] == 1 - assert data['state_trace'][0]['state_name'] == 'wait' - assert data['state_url'], '/module-build-service/1/component-builds/3' + assert data["id"] == 3 + assert data["format"] == "rpms" + assert data["module_build"] == 3 + assert data["nvr"] == "postgresql-9.5.3-4.module+3+0557c87d" + assert data["package"] == "postgresql" + assert data["state"] == 1 + assert data["state_name"] == "COMPLETE" + assert data["state_reason"] is None + assert data["task_id"] == 2433433 + assert data["state_trace"][0]["reason"] is None + assert data["state_trace"][0]["time"] is not None + assert data["state_trace"][0]["state"] == 1 + assert data["state_trace"][0]["state_name"] == "wait" + assert data["state_url"] == "/module-build-service/1/component-builds/3" def 
test_query_component_builds_filter_format(self): - rv = self.client.get('/module-build-service/1/component-builds/' - '?format=rpms') + rv = self.client.get("/module-build-service/1/component-builds/?format=rpms") data = json.loads(rv.data) - assert data['meta']['total'] == 12 + assert data["meta"]["total"] == 12 def test_query_component_builds_filter_ref(self): - rv = self.client.get('/module-build-service/1/component-builds/' - '?ref=this-filter-query-should-return-zero-items') + rv = self.client.get( + "/module-build-service/1/component-builds/" + "?ref=this-filter-query-should-return-zero-items" + ) data = json.loads(rv.data) - assert data['meta']['total'] == 0 + assert data["meta"]["total"] == 0 def test_query_component_builds_filter_tagged(self): - rv = self.client.get('/module-build-service/1/component-builds/?tagged=true') + rv = self.client.get("/module-build-service/1/component-builds/?tagged=true") data = json.loads(rv.data) - assert data['meta']['total'] == 8 + assert data["meta"]["total"] == 8 def test_query_component_builds_filter_nvr(self): - rv = self.client.get('/module-build-service/1/component-builds/?nvr=nginx-1.10.1-2.' - 'module%2B2%2Bb8661ee4') + rv = self.client.get( + "/module-build-service/1/component-builds/?nvr=nginx-1.10.1-2.module%2B2%2Bb8661ee4") data = json.loads(rv.data) - assert data['meta']['total'] == 1 + assert data["meta"]["total"] == 1 def test_query_component_builds_filter_task_id(self): - rv = self.client.get('/module-build-service/1/component-builds/?task_id=12312346') + rv = self.client.get("/module-build-service/1/component-builds/?task_id=12312346") data = json.loads(rv.data) - assert data['meta']['total'] == 1 + assert data["meta"]["total"] == 1 def test_query_component_builds_filter_state(self): - rv = self.client.get('/module-build-service/1/component-builds/?state=3') + rv = self.client.get("/module-build-service/1/component-builds/?state=3") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_component_builds_filter_multiple_states(self): - rv = self.client.get('/module-build-service/1/component-builds/?state=3&state=1') + rv = self.client.get("/module-build-service/1/component-builds/?state=3&state=1") data = json.loads(rv.data) - assert data['meta']['total'] == 12 + assert data["meta"]["total"] == 12 def test_query_builds_filter_name(self): - rv = self.client.get('/module-build-service/1/module-builds/?name=nginx') + rv = self.client.get("/module-build-service/1/module-builds/?name=nginx") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_builds_filter_koji_tag(self): - rv = self.client.get('/module-build-service/1/module-builds/?koji_tag=module-nginx-1.2') + rv = self.client.get("/module-build-service/1/module-builds/?koji_tag=module-nginx-1.2") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_builds_filter_completed_before(self): rv = self.client.get( - '/module-build-service/1/module-builds/?completed_before=2016-09-03T11:30:00Z') + "/module-build-service/1/module-builds/?completed_before=2016-09-03T11:30:00Z") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_builds_filter_completed_after(self): rv = self.client.get( - '/module-build-service/1/module-builds/?completed_after=2016-09-03T11:35:00Z') + "/module-build-service/1/module-builds/?completed_after=2016-09-03T11:35:00Z") data = 
json.loads(rv.data) - assert data['meta']['total'] == 3 + assert data["meta"]["total"] == 3 def test_query_builds_filter_submitted_before(self): rv = self.client.get( - '/module-build-service/1/module-builds/?submitted_before=2016-09-03T11:35:00Z') + "/module-build-service/1/module-builds/?submitted_before=2016-09-03T11:35:00Z") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_builds_filter_submitted_after(self): rv = self.client.get( - '/module-build-service/1/module-builds/?submitted_after=2016-09-03T11:35:00Z') + "/module-build-service/1/module-builds/?submitted_after=2016-09-03T11:35:00Z") data = json.loads(rv.data) - assert data['meta']['total'] == 5 + assert data["meta"]["total"] == 5 def test_query_builds_filter_modified_before(self): rv = self.client.get( - '/module-build-service/1/module-builds/?modified_before=2016-09-03T11:35:00Z') + "/module-build-service/1/module-builds/?modified_before=2016-09-03T11:35:00Z") data = json.loads(rv.data) - assert data['meta']['total'] == 1 + assert data["meta"]["total"] == 1 def test_query_builds_filter_modified_after(self): rv = self.client.get( - '/module-build-service/1/module-builds/?modified_after=2016-09-03T11:35:00Z') + "/module-build-service/1/module-builds/?modified_after=2016-09-03T11:35:00Z") data = json.loads(rv.data) - assert data['meta']['total'] == 6 + assert data["meta"]["total"] == 6 def test_query_builds_filter_owner(self): - rv = self.client.get( - '/module-build-service/1/module-builds/?owner=Moe%20Szyslak') + rv = self.client.get("/module-build-service/1/module-builds/?owner=Moe%20Szyslak") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_builds_filter_state(self): - rv = self.client.get( - '/module-build-service/1/module-builds/?state=3') + rv = self.client.get("/module-build-service/1/module-builds/?state=3") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 def test_query_builds_filter_multiple_states(self): - rv = self.client.get( - '/module-build-service/1/module-builds/?state=3&state=1') + rv = self.client.get("/module-build-service/1/module-builds/?state=3&state=1") data = json.loads(rv.data) - assert data['meta']['total'] == 4 + assert data["meta"]["total"] == 4 def test_query_builds_two_filters(self): - rv = self.client.get('/module-build-service/1/module-builds/?owner=Moe%20Szyslak' - '&modified_after=2016-09-03T11:35:00Z') + rv = self.client.get( + "/module-build-service/1/module-builds/?owner=Moe%20Szyslak" + "&modified_after=2016-09-03T11:35:00Z" + ) data = json.loads(rv.data) - assert data['meta']['total'] == 1 + assert data["meta"]["total"] == 1 def test_query_builds_filter_nsv(self): rv = self.client.get( - '/module-build-service/1/module-builds/?name=postgressql&stream=1&version=2') + "/module-build-service/1/module-builds/?name=postgressql&stream=1&version=2") data = json.loads(rv.data) - for item in data['items']: - assert item['name'] == 'postgressql' - assert item['stream'] == '1' - assert item['version'] == '2' - assert data['meta']['total'] == 1 + for item in data["items"]: + assert item["name"] == "postgressql" + assert item["stream"] == "1" + assert item["version"] == "2" + assert data["meta"]["total"] == 1 def test_query_builds_filter_invalid_date(self): rv = self.client.get( - '/module-build-service/1/module-builds/?modified_after=2016-09-03T12:25:00-05:00') + 
"/module-build-service/1/module-builds/?modified_after=2016-09-03T12:25:00-05:00") data = json.loads(rv.data) - assert data['error'] == 'Bad Request' - assert data['message'] == ('An invalid Zulu ISO 8601 timestamp was ' - 'provided for the \"modified_after\" parameter') - assert data['status'] == 400 + assert data["error"] == "Bad Request" + assert data["message"] == \ + "An invalid Zulu ISO 8601 timestamp was " 'provided for the "modified_after" parameter' + assert data["status"] == 400 - @pytest.mark.parametrize('stream_version_lte', ('280000', '290000', '293000', 'invalid',)) + @pytest.mark.parametrize("stream_version_lte", ("280000", "290000", "293000", "invalid")) def test_query_builds_filter_stream_version_lte(self, stream_version_lte): init_data(data_size=1, multiple_stream_versions=True) - url = ('/module-build-service/1/module-builds/?name=platform&verbose=true' - '&stream_version_lte={}'.format(stream_version_lte)) + url = ( + "/module-build-service/1/module-builds/?name=platform&verbose=true" + "&stream_version_lte={}".format(stream_version_lte) + ) rv = self.client.get(url) data = json.loads(rv.data) - total = data.get('meta', {}).get('total') - if stream_version_lte == 'invalid': + total = data.get("meta", {}).get("total") + if stream_version_lte == "invalid": assert data == { - 'error': 'Bad Request', - 'message': ('An invalid value of stream_version_lte was provided. It must be an ' - 'integer greater than or equal to 10000.'), - 'status': 400 + "error": "Bad Request", + "message": ( + "An invalid value of stream_version_lte was provided. It must be an " + "integer greater than or equal to 10000." + ), + "status": 400, } - elif stream_version_lte == '280000': + elif stream_version_lte == "280000": assert total == 2 - elif stream_version_lte == '290000': + elif stream_version_lte == "290000": assert total == 1 - elif stream_version_lte == '293000': + elif stream_version_lte == "293000": assert total == 3 - @pytest.mark.parametrize('virtual_streams', ([], ('f28',), ('f29',), ('f28', 'f29'))) + @pytest.mark.parametrize("virtual_streams", ([], ("f28",), ("f29",), ("f28", "f29"))) def test_query_builds_filter_virtual_streams(self, virtual_streams): # Populate some platform modules with virtual streams init_data(data_size=1, multiple_stream_versions=True) - url = '/module-build-service/1/module-builds/?name=platform&verbose=true' + url = "/module-build-service/1/module-builds/?name=platform&verbose=true" for virtual_stream in virtual_streams: - url += '&virtual_stream={}'.format(virtual_stream) + url += "&virtual_stream={}".format(virtual_stream) rv = self.client.get(url) data = json.loads(rv.data) - total = data['meta']['total'] - if virtual_streams == ('f28',): + total = data["meta"]["total"] + if virtual_streams == ("f28",): assert total == 1 - for module in data['items']: - assert module['virtual_streams'] == ['f28'] - elif virtual_streams == ('f29',): + for module in data["items"]: + assert module["virtual_streams"] == ["f28"] + elif virtual_streams == ("f29",): assert total == 3 - for module in data['items']: - assert module['virtual_streams'] == ['f29'] - elif virtual_streams == ('f28', 'f29'): + for module in data["items"]: + assert module["virtual_streams"] == ["f29"] + elif virtual_streams == ("f28", "f29"): assert total == 4 - for module in data['items']: - assert len(set(module['virtual_streams']) - set(['f28', 'f29'])) == 0 + for module in data["items"]: + assert len(set(module["virtual_streams"]) - set(["f28", "f29"])) == 0 elif len(virtual_streams) == 0: assert 
total == 5 def test_query_builds_order_by(self): build = db.session.query(module_build_service.models.ModuleBuild).filter_by(id=2).one() - build.name = 'candy' + build.name = "candy" db.session.add(build) db.session.commit() - rv = self.client.get('/module-build-service/1/module-builds/?' - 'per_page=10&order_by=name') - items = json.loads(rv.data)['items'] - assert items[0]['name'] == 'candy' - assert items[1]['name'] == 'nginx' + rv = self.client.get("/module-build-service/1/module-builds/?per_page=10&order_by=name") + items = json.loads(rv.data)["items"] + assert items[0]["name"] == "candy" + assert items[1]["name"] == "nginx" def test_query_builds_order_by_multiple(self): init_data(data_size=1, multiple_stream_versions=True) platform_f28 = db.session.query(module_build_service.models.ModuleBuild).get(1) - platform_f28.version = '150' + platform_f28.version = "150" db.session.add(platform_f28) db.session.commit() rv = self.client.get( - '/module-build-service/1/module-builds/?order_desc_by=stream_version' - '&order_desc_by=version') - items = json.loads(rv.data)['items'] + "/module-build-service/1/module-builds/?order_desc_by=stream_version" + "&order_desc_by=version" + ) + items = json.loads(rv.data)["items"] expected_ids = [8, 6, 4, 1, 2, 12, 3, 5, 7, 9] - actual_ids = [item['id'] for item in items] + actual_ids = [item["id"] for item in items] assert actual_ids == expected_ids def test_query_builds_order_desc_by(self): - rv = self.client.get('/module-build-service/1/module-builds/?' - 'per_page=10&order_desc_by=id') - items = json.loads(rv.data)['items'] + rv = self.client.get( + "/module-build-service/1/module-builds/?per_page=10&order_desc_by=id") + items = json.loads(rv.data)["items"] # Check that the id is items[0]["id"], items[0]["id"] - 1, ... for idx, item in enumerate(items): assert item["id"] == items[0]["id"] - idx @@ -737,121 +762,128 @@ class TestViews: clean_database() init_data(2, contexts=True) - rv = self.client.get('/module-build-service/1/module-builds/?' - 'per_page=10&name=nginx&order_desc_by=context') - sorted_items = json.loads(rv.data)['items'] - sorted_contexts = [m['context'] for m in sorted_items] + rv = self.client.get( + "/module-build-service/1/module-builds/?per_page=10&name=nginx&order_desc_by=context") + sorted_items = json.loads(rv.data)["items"] + sorted_contexts = [m["context"] for m in sorted_items] - expected_contexts = ['d5a6c0fa', '795e97c1', '3a4057d2', '10e50d06'] + expected_contexts = ["d5a6c0fa", "795e97c1", "3a4057d2", "10e50d06"] assert sorted_contexts == expected_contexts def test_query_builds_order_by_order_desc_by(self): """ Test that when both order_by and order_desc_by are set, an error is returned. """ - rv = self.client.get('/module-build-service/1/module-builds/?' - 'per_page=10&order_desc_by=id&order_by=name') + rv = self.client.get( + "/module-build-service/1/module-builds/?per_page=10&order_desc_by=id&order_by=name") error = json.loads(rv.data) expected = { - 'error': 'Bad Request', - 'message': 'You may not specify both order_by and order_desc_by', - 'status': 400 + "error": "Bad Request", + "message": "You may not specify both order_by and order_desc_by", + "status": 400, } assert error == expected def test_query_builds_order_by_wrong_key(self): - rv = self.client.get('/module-build-service/1/module-builds/?' 
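# NOTE (illustrative aside, not part of this change): order_by/order_desc_by values
# are assumed to be validated against the sortable ModuleBuild columns, which is why
# the "unknown" ordering key in the request here draws the 400 payload asserted just
# after. Repeated order_desc_by parameters sort on the first key and break ties with
# the next, as in this plain-Python sketch over hypothetical rows:
#
#     builds = [
#         {"id": 1, "stream_version": 280000, "version": 150},
#         {"id": 2, "stream_version": 290000, "version": 3},
#         {"id": 3, "stream_version": 290000, "version": 7},
#     ]
#     ordered = sorted(
#         builds, key=lambda b: (b["stream_version"], b["version"]), reverse=True)
#     assert [b["id"] for b in ordered] == [3, 2, 1]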
- 'per_page=10&order_by=unknown') + rv = self.client.get( + "/module-build-service/1/module-builds/?per_page=10&order_by=unknown") error = json.loads(rv.data) expected = { - 'error': 'Bad Request', - 'message': 'An invalid ordering key of "unknown" was supplied', - 'status': 400, + "error": "Bad Request", + "message": 'An invalid ordering key of "unknown" was supplied', + "status": 400, } assert error == expected def test_query_base_module_br_filters(self): reuse_component_init_data() - mmd = load_mmd_file(path.join(base_dir, 'staged_data', 'platform.yaml')) - mmd.set_stream('f30.1.3') + mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml")) + mmd.set_stream("f30.1.3") import_mmd(db.session, mmd) - platform_f300103 = ModuleBuild.query.filter_by(stream='f30.1.3').one() + platform_f300103 = ModuleBuild.query.filter_by(stream="f30.1.3").one() build = ModuleBuild( - name='testmodule', - stream='master', + name="testmodule", + stream="master", version=20170109091357, state=5, - build_context='dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3', - runtime_context='ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7', - context='7c29193d', - koji_tag='module-testmodule-master-20170109091357-7c29193d', - scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79', + build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3", + runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7", + context="7c29193d", + koji_tag="module-testmodule-master-20170109091357-7c29193d", + scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79", batch=3, - owner='Dr. Pepper', + owner="Dr. Pepper", time_submitted=datetime(2018, 11, 15, 16, 8, 18), time_modified=datetime(2018, 11, 15, 16, 19, 35), - rebuild_strategy='changed-and-after', - modulemd=read_staged_data('testmodule'), + rebuild_strategy="changed-and-after", + modulemd=read_staged_data("testmodule"), ) build.buildrequires.append(platform_f300103) db.session.add(build) db.session.commit() # Query by NSVC rv = self.client.get( - '/module-build-service/1/module-builds/?base_module_br=platform:f28:3:00000000') + "/module-build-service/1/module-builds/?base_module_br=platform:f28:3:00000000") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 rv = self.client.get( - '/module-build-service/1/module-builds/?base_module_br=platform:f30.1.3:3:00000000') + "/module-build-service/1/module-builds/?base_module_br=platform:f30.1.3:3:00000000") data = json.loads(rv.data) - assert data['meta']['total'] == 1 + assert data["meta"]["total"] == 1 # Query by non-existent NVC rv = self.client.get( - '/module-build-service/1/module-builds/?base_module_br=platform:f12:3:00000000') + "/module-build-service/1/module-builds/?base_module_br=platform:f12:3:00000000") data = json.loads(rv.data) - assert data['meta']['total'] == 0 + assert data["meta"]["total"] == 0 # Query by name and stream - rv = self.client.get('/module-build-service/1/module-builds/?base_module_br_name=platform' - '&base_module_br_stream=f28') + rv = self.client.get( + "/module-build-service/1/module-builds/?base_module_br_name=platform" + "&base_module_br_stream=f28" + ) data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 # Query by stream version - rv = self.client.get('/module-build-service/1/module-builds/?base_module_br_stream_version=' - '280000') + rv = self.client.get( + "/module-build-service/1/module-builds/?base_module_br_stream_version=280000") data = json.loads(rv.data) - assert 
data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 # Query by lte stream version - rv = self.client.get('/module-build-service/1/module-builds/?base_module_br_stream_version_' - 'lte=290000') + rv = self.client.get( + "/module-build-service/1/module-builds/?base_module_br_stream_version_lte=290000") data = json.loads(rv.data) - assert data['meta']['total'] == 2 + assert data["meta"]["total"] == 2 # Query by lte stream version with no results - rv = self.client.get('/module-build-service/1/module-builds/?base_module_br_stream_version_' - 'lte=270000') + rv = self.client.get( + "/module-build-service/1/module-builds/?base_module_br_stream_version_lte=270000") data = json.loads(rv.data) - assert data['meta']['total'] == 0 + assert data["meta"]["total"] == 0 # Query by gte stream version - rv = self.client.get('/module-build-service/1/module-builds/?base_module_br_stream_version_' - 'gte=270000') + rv = self.client.get( + "/module-build-service/1/module-builds/?base_module_br_stream_version_gte=270000") data = json.loads(rv.data) - assert data['meta']['total'] == 3 + assert data["meta"]["total"] == 3 # Query by gte stream version with no results - rv = self.client.get('/module-build-service/1/module-builds/?base_module_br_stream_version_' - 'gte=320000') + rv = self.client.get( + "/module-build-service/1/module-builds/?base_module_br_stream_version_gte=320000") data = json.loads(rv.data) - assert data['meta']['total'] == 0 + assert data["meta"]["total"] == 0 - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build(self, mocked_scm, mocked_get_user, api_version): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - post_url = '/module-build-service/{0}/module-builds/'.format(api_version) - rv = self.client.post(post_url, data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + post_url = "/module-build-service/{0}/module-builds/".format(api_version) + rv = self.client.post( + post_url, + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) if api_version >= 2: @@ -859,933 +891,1137 @@ class TestViews: assert len(data) == 1 data = data[0] - assert 'component_builds' in data, data - assert data['component_builds'] == [] - assert data['name'] == 'testmodule' - assert data['scmurl'] == ('https://src.stg.fedoraproject.org/modules/testmodule.git' - '?#68931c90de214d9d13feefbd35246a81b6cb8d49') - assert data['version'] == '281' - assert data['time_submitted'] is not None - assert data['time_modified'] is not None - assert data['time_completed'] is None - assert data['stream'] == 'master' - assert data['owner'] == 'Homer J. 
Simpson' - assert data['id'] == 8 - assert data['rebuild_strategy'] == 'changed-and-after' - assert data['state_name'] == 'init' - assert data['state_url'] == '/module-build-service/{0}/module-builds/8'.format(api_version) - assert len(data['state_trace']) == 1 - assert data['state_trace'][0]['state'] == 0 - assert data['tasks'] == {} - assert data['siblings'] == [] - module_build_service.utils.load_mmd(data['modulemd']) + assert "component_builds" in data, data + assert data["component_builds"] == [] + assert data["name"] == "testmodule" + assert data["scmurl"] == ( + "https://src.stg.fedoraproject.org/modules/testmodule.git" + "?#68931c90de214d9d13feefbd35246a81b6cb8d49" + ) + assert data["version"] == "281" + assert data["time_submitted"] is not None + assert data["time_modified"] is not None + assert data["time_completed"] is None + assert data["stream"] == "master" + assert data["owner"] == "Homer J. Simpson" + assert data["id"] == 8 + assert data["rebuild_strategy"] == "changed-and-after" + assert data["state_name"] == "init" + assert data["state_url"] == "/module-build-service/{0}/module-builds/8".format(api_version) + assert len(data["state_trace"]) == 1 + assert data["state_trace"][0]["state"] == 0 + assert data["tasks"] == {} + assert data["siblings"] == [] + module_build_service.utils.load_mmd(data["modulemd"]) # Make sure the buildrequires entry was created module = ModuleBuild.query.get(8) assert len(module.buildrequires) == 1 - assert module.buildrequires[0].name == 'platform' - assert module.buildrequires[0].stream == 'f28' - assert module.buildrequires[0].version == '3' - assert module.buildrequires[0].context == '00000000' + assert module.buildrequires[0].name == "platform" + assert module.buildrequires[0].stream == "f28" + assert module.buildrequires[0].version == "3" + assert module.buildrequires[0].context == "00000000" assert module.buildrequires[0].stream_version == 280000 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_no_base_module(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule-no-base-module.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule-no-base-module.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - rv = self.client.post('/module-build-service/2/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + rv = self.client.post( + "/module-build-service/2/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) assert data == { - 'status': 422, - 'message': ('None of the base module (platform) streams in the buildrequires section ' - 'could be found'), - 'error': 'Unprocessable Entity' + "status": 422, + "message": ( + "None of the base module (platform) streams in the buildrequires section " + "could be found" + ), + "error": "Unprocessable Entity", } - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.rebuild_strategy_allow_override', - new_callable=PropertyMock, return_value=True) + 
@patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.rebuild_strategy_allow_override", + new_callable=PropertyMock, + return_value=True, + ) def test_submit_build_rebuild_strategy(self, mocked_rmao, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'rebuild_strategy': 'only-changed', - 'scmurl': ('https://src.stg.fedoraproject.org/modules/testmodule.git?' - '#68931c90de214d9d13feefbd35246a81b6cb8d49')})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "rebuild_strategy": "only-changed", + "scmurl": ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?" + "#68931c90de214d9d13feefbd35246a81b6cb8d49" + ), + }), + ) data = json.loads(rv.data) - assert data['rebuild_strategy'] == 'only-changed' + assert data["rebuild_strategy"] == "only-changed" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.rebuild_strategies_allowed', - new_callable=PropertyMock, return_value=['all']) - @patch('module_build_service.config.Config.rebuild_strategy_allow_override', - new_callable=PropertyMock, return_value=True) - def test_submit_build_rebuild_strategy_not_allowed(self, mock_rsao, mock_rsa, mocked_scm, - mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.rebuild_strategies_allowed", + new_callable=PropertyMock, + return_value=["all"], + ) + @patch( + "module_build_service.config.Config.rebuild_strategy_allow_override", + new_callable=PropertyMock, + return_value=True, + ) + def test_submit_build_rebuild_strategy_not_allowed( + self, mock_rsao, mock_rsa, mocked_scm, mocked_get_user + ): + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'rebuild_strategy': 'only-changed', - 'scmurl': ('https://src.stg.fedoraproject.org/modules/testmodule.git?' - '#68931c90de214d9d13feefbd35246a81b6cb8d49')})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "rebuild_strategy": "only-changed", + "scmurl": ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?" + "#68931c90de214d9d13feefbd35246a81b6cb8d49" + ), + }), + ) data = json.loads(rv.data) assert rv.status_code == 400 expected_error = { - 'error': 'Bad Request', - 'message': ('The rebuild method of "only-changed" is not allowed. Choose from: all.'), - 'status': 400 + "error": "Bad Request", + "message": ('The rebuild method of "only-changed" is not allowed. 
Choose from: all.'), + "status": 400, } assert data == expected_error - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_dep_not_present(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule-no-deps.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule-no-deps.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', - 'scmurl': ('https://src.stg.fedoraproject.org/modules/testmodule.git?' - '#68931c90de214d9d13feefbd35246a81b6cb8d49')})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?" + "#68931c90de214d9d13feefbd35246a81b6cb8d49" + ), + }), + ) data = json.loads(rv.data) assert rv.status_code == 422 expected_error = { - 'error': 'Unprocessable Entity', - 'message': 'Cannot find any module builds for chineese_food:good', - 'status': 422 + "error": "Unprocessable Entity", + "message": "Cannot find any module builds for chineese_food:good", + "status": 422, } assert data == expected_error - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_rebuild_strategy_override_not_allowed(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'rebuild_strategy': 'only-changed', - 'scmurl': ('https://src.stg.fedoraproject.org/modules/testmodule.git?' - '#68931c90de214d9d13feefbd35246a81b6cb8d49')})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "rebuild_strategy": "only-changed", + "scmurl": ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?" 
+ "#68931c90de214d9d13feefbd35246a81b6cb8d49" + ), + }), + ) data = json.loads(rv.data) assert rv.status_code == 400 expected_error = { - 'error': 'Bad Request', - 'message': ('The request contains the "rebuild_strategy" parameter but overriding ' - 'the default isn\'t allowed'), - 'status': 400 + "error": "Bad Request", + "message": ( + 'The request contains the "rebuild_strategy" parameter but overriding ' + "the default isn't allowed" + ), + "status": 400, } assert data == expected_error - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_componentless_build(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'fakemodule', 'fakemodule.yaml', - '3da541559918a808c2402bba5012f6c60b27661c') + FakeSCM( + mocked_scm, "fakemodule", "fakemodule.yaml", "3da541559918a808c2402bba5012f6c60b27661c") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) - assert data['component_builds'] == [] - assert data['name'] == 'fakemodule' - assert data['scmurl'] == ('https://src.stg.fedoraproject.org/modules/testmodule.git' - '?#68931c90de214d9d13feefbd35246a81b6cb8d49') - assert data['version'] == '281' - assert data['time_submitted'] is not None - assert data['time_modified'] is not None - assert data['time_completed'] is None - assert data['stream'] == 'master' - assert data['owner'] == 'Homer J. Simpson' - assert data['id'] == 8 - assert data['state_name'] == 'init' - assert data['rebuild_strategy'] == 'changed-and-after' + assert data["component_builds"] == [] + assert data["name"] == "fakemodule" + assert data["scmurl"] == ( + "https://src.stg.fedoraproject.org/modules/testmodule.git" + "?#68931c90de214d9d13feefbd35246a81b6cb8d49" + ) + assert data["version"] == "281" + assert data["time_submitted"] is not None + assert data["time_modified"] is not None + assert data["time_completed"] is None + assert data["stream"] == "master" + assert data["owner"] == "Homer J. Simpson" + assert data["id"] == 8 + assert data["state_name"] == "init" + assert data["rebuild_strategy"] == "changed-and-after" def test_submit_build_auth_error(self): base_dir = path.abspath(path.dirname(__file__)) client_secrets = path.join(base_dir, "client_secrets.json") - with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets}): - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#48931b90de214d9d13feefbd35246a81b6cb8d49'})) + with patch.dict("module_build_service.app.config", {"OIDC_CLIENT_SECRETS": client_secrets}): + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#48931b90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) - assert data['message'] == "No 'authorization' header found." 
- assert data['status'] == 401 - assert data['error'] == 'Unauthorized' + assert data["message"] == "No 'authorization' header found." + assert data["status"] == 401 + assert data["error"] == "Unauthorized" - @patch('module_build_service.auth.get_user', return_value=user) + @patch("module_build_service.auth.get_user", return_value=user) def test_submit_build_scm_url_error(self, mocked_get_user): - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'git://badurl.com'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({"branch": "master", "scmurl": "git://badurl.com"}), + ) data = json.loads(rv.data) - assert data['message'] == 'The submitted scmurl git://badurl.com is not allowed' - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["message"] == "The submitted scmurl git://badurl.com is not allowed" + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=user) + @patch("module_build_service.auth.get_user", return_value=user) def test_submit_build_scm_url_without_hash(self, mocked_get_user): - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/testmodule.git", + }), + ) data = json.loads(rv.data) - assert data['message'] == ('The submitted scmurl https://src.stg.fedoraproject.org' - '/modules/testmodule.git is not valid') - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["message"] == ( + "The submitted scmurl https://src.stg.fedoraproject.org" + "/modules/testmodule.git is not valid" + ) + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_bad_modulemd(self, mocked_scm, mocked_get_user): FakeSCM(mocked_scm, "bad", "bad.yaml") - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) - assert re.match(r'The modulemd .* is invalid\. Please verify the syntax is correct', - data['message']) - assert data['status'] == 422 - assert data['error'] == 'Unprocessable Entity' + assert re.match( + r"The modulemd .* is invalid\. 
Please verify the syntax is correct", + data["message"] + ) + assert data["status"] == 422 + assert data["error"] == "Unprocessable Entity" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - def test_submit_build_includedmodule_custom_repo_not_allowed(self, - mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, "includedmodules", ["includedmodules.yaml", - "testmodule.yaml"]) - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + def test_submit_build_includedmodule_custom_repo_not_allowed(self, mocked_scm, mocked_get_user): + FakeSCM(mocked_scm, "includedmodules", ["includedmodules.yaml", "testmodule.yaml"]) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=other_user) + @patch("module_build_service.auth.get_user", return_value=other_user) def test_cancel_build(self, mocked_get_user): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'failed'})) + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"})) data = json.loads(rv.data) - assert data['state'] == 4 - assert data['state_reason'] == 'Canceled by some_other_user.' + assert data["state"] == 4 + assert data["state_reason"] == "Canceled by some_other_user." 
- @patch('module_build_service.auth.get_user', return_value=other_user) + @patch("module_build_service.auth.get_user", return_value=other_user) def test_cancel_build_already_failed(self, mocked_get_user): module = ModuleBuild.query.filter_by(id=7).one() module.state = 4 db.session.add(module) db.session.commit() - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'failed'})) + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"})) data = json.loads(rv.data) - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=('sammy', set())) + @patch("module_build_service.auth.get_user", return_value=("sammy", set())) def test_cancel_build_unauthorized_no_groups(self, mocked_get_user): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'failed'})) + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"})) data = json.loads(rv.data) - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=('sammy', set(["packager"]))) + @patch("module_build_service.auth.get_user", return_value=("sammy", set(["packager"]))) def test_cancel_build_unauthorized_not_owner(self, mocked_get_user): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'failed'})) + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"})) data = json.loads(rv.data) - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', - return_value=('sammy', set(["packager", "mbs-admin"]))) + @patch( + "module_build_service.auth.get_user", return_value=("sammy", set(["packager", "mbs-admin"])) + ) def test_cancel_build_admin(self, mocked_get_user): - with patch("module_build_service.config.Config.admin_groups", - new_callable=PropertyMock, return_value=set(["mbs-admin"])): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'failed'})) + with patch( + "module_build_service.config.Config.admin_groups", + new_callable=PropertyMock, + return_value=set(["mbs-admin"]), + ): + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"})) data = json.loads(rv.data) - assert data['state'] == 4 - assert data['state_reason'] == 'Canceled by sammy.' + assert data["state"] == 4 + assert data["state_reason"] == "Canceled by sammy." 
- @patch('module_build_service.auth.get_user', - return_value=('sammy', set(["packager"]))) + @patch("module_build_service.auth.get_user", return_value=("sammy", set(["packager"]))) def test_cancel_build_no_admin(self, mocked_get_user): - with patch("module_build_service.config.Config.admin_groups", - new_callable=PropertyMock, return_value=set(["mbs-admin"])): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'failed'})) + with patch( + "module_build_service.config.Config.admin_groups", + new_callable=PropertyMock, + return_value=set(["mbs-admin"]), + ): + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"})) data = json.loads(rv.data) - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=other_user) + @patch("module_build_service.auth.get_user", return_value=other_user) def test_cancel_build_wrong_param(self, mocked_get_user): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'some_param': 'value'})) + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"some_param": "value"})) data = json.loads(rv.data) - assert data['status'] == 400 - assert data['error'] == 'Bad Request' - assert data['message'] == 'Invalid JSON submitted' + assert data["status"] == 400 + assert data["error"] == "Bad Request" + assert data["message"] == "Invalid JSON submitted" - @patch('module_build_service.auth.get_user', return_value=other_user) + @patch("module_build_service.auth.get_user", return_value=other_user) def test_cancel_build_wrong_state(self, mocked_get_user): - rv = self.client.patch('/module-build-service/1/module-builds/7', - data=json.dumps({'state': 'some_state'})) + rv = self.client.patch( + "/module-build-service/1/module-builds/7", data=json.dumps({"state": "some_state"})) data = json.loads(rv.data) - assert data['status'] == 400 - assert data['error'] == 'Bad Request' - assert data['message'] == 'The provided state change is not supported' + assert data["status"] == 400 + assert data["error"] == "Bad Request" + assert data["message"] == "The provided state change is not supported" - @patch('module_build_service.auth.get_user', return_value=user) + @patch("module_build_service.auth.get_user", return_value=user) def test_submit_build_unsupported_scm_scheme(self, mocked_get_user): - scmurl = 'unsupported://example.com/modules/' - 'testmodule.git?#0000000000000000000000000000000000000000' - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': scmurl})) + scmurl = "unsupported://example.com/modules/" + "testmodule.git?#0000000000000000000000000000000000000000" + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({"branch": "master", "scmurl": scmurl}), + ) data = json.loads(rv.data) - assert data['message'] in ("The submitted scmurl {} is not allowed".format(scmurl), - "The submitted scmurl {} is not valid".format(scmurl)) - assert data['status'] == 403 - assert data['error'] == 'Forbidden' + assert data["message"] in ( + "The submitted scmurl {} is not allowed".format(scmurl), + "The submitted scmurl {} is not valid".format(scmurl), + ) + assert data["status"] == 403 + assert data["error"] == "Forbidden" - @patch('module_build_service.auth.get_user', return_value=user) - 
@patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_version_set_error(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule-version-set.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule-version-set.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) - assert data['status'] == 400 - assert data['message'] == ('The version "123456789" is already defined in the modulemd ' - 'but it shouldn\'t be since the version is generated based on ' - 'the commit time') - assert data['error'] == 'Bad Request' + assert data["status"] == 400 + assert data["message"] == ( + 'The version "123456789" is already defined in the modulemd but it shouldn\'t be since ' + "the version is generated based on the commit time" + ) + assert data["error"] == "Bad Request" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_wrong_stream(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule-wrong-stream.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule-wrong-stream.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + }), + ) data = json.loads(rv.data) - assert data['status'] == 400 - assert data['message'] == ('The stream "wrong_stream" that is stored in the modulemd ' - 'does not match the branch "master"') - assert data['error'] == 'Bad Request' + assert data["status"] == 400 + assert data["message"] == ( + 'The stream "wrong_stream" that is stored in the modulemd ' + 'does not match the branch "master"' + ) + assert data["error"] == "Bad Request" - @patch('module_build_service.auth.get_user', return_value=user) + @patch("module_build_service.auth.get_user", return_value=user) def test_submit_build_set_owner(self, mocked_get_user): data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', - 'owner': 'foo', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + "owner": "foo", } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", 
data=json.dumps(data)) result = json.loads(rv.data) - assert result['status'] == 400 - assert "The request contains 'owner' parameter" in result['message'] + assert result["status"] == 400 + assert "The request contains 'owner' parameter" in result["message"] - @patch('module_build_service.auth.get_user', return_value=anonymous_user) - @patch('module_build_service.scm.SCM') - @patch("module_build_service.config.Config.no_auth", new_callable=PropertyMock, - return_value=True) + @patch("module_build_service.auth.get_user", return_value=anonymous_user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.no_auth", new_callable=PropertyMock, return_value=True + ) def test_submit_build_no_auth_set_owner(self, mocked_conf, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', - 'owner': 'foo', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + "owner": "foo", } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data)) result = json.loads(rv.data) - build = ModuleBuild.query.filter(ModuleBuild.id == result['id']).one() - assert (build.owner == result['owner'] == 'foo') is True + build = ModuleBuild.query.filter(ModuleBuild.id == result["id"]).one() + assert (build.owner == result["owner"] == "foo") is True - @patch('module_build_service.auth.get_user', return_value=('svc_account', set())) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.allowed_users', new_callable=PropertyMock) + @patch("module_build_service.auth.get_user", return_value=("svc_account", set())) + @patch("module_build_service.scm.SCM") + @patch("module_build_service.config.Config.allowed_users", new_callable=PropertyMock) def test_submit_build_allowed_users(self, allowed_users, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - allowed_users.return_value = {'svc_account'} + allowed_users.return_value = {"svc_account"} data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data)) assert rv.status_code == 201 - @patch('module_build_service.auth.get_user', return_value=anonymous_user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=anonymous_user) + @patch("module_build_service.scm.SCM") @patch("module_build_service.config.Config.no_auth", new_callable=PropertyMock) def test_patch_set_different_owner(self, mocked_no_auth, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 
'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") mocked_no_auth.return_value = True data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', - 'owner': 'foo', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + "owner": "foo", } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data)) r1 = json.loads(rv.data) - url = '/module-build-service/1/module-builds/' + str(r1['id']) - r2 = self.client.patch(url, data=json.dumps({'state': 'failed'})) + url = "/module-build-service/1/module-builds/" + str(r1["id"]) + r2 = self.client.patch(url, data=json.dumps({"state": "failed"})) assert r2.status_code == 403 - r3 = self.client.patch(url, data=json.dumps({'state': 'failed', 'owner': 'foo'})) + r3 = self.client.patch(url, data=json.dumps({"state": "failed", "owner": "foo"})) assert r3.status_code == 200 mocked_no_auth.return_value = False - r3 = self.client.patch(url, data=json.dumps({'state': 'failed', 'owner': 'foo'})) + r3 = self.client.patch(url, data=json.dumps({"state": "failed", "owner": "foo"})) assert r3.status_code == 400 - assert "The request contains 'owner' parameter" in json.loads(r3.data)['message'] + assert "The request contains 'owner' parameter" in json.loads(r3.data)["message"] - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_commit_hash_not_found(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '7035bd33614972ac66559ac1fdd019ff6027ad22', checkout_raise=True) + FakeSCM( + mocked_scm, + "testmodule", + "testmodule.yaml", + "7035bd33614972ac66559ac1fdd019ff6027ad22", + checkout_raise=True, + ) - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#7035bd33614972ac66559ac1fdd019ff6027ad22'})) + rv = self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({ + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#7035bd33614972ac66559ac1fdd019ff6027ad22", + }), + ) data = json.loads(rv.data) - assert "The requested commit hash was not found within the repository." in data['message'] - assert "Perhaps you forgot to push. The original message was: " in data['message'] - assert data['status'] == 422 - assert data['error'] == 'Unprocessable Entity' + assert "The requested commit hash was not found within the repository." in data["message"] + assert "Perhaps you forgot to push. 
The original message was: " in data["message"] + assert data["status"] == 422 + assert data["error"] == "Unprocessable Entity" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") @patch("module_build_service.config.Config.allow_custom_scmurls", new_callable=PropertyMock) def test_submit_custom_scmurl(self, allow_custom_scmurls, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") def submit(scmurl): - return self.client.post('/module-build-service/1/module-builds/', data=json.dumps( - {'branch': 'master', 'scmurl': scmurl})) + return self.client.post( + "/module-build-service/1/module-builds/", + data=json.dumps({"branch": "master", "scmurl": scmurl}), + ) allow_custom_scmurls.return_value = False - res1 = submit('git://some.custom.url.org/modules/testmodule.git?#68931c9') + res1 = submit("git://some.custom.url.org/modules/testmodule.git?#68931c9") data = json.loads(res1.data) - assert data['status'] == 403 - assert data['message'].startswith('The submitted scmurl') is True - assert data['message'].endswith('is not allowed') is True + assert data["status"] == 403 + assert data["message"].startswith("The submitted scmurl") is True + assert data["message"].endswith("is not allowed") is True allow_custom_scmurls.return_value = True - res2 = submit('git://some.custom.url.org/modules/testmodule.git?#68931c9') + res2 = submit("git://some.custom.url.org/modules/testmodule.git?#68931c9") assert res2.status_code == 201 - @pytest.mark.parametrize('br_override_streams, req_override_streams', ( - (['f28'], None), - (['f28'], ['f28']), - )) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @pytest.mark.parametrize( + "br_override_streams, req_override_streams", ((["f28"], None), (["f28"], ["f28"])) + ) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_dep_override( - self, mocked_scm, mocked_get_user, br_override_streams, req_override_streams): + self, mocked_scm, mocked_get_user, br_override_streams, req_override_streams + ): init_data(data_size=1, multiple_stream_versions=True) - FakeSCM(mocked_scm, 'testmodule', 'testmodule_platform_f290000.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_platform_f290000.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe' - 'efbd35246a81b6cb8d49') - json_input = { - 'branch': 'master', - 'scmurl': scm_url - } + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe" + "efbd35246a81b6cb8d49" + ) + json_input = {"branch": "master", "scmurl": scm_url} if br_override_streams: - json_input['buildrequire_overrides'] = {'platform': br_override_streams} + json_input["buildrequire_overrides"] = {"platform": br_override_streams} expected_br = set(br_override_streams) else: - expected_br = set(['f29.0.0']) + expected_br = set(["f29.0.0"]) if req_override_streams: - json_input['require_overrides'] = 
{'platform': req_override_streams} + json_input["require_overrides"] = {"platform": req_override_streams} expected_req = set(req_override_streams) else: - expected_req = set(['f29.0.0']) + expected_req = set(["f29.0.0"]) rv = self.client.post(post_url, data=json.dumps(json_input)) data = json.loads(rv.data) - mmd = module_build_service.utils.load_mmd(data[0]['modulemd']) + mmd = module_build_service.utils.load_mmd(data[0]["modulemd"]) assert len(mmd.get_dependencies()) == 1 dep = mmd.get_dependencies()[0] - assert set(dep.get_buildrequires()['platform'].get()) == expected_br - assert set(dep.get_requires()['platform'].get()) == expected_req + assert set(dep.get_buildrequires()["platform"].get()) == expected_br + assert set(dep.get_requires()["platform"].get()) == expected_req - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_invalid_basemodule_stream(self, mocked_scm, mocked_get_user): # By default tests do not provide platform:f28.0.0, but just platform:f28. # Therefore we want to enable multiple_stream_versions. init_data(2, multiple_stream_versions=True) - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', - 'buildrequire_overrides': {'platform': ['28.0.0']}, - 'require_overrides': {'platform': ['f28.0.0']} + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + "buildrequire_overrides": {"platform": ["28.0.0"]}, + "require_overrides": {"platform": ["f28.0.0"]}, } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data)) result = json.loads(rv.data) assert result == { - 'error': 'Unprocessable Entity', - 'status': 422, - 'message': ('None of the base module (platform) streams in the buildrequires ' - 'section could be found') + "error": "Unprocessable Entity", + "status": 422, + "message": ( + "None of the base module (platform) streams in the buildrequires " + "section could be found" + ), } assert rv.status_code == 422 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_with_base_module_name(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'platform', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "platform", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'platform.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "platform.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data)) result = json.loads(rv.data) assert result == 
{ - 'error': 'Bad Request', - 'status': 400, - 'message': 'You cannot build a module named "platform" since it is a base module' + "error": "Bad Request", + "status": 400, + "message": 'You cannot build a module named "platform" since it is a base module', } assert rv.status_code == 400 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_with_xmd(self, mocked_scm, mocked_get_user): - FakeSCM(mocked_scm, 'testmodule', 'testmodule-forbidden-xmd.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule-forbidden-xmd.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", } - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(data)) + rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data)) result = json.loads(rv.data) assert result == { - 'error': 'Bad Request', - 'status': 400, - 'message': 'The "mbs" xmd field is reserved for MBS' + "error": "Bad Request", + "status": 400, + "message": 'The "mbs" xmd field is reserved for MBS', } assert rv.status_code == 400 - @pytest.mark.parametrize('dep_type', ('buildrequire', 'require')) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @pytest.mark.parametrize("dep_type", ("buildrequire", "require")) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_override_unused(self, mocked_scm, mocked_get_user, dep_type): - FakeSCM(mocked_scm, 'testmodule', 'testmodule_platform_f290000.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_platform_f290000.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13f' - 'eefbd35246a81b6cb8d49') - json_input = { - 'branch': 'master', - 'scmurl': scm_url, - } - json_input[dep_type + '_overrides'] = {'nonexistent': ['23'], 'nonexistent2': ['2']} + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13f" + "eefbd35246a81b6cb8d49" + ) + json_input = {"branch": "master", "scmurl": scm_url} + json_input[dep_type + "_overrides"] = {"nonexistent": ["23"], "nonexistent2": ["2"]} rv = self.client.post(post_url, data=json.dumps(json_input)) data = json.loads(rv.data) assert data == { - 'error': 'Bad Request', - 'message': ('The {} overrides for the following modules aren\'t applicable: ' - 'nonexistent, nonexistent2').format(dep_type), - 'status': 400 + "error": "Bad Request", + "message": ( + "The {} overrides for the following modules aren't applicable: " + "nonexistent, nonexistent2" + ).format(dep_type), + "status": 400, } - @pytest.mark.parametrize('optional_params', ( - {'buildrequire_overrides': {'platform': 'f28'}}, - {'buildrequire_overrides': {'platform': 28}}, - {'buildrequire_overrides': 'platform:f28'}, - 
{'require_overrides': {'platform': 'f28'}}, - {'require_overrides': {'platform': 28}}, - {'require_overrides': 'platform:f28'} - )) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @pytest.mark.parametrize( + "optional_params", + ( + {"buildrequire_overrides": {"platform": "f28"}}, + {"buildrequire_overrides": {"platform": 28}}, + {"buildrequire_overrides": "platform:f28"}, + {"require_overrides": {"platform": "f28"}}, + {"require_overrides": {"platform": 28}}, + {"require_overrides": "platform:f28"}, + ), + ) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_invalid_override(self, mocked_scm, mocked_get_user, optional_params): - FakeSCM(mocked_scm, 'testmodule', 'testmodule_platform_f290000.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_platform_f290000.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13f' - 'eefbd35246a81b6cb8d49') - json_input = { - 'branch': 'master', - 'scmurl': scm_url, - } + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#" + "68931c90de214d9d13feefbd35246a81b6cb8d49" + ) + json_input = {"branch": "master", "scmurl": scm_url} json_input.update(optional_params) rv = self.client.post(post_url, data=json.dumps(json_input)) data = json.loads(rv.data) - msg = ('The "{}" parameter must be an object with the keys as module names and the values ' - 'as arrays of streams') - if 'buildrequire_overrides' in optional_params: - msg = msg.format('buildrequire_overrides') - elif 'require_overrides' in optional_params: - msg = msg.format('require_overrides') - assert data == { - 'error': 'Bad Request', - 'message': msg, - 'status': 400 - } + msg = ( + 'The "{}" parameter must be an object with the keys as module names and the values ' + "as arrays of streams" + ) + if "buildrequire_overrides" in optional_params: + msg = msg.format("buildrequire_overrides") + elif "require_overrides" in optional_params: + msg = msg.format("require_overrides") + assert data == {"error": "Bad Request", "message": msg, "status": 400} def test_about(self): - with patch.object(mbs_config.Config, 'auth_method', new_callable=PropertyMock) as auth: - auth.return_value = 'kerberos' - rv = self.client.get('/module-build-service/1/about/') + with patch.object(mbs_config.Config, "auth_method", new_callable=PropertyMock) as auth: + auth.return_value = "kerberos" + rv = self.client.get("/module-build-service/1/about/") data = json.loads(rv.data) assert rv.status_code == 200 - assert data == {'auth_method': 'kerberos', 'api_version': 2, 'version': version} + assert data == {"auth_method": "kerberos", "api_version": 2, "version": version} def test_rebuild_strategy_api(self): - rv = self.client.get('/module-build-service/1/rebuild-strategies/') + rv = self.client.get("/module-build-service/1/rebuild-strategies/") data = json.loads(rv.data) assert rv.status_code == 200 expected = { - 'items': [ + "items": [ { - 'allowed': False, - 'default': False, - 'description': 'All components will be rebuilt', - 'name': 'all' + "allowed": False, + "default": False, + "description": "All components will be rebuilt", + "name": "all", }, { - 'allowed': True, - 'default': True, - 'description': ('All components 
that have changed and those in subsequent ' - 'batches will be rebuilt'), - 'name': 'changed-and-after' + "allowed": True, + "default": True, + "description": ( + "All components that have changed and those in subsequent " + "batches will be rebuilt" + ), + "name": "changed-and-after", }, { - 'allowed': False, - 'default': False, - 'description': 'All changed components will be rebuilt', - 'name': 'only-changed' - } + "allowed": False, + "default": False, + "description": "All changed components will be rebuilt", + "name": "only-changed", + }, ] } assert data == expected def test_rebuild_strategy_api_only_changed_default(self): - with patch.object(mbs_config.Config, 'rebuild_strategy', new_callable=PropertyMock) as r_s: - r_s.return_value = 'only-changed' - rv = self.client.get('/module-build-service/1/rebuild-strategies/') + with patch.object(mbs_config.Config, "rebuild_strategy", new_callable=PropertyMock) as r_s: + r_s.return_value = "only-changed" + rv = self.client.get("/module-build-service/1/rebuild-strategies/") data = json.loads(rv.data) assert rv.status_code == 200 expected = { - 'items': [ + "items": [ { - 'allowed': False, - 'default': False, - 'description': 'All components will be rebuilt', - 'name': 'all' + "allowed": False, + "default": False, + "description": "All components will be rebuilt", + "name": "all", }, { - 'allowed': False, - 'default': False, - 'description': ('All components that have changed and those in subsequent ' - 'batches will be rebuilt'), - 'name': 'changed-and-after' + "allowed": False, + "default": False, + "description": ( + "All components that have changed and those in subsequent " + "batches will be rebuilt" + ), + "name": "changed-and-after", }, { - 'allowed': True, - 'default': True, - 'description': 'All changed components will be rebuilt', - 'name': 'only-changed' - } + "allowed": True, + "default": True, + "description": "All changed components will be rebuilt", + "name": "only-changed", + }, ] } assert data == expected def test_rebuild_strategy_api_override_allowed(self): - with patch.object(mbs_config.Config, 'rebuild_strategy_allow_override', - new_callable=PropertyMock) as rsao: + with patch.object( + mbs_config.Config, "rebuild_strategy_allow_override", new_callable=PropertyMock + ) as rsao: rsao.return_value = True - rv = self.client.get('/module-build-service/1/rebuild-strategies/') + rv = self.client.get("/module-build-service/1/rebuild-strategies/") data = json.loads(rv.data) assert rv.status_code == 200 expected = { - 'items': [ + "items": [ { - 'allowed': True, - 'default': False, - 'description': 'All components will be rebuilt', - 'name': 'all' + "allowed": True, + "default": False, + "description": "All components will be rebuilt", + "name": "all", }, { - 'allowed': True, - 'default': True, - 'description': ('All components that have changed and those in subsequent ' - 'batches will be rebuilt'), - 'name': 'changed-and-after' + "allowed": True, + "default": True, + "description": ( + "All components that have changed and those in subsequent " + "batches will be rebuilt" + ), + "name": "changed-and-after", }, { - 'allowed': True, - 'default': False, - 'description': 'All changed components will be rebuilt', - 'name': 'only-changed' - } + "allowed": True, + "default": False, + "description": "All changed components will be rebuilt", + "name": "only-changed", + }, ] } assert data == expected def test_cors_header_decorator(self): - rv = self.client.get('/module-build-service/1/module-builds/') - assert 
rv.headers['Access-Control-Allow-Origin'] == '*' + rv = self.client.get("/module-build-service/1/module-builds/") + assert rv.headers["Access-Control-Allow-Origin"] == "*" - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) - @patch.object(module_build_service.config.Config, 'allowed_groups_to_import_module', - new_callable=PropertyMock, return_value=set()) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) + @patch.object( + module_build_service.config.Config, + "allowed_groups_to_import_module", + new_callable=PropertyMock, + return_value=set(), + ) def test_import_build_disabled(self, mocked_groups, mocked_get_user, api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post(post_url) data = json.loads(rv.data) - assert data['error'] == 'Forbidden' - assert data['message'] == ( - 'Import module API is disabled.') + assert data["error"] == "Forbidden" + assert data["message"] == "Import module API is disabled." - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) def test_import_build_user_not_allowed(self, mocked_get_user, api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post(post_url) data = json.loads(rv.data) - assert data['error'] == 'Forbidden' - assert data['message'] == ( - 'Homer J. Simpson is not in any of ' - '{0}, only {1}'.format(set(['mbs-import-module']), set(['packager']))) + assert data["error"] == "Forbidden" + assert data["message"] == ( + "Homer J. 
Simpson is not in any of {0}, only {1}" + .format(set(["mbs-import-module"]), set(["packager"])) + ) - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=import_module_user) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) def test_import_build_scm_invalid_json(self, mocked_get_user, api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) - rv = self.client.post(post_url, data='') + post_url = "/module-build-service/{0}/import-module/".format(api_version) + rv = self.client.post(post_url, data="") data = json.loads(rv.data) - assert data['error'] == 'Bad Request' - assert data['message'] == 'Invalid JSON submitted' + assert data["error"] == "Bad Request" + assert data["message"] == "Invalid JSON submitted" - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=import_module_user) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) def test_import_build_scm_url_not_allowed(self, mocked_get_user, api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post( - post_url, - data=json.dumps({'scmurl': 'file://' + scm_base_dir + '/mariadb'})) + post_url, data=json.dumps({"scmurl": "file://" + scm_base_dir + "/mariadb"})) data = json.loads(rv.data) - assert data['error'] == 'Forbidden' - assert data['message'].startswith('The submitted scmurl ') - assert data['message'].endswith('/tests/scm_data/mariadb is not allowed') + assert data["error"] == "Forbidden" + assert data["message"].startswith("The submitted scmurl ") + assert data["message"].endswith("/tests/scm_data/mariadb is not allowed") - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=import_module_user) - @patch.object(module_build_service.config.Config, 'allow_custom_scmurls', - new_callable=PropertyMock, return_value=True) - def test_import_build_scm_url_not_in_list(self, mocked_scmurls, mocked_get_user, - api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) + @patch.object( + module_build_service.config.Config, + "allow_custom_scmurls", + new_callable=PropertyMock, + return_value=True, + ) + def test_import_build_scm_url_not_in_list(self, mocked_scmurls, mocked_get_user, api_version): + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post( post_url, - data=json.dumps({'scmurl': 'file://' + scm_base_dir + ( - '/mariadb?#b17bea85de2d03558f24d506578abcfcf467e5bc')})) + data=json.dumps({ + "scmurl": "file://{}/mariadb?#b17bea85de2d03558f24d506578abcfcf467e5bc".format( + scm_base_dir) + }), + ) data = json.loads(rv.data) - assert data['error'] == 'Forbidden' - assert data['message'].endswith( - '/tests/scm_data/mariadb?#b17bea85de2d03558f24d506578abcfcf467e5bc ' - 'is not in the list of allowed SCMs') + assert data["error"] == "Forbidden" + assert data["message"].endswith( + "/tests/scm_data/mariadb?#b17bea85de2d03558f24d506578abcfcf467e5bc " + "is not in the list of allowed SCMs" + ) - @pytest.mark.parametrize('api_version', [1, 2]) - 
@patch('module_build_service.auth.get_user', return_value=import_module_user) - @patch.object(module_build_service.config.Config, 'scmurls', - new_callable=PropertyMock, return_value=['file://']) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) + @patch.object( + module_build_service.config.Config, + "scmurls", + new_callable=PropertyMock, + return_value=["file://"], + ) def test_import_build_scm(self, mocked_scmurls, mocked_get_user, api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post( post_url, - data=json.dumps({'scmurl': 'file://' + scm_base_dir + ( - '/mariadb?#e9742ed681f82e3ef5281fc652b4e68a3826cea6')})) + data=json.dumps({ + "scmurl": "file://{}/mariadb?#e9742ed681f82e3ef5281fc652b4e68a3826cea6".format( + scm_base_dir) + }), + ) data = json.loads(rv.data) - assert 'Module mariadb:10.2:20180724000000:00000000 imported' in data['messages'] - assert data['module']['name'] == 'mariadb' - assert data['module']['stream'] == '10.2' - assert data['module']['version'] == '20180724000000' - assert data['module']['context'] == '00000000' - assert data['module']['owner'] == 'mbs_import' - assert data['module']['state'] == 5 - assert data['module']['state_reason'] is None - assert data['module']['state_name'] == 'ready' - assert data['module']['scmurl'] is None - assert data['module']['component_builds'] == [] - assert data['module']['time_submitted'] == data['module']['time_modified'] == \ - data['module']['time_completed'] - assert data['module']['koji_tag'] == 'mariadb-10.2-20180724000000-00000000' - assert data['module']['siblings'] == [] - assert data['module']['rebuild_strategy'] == 'all' + assert "Module mariadb:10.2:20180724000000:00000000 imported" in data["messages"] + assert data["module"]["name"] == "mariadb" + assert data["module"]["stream"] == "10.2" + assert data["module"]["version"] == "20180724000000" + assert data["module"]["context"] == "00000000" + assert data["module"]["owner"] == "mbs_import" + assert data["module"]["state"] == 5 + assert data["module"]["state_reason"] is None + assert data["module"]["state_name"] == "ready" + assert data["module"]["scmurl"] is None + assert data["module"]["component_builds"] == [] + assert ( + data["module"]["time_submitted"] + == data["module"]["time_modified"] + == data["module"]["time_completed"] + ) + assert data["module"]["koji_tag"] == "mariadb-10.2-20180724000000-00000000" + assert data["module"]["siblings"] == [] + assert data["module"]["rebuild_strategy"] == "all" - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=import_module_user) - @patch.object(module_build_service.config.Config, 'scmurls', - new_callable=PropertyMock, return_value=['file://']) - def test_import_build_scm_another_commit_hash(self, mocked_scmurls, mocked_get_user, - api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) + @patch.object( + module_build_service.config.Config, + "scmurls", + new_callable=PropertyMock, + return_value=["file://"], + ) + def test_import_build_scm_another_commit_hash( + self, mocked_scmurls, mocked_get_user, api_version + ): + post_url = "/module-build-service/{0}/import-module/".format(api_version) 
rv = self.client.post( post_url, - data=json.dumps({'scmurl': 'file://' + scm_base_dir + ( - '/mariadb?#8b43f38cdafdd773e7d0308e758105bf9f0f67a8')})) + data=json.dumps({ + "scmurl": "file://{}/mariadb?#8b43f38cdafdd773e7d0308e758105bf9f0f67a8".format( + scm_base_dir) + }), + ) data = json.loads(rv.data) - assert 'Module mariadb:10.2:20180724065109:00000000 imported' in data['messages'] - assert data['module']['name'] == 'mariadb' - assert data['module']['stream'] == '10.2' - assert data['module']['version'] == '20180724065109' - assert data['module']['context'] == '00000000' - assert data['module']['owner'] == 'mbs_import' - assert data['module']['state'] == 5 - assert data['module']['state_reason'] is None - assert data['module']['state_name'] == 'ready' - assert data['module']['scmurl'] is None - assert data['module']['component_builds'] == [] - assert data['module']['time_submitted'] == data['module']['time_modified'] == \ - data['module']['time_completed'] - assert data['module']['koji_tag'] == 'mariadb-10.2-20180724065109-00000000' - assert data['module']['siblings'] == [] - assert data['module']['rebuild_strategy'] == 'all' + assert "Module mariadb:10.2:20180724065109:00000000 imported" in data["messages"] + assert data["module"]["name"] == "mariadb" + assert data["module"]["stream"] == "10.2" + assert data["module"]["version"] == "20180724065109" + assert data["module"]["context"] == "00000000" + assert data["module"]["owner"] == "mbs_import" + assert data["module"]["state"] == 5 + assert data["module"]["state_reason"] is None + assert data["module"]["state_name"] == "ready" + assert data["module"]["scmurl"] is None + assert data["module"]["component_builds"] == [] + assert ( + data["module"]["time_submitted"] + == data["module"]["time_modified"] + == data["module"]["time_completed"] + ) + assert data["module"]["koji_tag"] == "mariadb-10.2-20180724065109-00000000" + assert data["module"]["siblings"] == [] + assert data["module"]["rebuild_strategy"] == "all" - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=import_module_user) - @patch.object(module_build_service.config.Config, 'scmurls', - new_callable=PropertyMock, return_value=['file://']) - def test_import_build_scm_incomplete_nsvc(self, mocked_scmurls, mocked_get_user, - api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) + @patch.object( + module_build_service.config.Config, + "scmurls", + new_callable=PropertyMock, + return_value=["file://"], + ) + def test_import_build_scm_incomplete_nsvc(self, mocked_scmurls, mocked_get_user, api_version): + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post( post_url, - data=json.dumps({'scmurl': 'file://' + scm_base_dir + ( - '/mariadb?#b17bea85de2d03558f24d506578abcfcf467e5bc')})) + data=json.dumps({ + "scmurl": "file://{}/mariadb?#b17bea85de2d03558f24d506578abcfcf467e5bc".format( + scm_base_dir) + }), + ) data = json.loads(rv.data) - assert data['error'] == 'Unprocessable Entity' - assert data['message'] == 'Incomplete NSVC: None:None:0:00000000' + assert data["error"] == "Unprocessable Entity" + assert data["message"] == "Incomplete NSVC: None:None:0:00000000" - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=import_module_user) - 
@patch.object(module_build_service.config.Config, 'scmurls', - new_callable=PropertyMock, return_value=['file://']) - def test_import_build_scm_yaml_is_bad(self, mocked_scmurls, mocked_get_user, - api_version): - post_url = '/module-build-service/{0}/import-module/'.format(api_version) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=import_module_user) + @patch.object( + module_build_service.config.Config, + "scmurls", + new_callable=PropertyMock, + return_value=["file://"], + ) + def test_import_build_scm_yaml_is_bad(self, mocked_scmurls, mocked_get_user, api_version): + post_url = "/module-build-service/{0}/import-module/".format(api_version) rv = self.client.post( post_url, - data=json.dumps({'scmurl': 'file://' + scm_base_dir + ( - '/mariadb?#f7c5c7218c9a197d7fd245eeb4eee3d7abffd75d')})) + data=json.dumps({ + "scmurl": "file://{}/mariadb?#f7c5c7218c9a197d7fd245eeb4eee3d7abffd75d".format( + scm_base_dir) + }), + ) data = json.loads(rv.data) - assert data['error'] == 'Unprocessable Entity' - assert re.match(r'The modulemd .* is invalid\. Please verify the syntax is correct', - data['message']) + assert data["error"] == "Unprocessable Entity" + assert re.match( + r"The modulemd .* is invalid\. Please verify the syntax is correct", data["message"] + ) def test_buildrequires_is_included_in_json_output(self): # Inject xmd/mbs/buildrequires into an existing module build for # assertion later. from module_build_service.models import make_session from module_build_service import conf - br_modulea = dict(stream='6', version='1', context='1234') - br_moduleb = dict(stream='10', version='1', context='5678') + + br_modulea = dict(stream="6", version="1", context="1234") + br_moduleb = dict(stream="10", version="1", context="5678") with make_session(conf) as session: build = ModuleBuild.query.first() mmd = build.mmd() xmd = from_variant_dict(mmd.get_xmd()) - mbs = xmd.setdefault('mbs', {}) - buildrequires = mbs.setdefault('buildrequires', {}) - buildrequires['modulea'] = br_modulea - buildrequires['moduleb'] = br_moduleb + mbs = xmd.setdefault("mbs", {}) + buildrequires = mbs.setdefault("buildrequires", {}) + buildrequires["modulea"] = br_modulea + buildrequires["moduleb"] = br_moduleb mmd.set_xmd(dict_values(xmd)) build.modulemd = to_text_type(mmd.dumps()) session.commit() - rv = self.client.get('/module-build-service/1/module-builds/{}'.format(build.id)) + rv = self.client.get("/module-build-service/1/module-builds/{}".format(build.id)) data = json.loads(rv.data) - buildrequires = data.get('buildrequires', {}) + buildrequires = data.get("buildrequires", {}) - assert br_modulea == buildrequires.get('modulea') - assert br_moduleb == buildrequires.get('moduleb') + assert br_modulea == buildrequires.get("modulea") + assert br_moduleb == buildrequires.get("moduleb") - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) def test_submit_scratch_build( - self, mocked_allow_scratch, mocked_scm, mocked_get_user, api_version): - FakeSCM(mocked_scm, 
'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + self, mocked_allow_scratch, mocked_scm, mocked_get_user, api_version + ): + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - post_url = '/module-build-service/{0}/module-builds/'.format(api_version) + post_url = "/module-build-service/{0}/module-builds/".format(api_version) post_data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', - 'scratch': True, + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + "scratch": True, } rv = self.client.post(post_url, data=json.dumps(post_data)) data = json.loads(rv.data) @@ -1795,83 +2031,96 @@ class TestViews: assert len(data) == 1 data = data[0] - assert 'component_builds' in data, data - assert data['component_builds'] == [] - assert data['name'] == 'testmodule' - assert data['scmurl'] == ('https://src.stg.fedoraproject.org/modules/testmodule.git' - '?#68931c90de214d9d13feefbd35246a81b6cb8d49') - assert data['scratch'] is True - assert data['version'] == '281' - assert data['time_submitted'] is not None - assert data['time_modified'] is not None - assert data['time_completed'] is None - assert data['stream'] == 'master' - assert data['owner'] == 'Homer J. Simpson' - assert data['id'] == 8 - assert data['rebuild_strategy'] == 'changed-and-after' - assert data['state_name'] == 'init' - assert data['state_url'] == '/module-build-service/{0}/module-builds/8'.format(api_version) - assert len(data['state_trace']) == 1 - assert data['state_trace'][0]['state'] == 0 - assert data['tasks'] == {} - assert data['siblings'] == [] - module_build_service.utils.load_mmd(data['modulemd']) + assert "component_builds" in data, data + assert data["component_builds"] == [] + assert data["name"] == "testmodule" + assert data["scmurl"] == ( + "https://src.stg.fedoraproject.org/modules/testmodule.git" + "?#68931c90de214d9d13feefbd35246a81b6cb8d49" + ) + assert data["scratch"] is True + assert data["version"] == "281" + assert data["time_submitted"] is not None + assert data["time_modified"] is not None + assert data["time_completed"] is None + assert data["stream"] == "master" + assert data["owner"] == "Homer J. 
Simpson" + assert data["id"] == 8 + assert data["rebuild_strategy"] == "changed-and-after" + assert data["state_name"] == "init" + assert data["state_url"] == "/module-build-service/{0}/module-builds/8".format(api_version) + assert len(data["state_trace"]) == 1 + assert data["state_trace"][0]["state"] == 0 + assert data["tasks"] == {} + assert data["siblings"] == [] + module_build_service.utils.load_mmd(data["modulemd"]) # Make sure the buildrequires entry was created module = ModuleBuild.query.get(8) assert len(module.buildrequires) == 1 - assert module.buildrequires[0].name == 'platform' - assert module.buildrequires[0].stream == 'f28' - assert module.buildrequires[0].version == '3' - assert module.buildrequires[0].context == '00000000' + assert module.buildrequires[0].name == "platform" + assert module.buildrequires[0].stream == "f28" + assert module.buildrequires[0].version == "3" + assert module.buildrequires[0].context == "00000000" assert module.buildrequires[0].stream_version == 280000 - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=False) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=False, + ) def test_submit_scratch_build_not_allowed( - self, mocked_allow_scratch, mocked_scm, mocked_get_user, api_version): - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + self, mocked_allow_scratch, mocked_scm, mocked_get_user, api_version + ): + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - post_url = '/module-build-service/{0}/module-builds/'.format(api_version) + post_url = "/module-build-service/{0}/module-builds/".format(api_version) post_data = { - 'branch': 'master', - 'scmurl': 'https://src.stg.fedoraproject.org/modules/' - 'testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49', - 'scratch': True, + "branch": "master", + "scmurl": "https://src.stg.fedoraproject.org/modules/" + "testmodule.git?#68931c90de214d9d13feefbd35246a81b6cb8d49", + "scratch": True, } rv = self.client.post(post_url, data=json.dumps(post_data)) data = json.loads(rv.data) expected_error = { - 'error': 'Forbidden', - 'message': 'Scratch builds are not enabled', - 'status': 403 + "error": "Forbidden", + "message": "Scratch builds are not enabled", + "status": 403, } assert data == expected_error - assert rv.status_code == expected_error['status'] + assert rv.status_code == expected_error["status"] - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) - @patch('module_build_service.config.Config.yaml_submit_allowed', - new_callable=PropertyMock, return_value=True) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) + @patch( + "module_build_service.config.Config.yaml_submit_allowed", + 
new_callable=PropertyMock, + return_value=True, + ) def test_submit_scratch_build_with_mmd( - self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user, api_version): + self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user, api_version + ): base_dir = path.abspath(path.dirname(__file__)) - mmd_path = path.join(base_dir, '..', 'staged_data', 'testmodule.yaml') - post_url = '/module-build-service/{0}/module-builds/'.format(api_version) - with open(mmd_path, 'rb') as f: - modulemd = f.read().decode('utf-8') + mmd_path = path.join(base_dir, "..", "staged_data", "testmodule.yaml") + post_url = "/module-build-service/{0}/module-builds/".format(api_version) + with open(mmd_path, "rb") as f: + modulemd = f.read().decode("utf-8") post_data = { - 'branch': 'master', - 'scratch': True, - 'modulemd': modulemd, - 'module_name': str(splitext(basename(mmd_path))[0]), + "branch": "master", + "scratch": True, + "modulemd": modulemd, + "module_name": str(splitext(basename(mmd_path))[0]), } rv = self.client.post(post_url, data=json.dumps(post_data)) data = json.loads(rv.data) @@ -1881,87 +2130,99 @@ class TestViews: assert len(data) == 1 data = data[0] - assert 'component_builds' in data, data - assert data['component_builds'] == [] - assert data['name'] == 'testmodule' - assert data['scmurl'] is None + assert "component_builds" in data, data + assert data["component_builds"] == [] + assert data["name"] == "testmodule" + assert data["scmurl"] is None # generated modulemd is nondeterministic, so just make sure it is set - assert data['modulemd'] is not None - assert data['scratch'] is True + assert data["modulemd"] is not None + assert data["scratch"] is True # generated version is nondeterministic, so just make sure it is set - assert data['version'] is not None - assert data['time_submitted'] is not None - assert data['time_modified'] is not None - assert data['time_completed'] is None - assert data['stream'] == 'master' - assert data['owner'] == 'Homer J. Simpson' - assert data['id'] == 8 - assert data['rebuild_strategy'] == 'changed-and-after' - assert data['state_name'] == 'init' - assert data['state_url'] == '/module-build-service/{0}/module-builds/8'.format(api_version) - assert len(data['state_trace']) == 1 - assert data['state_trace'][0]['state'] == 0 - assert data['tasks'] == {} - assert data['siblings'] == [] - module_build_service.utils.load_mmd(data['modulemd']) + assert data["version"] is not None + assert data["time_submitted"] is not None + assert data["time_modified"] is not None + assert data["time_completed"] is None + assert data["stream"] == "master" + assert data["owner"] == "Homer J. 
Simpson" + assert data["id"] == 8 + assert data["rebuild_strategy"] == "changed-and-after" + assert data["state_name"] == "init" + assert data["state_url"] == "/module-build-service/{0}/module-builds/8".format(api_version) + assert len(data["state_trace"]) == 1 + assert data["state_trace"][0]["state"] == 0 + assert data["tasks"] == {} + assert data["siblings"] == [] + module_build_service.utils.load_mmd(data["modulemd"]) # Make sure the buildrequires entry was created module = ModuleBuild.query.get(8) assert len(module.buildrequires) == 1 - assert module.buildrequires[0].name == 'platform' - assert module.buildrequires[0].stream == 'f28' - assert module.buildrequires[0].version == '3' - assert module.buildrequires[0].context == '00000000' + assert module.buildrequires[0].name == "platform" + assert module.buildrequires[0].stream == "f28" + assert module.buildrequires[0].version == "3" + assert module.buildrequires[0].context == "00000000" assert module.buildrequires[0].stream_version == 280000 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) - @patch('module_build_service.config.Config.yaml_submit_allowed', - new_callable=PropertyMock, return_value=True) + @patch("module_build_service.auth.get_user", return_value=user) + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) + @patch( + "module_build_service.config.Config.yaml_submit_allowed", + new_callable=PropertyMock, + return_value=True, + ) def test_submit_scratch_build_with_mmd_no_module_name( - self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user): + self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user + ): base_dir = path.abspath(path.dirname(__file__)) - mmd_path = path.join(base_dir, '..', 'staged_data', 'testmodule.yaml') - post_url = '/module-build-service/1/module-builds/' - with open(mmd_path, 'rb') as f: - modulemd = f.read().decode('utf-8') + mmd_path = path.join(base_dir, "..", "staged_data", "testmodule.yaml") + post_url = "/module-build-service/1/module-builds/" + with open(mmd_path, "rb") as f: + modulemd = f.read().decode("utf-8") - post_data = { - 'branch': 'master', - 'scratch': True, - 'modulemd': modulemd, - } + post_data = {"branch": "master", "scratch": True, "modulemd": modulemd} rv = self.client.post(post_url, data=json.dumps(post_data)) assert rv.status_code == 400 data = json.loads(rv.data) expected_error = { - 'error': 'Bad Request', - 'message': ('The module\'s name was not present in the modulemd file. Please use the ' - '"module_name" parameter'), - 'status': 400 + "error": "Bad Request", + "message": ( + "The module's name was not present in the modulemd file. 
Please use the " + '"module_name" parameter' + ), + "status": 400, } assert data == expected_error - @pytest.mark.parametrize('api_version', [1, 2]) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.config.Config.modules_allow_scratch', - new_callable=PropertyMock, return_value=True) - @patch('module_build_service.config.Config.yaml_submit_allowed', - new_callable=PropertyMock, return_value=False) + @pytest.mark.parametrize("api_version", [1, 2]) + @patch("module_build_service.auth.get_user", return_value=user) + @patch( + "module_build_service.config.Config.modules_allow_scratch", + new_callable=PropertyMock, + return_value=True, + ) + @patch( + "module_build_service.config.Config.yaml_submit_allowed", + new_callable=PropertyMock, + return_value=False, + ) def test_submit_scratch_build_with_mmd_yaml_not_allowed( - self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user, api_version): + self, mocked_allow_yaml, mocked_allow_scratch, mocked_get_user, api_version + ): base_dir = path.abspath(path.dirname(__file__)) - mmd_path = path.join(base_dir, '..', 'staged_data', 'testmodule.yaml') - post_url = '/module-build-service/{0}/module-builds/'.format(api_version) - with open(mmd_path, 'rb') as f: - modulemd = f.read().decode('utf-8') + mmd_path = path.join(base_dir, "..", "staged_data", "testmodule.yaml") + post_url = "/module-build-service/{0}/module-builds/".format(api_version) + with open(mmd_path, "rb") as f: + modulemd = f.read().decode("utf-8") post_data = { - 'branch': 'master', - 'scratch': True, - 'modulemd': modulemd, - 'module_name': str(splitext(basename(mmd_path))[0]), + "branch": "master", + "scratch": True, + "modulemd": modulemd, + "module_name": str(splitext(basename(mmd_path))[0]), } rv = self.client.post(post_url, data=json.dumps(post_data)) data = json.loads(rv.data) @@ -1975,168 +2236,195 @@ class TestViews: # but it should still succeed since yaml is always allowed for scratch builds assert rv.status_code == 201 - @pytest.mark.parametrize('branch, platform_override', ( - ('10', None), - ('10-rhel-8.0.0', 'el8.0.0'), - ('10-LP-product1.2', 'product1.2'), - )) - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch.object(module_build_service.config.Config, 'br_stream_override_regexes', - new_callable=PropertyMock) + @pytest.mark.parametrize( + "branch, platform_override", + (("10", None), ("10-rhel-8.0.0", "el8.0.0"), ("10-LP-product1.2", "product1.2")), + ) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch.object( + module_build_service.config.Config, "br_stream_override_regexes", new_callable=PropertyMock + ) def test_submit_build_dep_override_from_branch( - self, mocked_regexes, mocked_scm, mocked_get_user, branch, platform_override): + self, mocked_regexes, mocked_scm, mocked_get_user, branch, platform_override + ): """ Test that MBS will parse the SCM branch to determine the platform stream to buildrequire. 
""" - mocked_regexes.return_value = [ - r'(?:rh)(el)(?:\-)(\d+\.\d+\.\d+)$', - r'(?:\-LP\-)(.+)$' - ] + mocked_regexes.return_value = [r"(?:rh)(el)(?:\-)(\d+\.\d+\.\d+)$", r"(?:\-LP\-)(.+)$"] init_data(data_size=1, multiple_stream_versions=True) # Create a platform for whatever the override is so the build submission succeeds if platform_override: - platform_mmd = load_mmd_file(path.join(base_dir, 'staged_data', 'platform.yaml')) + platform_mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml")) platform_mmd.set_stream(platform_override) - if platform_override == 'el8.0.0': + if platform_override == "el8.0.0": xmd = from_variant_dict(platform_mmd.get_xmd()) - xmd['mbs']['virtual_streams'] = ['el8'] + xmd["mbs"]["virtual_streams"] = ["el8"] platform_mmd.set_xmd(dict_values(xmd)) import_mmd(db.session, platform_mmd) - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe' - 'efbd35246a81b6cb8d49') + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#" + "68931c90de214d9d13feefbd35246a81b6cb8d49" + ) - rv = self.client.post(post_url, data=json.dumps({'branch': branch, 'scmurl': scm_url})) + rv = self.client.post(post_url, data=json.dumps({"branch": branch, "scmurl": scm_url})) data = json.loads(rv.data) assert rv.status_code == 201 - mmd = module_build_service.utils.load_mmd(data[0]['modulemd']) + mmd = module_build_service.utils.load_mmd(data[0]["modulemd"]) assert len(mmd.get_dependencies()) == 1 dep = mmd.get_dependencies()[0] if platform_override: expected_br = {platform_override} else: - expected_br = {'f28'} - assert set(dep.get_buildrequires()['platform'].get()) == expected_br + expected_br = {"f28"} + assert set(dep.get_buildrequires()["platform"].get()) == expected_br # The requires should not change - assert set(dep.get_requires()['platform'].get()) == {'f28'} + assert set(dep.get_requires()["platform"].get()) == {"f28"} - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch.object(module_build_service.config.Config, 'br_stream_override_regexes', - new_callable=PropertyMock) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch.object( + module_build_service.config.Config, "br_stream_override_regexes", new_callable=PropertyMock + ) def test_submit_build_dep_override_from_branch_br_override( - self, mocked_regexes, mocked_scm, mocked_get_user): + self, mocked_regexes, mocked_scm, mocked_get_user + ): """ Test that when the branch includes a stream override for the platform module, that the provided "buildrequire_override" for the platform module takes precedence. 
""" - mocked_regexes.return_value = [r'(?:\-LP\-)(.+)$'] + mocked_regexes.return_value = [r"(?:\-LP\-)(.+)$"] init_data(data_size=1, multiple_stream_versions=True) # Create a platform for the override so the build submission succeeds - platform_mmd = load_mmd_file(path.join(base_dir, 'staged_data', 'platform.yaml')) - platform_mmd.set_stream('product1.3') + platform_mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml")) + platform_mmd.set_stream("product1.3") import_mmd(db.session, platform_mmd) - FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4") - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe' - 'efbd35246a81b6cb8d49') + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#" + "68931c90de214d9d13feefbd35246a81b6cb8d49" + ) json_input = { - 'branch': '10-LP-product1.2', - 'scmurl': scm_url, - 'buildrequire_overrides': {'platform': ['product1.3']} + "branch": "10-LP-product1.2", + "scmurl": scm_url, + "buildrequire_overrides": {"platform": ["product1.3"]}, } rv = self.client.post(post_url, data=json.dumps(json_input)) data = json.loads(rv.data) assert rv.status_code == 201 - mmd = module_build_service.utils.load_mmd(data[0]['modulemd']) + mmd = module_build_service.utils.load_mmd(data[0]["modulemd"]) assert len(mmd.get_dependencies()) == 1 dep = mmd.get_dependencies()[0] # The buildrequire_override value should take precedence over the stream override from # parsing the branch - assert set(dep.get_buildrequires()['platform'].get()) == {'product1.3'} + assert set(dep.get_buildrequires()["platform"].get()) == {"product1.3"} # The requires should not change - assert set(dep.get_requires()['platform'].get()) == {'f28'} + assert set(dep.get_requires()["platform"].get()) == {"f28"} - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_br_xyz_version_no_virtual_streams(self, mocked_scm, mocked_get_user): """ Test that when a build is submitted with a buildrequire on a base module with x.y.z versioning and no virtual streams, that the dependency resolution succeeds. 
""" init_data(data_size=1, multiple_stream_versions=True) - platform_mmd = load_mmd_file(path.join(base_dir, 'staged_data', 'platform.yaml')) - platform_mmd.set_stream('el8.0.0') + platform_mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml")) + platform_mmd.set_stream("el8.0.0") import_mmd(db.session, platform_mmd) - FakeSCM(mocked_scm, 'testmodule', 'testmodule_el800.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_el800.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe' - 'efbd35246a81b6cb8d49') + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#" + "68931c90de214d9d13feefbd35246a81b6cb8d49" + ) - rv = self.client.post(post_url, data=json.dumps({'branch': 'master', 'scmurl': scm_url})) + rv = self.client.post(post_url, data=json.dumps({"branch": "master", "scmurl": scm_url})) assert rv.status_code == 201 - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') - @patch('module_build_service.config.Config.allowed_disttag_marking_module_names', - new_callable=PropertyMock, return_value=['build']) + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") + @patch( + "module_build_service.config.Config.allowed_disttag_marking_module_names", + new_callable=PropertyMock, + return_value=["build"], + ) def test_submit_build_with_disttag_marking_in_xmd( - self, mocked_admmn, mocked_scm, mocked_get_user): + self, mocked_admmn, mocked_scm, mocked_get_user + ): """ Test that white-listed modules may set the disttag_marking in xmd.mbs. 
""" - FakeSCM(mocked_scm, 'build', 'build_metadata_module_not_processed.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4', branch='product1.2') + FakeSCM( + mocked_scm, + "build", + "build_metadata_module_not_processed.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + branch="product1.2", + ) - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe' - 'efbd35246a81b6cb8d49') + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#" + "68931c90de214d9d13feefbd35246a81b6cb8d49" + ) rv = self.client.post( - post_url, data=json.dumps({'branch': 'product1.2', 'scmurl': scm_url})) + post_url, data=json.dumps({"branch": "product1.2", "scmurl": scm_url})) assert rv.status_code == 201 data = json.loads(rv.data)[0] - mmd = module_build_service.utils.load_mmd(data['modulemd']) - assert mmd.get_xmd()['mbs']['disttag_marking'] == 'product12' + mmd = module_build_service.utils.load_mmd(data["modulemd"]) + assert mmd.get_xmd()["mbs"]["disttag_marking"] == "product12" - @patch('module_build_service.auth.get_user', return_value=user) - @patch('module_build_service.scm.SCM') + @patch("module_build_service.auth.get_user", return_value=user) + @patch("module_build_service.scm.SCM") def test_submit_build_request_platform_virtual_stream(self, mocked_scm, mocked_get_user): # Create a platform with el8.25.0 but with the virtual stream el8 - mmd = load_mmd_file(path.join(base_dir, 'staged_data', 'platform.yaml')) - mmd.set_stream('el8.25.0') + mmd = load_mmd_file(path.join(base_dir, "staged_data", "platform.yaml")) + mmd.set_stream("el8.25.0") xmd = from_variant_dict(mmd.get_xmd()) - xmd['mbs']['virtual_streams'] = ['el8'] + xmd["mbs"]["virtual_streams"] = ["el8"] mmd.set_xmd(dict_values(xmd)) import_mmd(db.session, mmd) # Use a testmodule that buildrequires platform:el8 - FakeSCM(mocked_scm, 'testmodule', 'testmodule_el8.yaml', - '620ec77321b2ea7b0d67d82992dda3e1d67055b4') + FakeSCM( + mocked_scm, + "testmodule", + "testmodule_el8.yaml", + "620ec77321b2ea7b0d67d82992dda3e1d67055b4", + ) - post_url = '/module-build-service/2/module-builds/' - scm_url = ('https://src.stg.fedoraproject.org/modules/testmodule.git?#68931c90de214d9d13fe' - 'efbd35246a81b6cb8d49') - rv = self.client.post(post_url, data=json.dumps({'branch': 'master', 'scmurl': scm_url})) + post_url = "/module-build-service/2/module-builds/" + scm_url = ( + "https://src.stg.fedoraproject.org/modules/testmodule.git?#" + "68931c90de214d9d13feefbd35246a81b6cb8d49" + ) + rv = self.client.post(post_url, data=json.dumps({"branch": "master", "scmurl": scm_url})) data = json.loads(rv.data) print(data) - mmd = load_mmd(data[0]['modulemd']) + mmd = load_mmd(data[0]["modulemd"]) assert len(mmd.get_dependencies()) == 1 dep = mmd.get_dependencies()[0] - assert set(dep.get_buildrequires()['platform'].get()) == set(['el8.25.0']) - assert set(dep.get_requires()['platform'].get()) == set(['el8']) + assert set(dep.get_buildrequires()["platform"].get()) == set(["el8.25.0"]) + assert set(dep.get_requires()["platform"].get()) == set(["el8"]) diff --git a/tox.ini b/tox.ini index 7f1ddcbd..c421aa1d 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,7 @@ envlist = flake8, py27, py3 [flake8] -ignore = E731,W504 +ignore = E731,W503 max-line-length = 100 exclude = ./.tox