Mirror of https://pagure.io/fm-orchestrator.git
Format the coding style across the codebase using "black" and manual tweaks
The main benefit of this commit is that the use of double quotes is now consistent.
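How a pass like this is typically produced (a minimal sketch; the 100-character line length and the target directories are assumptions, not taken from this commit):

    pip install black
    # Rewrite the tree in place with an assumed line length.
    black --line-length 100 conf/ module_build_service/
    # Dry run first if preferred: report files that would change and show the diff.
    black --line-length 100 --check --diff .

One black behavior worth knowing when reading the diff below: black prefers double quotes but leaves a string single-quoted when the string itself contains a double quote, which is why lines such as 'Kerberos: authentication failed with "{0}"' keep their single quotes.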
conf/config.py (108 changed lines)
@@ -4,37 +4,35 @@ from os import path
# declared properly somewhere/somehow
confdir = path.abspath(path.dirname(__file__))
# use parent dir as dbdir else fallback to current dir
dbdir = path.abspath(path.join(confdir, '..')) if confdir.endswith('conf') \
else confdir
dbdir = path.abspath(path.join(confdir, "..")) if confdir.endswith("conf") else confdir


class BaseConfiguration(object):
DEBUG = False
# Make this random (used to generate session keys)
SECRET_KEY = '74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0'
SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(path.join(
dbdir, 'module_build_service.db'))
SECRET_KEY = "74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0"
SQLALCHEMY_DATABASE_URI = "sqlite:///{0}".format(path.join(dbdir, "module_build_service.db"))
SQLALCHEMY_TRACK_MODIFICATIONS = True
# Where we should run when running "manage.py run" directly.
HOST = '0.0.0.0'
HOST = "0.0.0.0"
PORT = 5000

# Global network-related values, in seconds
NET_TIMEOUT = 120
NET_RETRY_INTERVAL = 30

SYSTEM = 'koji'
MESSAGING = 'fedmsg' # or amq
MESSAGING_TOPIC_PREFIX = ['org.fedoraproject.prod']
KOJI_CONFIG = '/etc/module-build-service/koji.conf'
KOJI_PROFILE = 'koji'
ARCHES = ['i686', 'armv7hl', 'x86_64']
SYSTEM = "koji"
MESSAGING = "fedmsg" # or amq
MESSAGING_TOPIC_PREFIX = ["org.fedoraproject.prod"]
KOJI_CONFIG = "/etc/module-build-service/koji.conf"
KOJI_PROFILE = "koji"
ARCHES = ["i686", "armv7hl", "x86_64"]
ALLOW_ARCH_OVERRIDE = False
KOJI_REPOSITORY_URL = 'https://kojipkgs.fedoraproject.org/repos'
KOJI_TAG_PREFIXES = ['module', 'scrmod']
KOJI_REPOSITORY_URL = "https://kojipkgs.fedoraproject.org/repos"
KOJI_TAG_PREFIXES = ["module", "scrmod"]
KOJI_ENABLE_CONTENT_GENERATOR = True
CHECK_FOR_EOL = False
PDC_URL = 'https://pdc.fedoraproject.org/rest_api/v1'
PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"
PDC_INSECURE = False
PDC_DEVELOP = True
SCMURLS = ["https://src.fedoraproject.org/modules/"]
@@ -50,30 +48,27 @@ class BaseConfiguration(object):

ALLOW_CUSTOM_SCMURLS = False

RPMS_DEFAULT_REPOSITORY = 'https://src.fedoraproject.org/rpms/'
RPMS_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/rpms/"
RPMS_ALLOW_REPOSITORY = False
RPMS_DEFAULT_CACHE = 'http://pkgs.fedoraproject.org/repo/pkgs/'
RPMS_DEFAULT_CACHE = "http://pkgs.fedoraproject.org/repo/pkgs/"
RPMS_ALLOW_CACHE = False

MODULES_DEFAULT_REPOSITORY = 'https://src.fedoraproject.org/modules/'
MODULES_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/modules/"
MODULES_ALLOW_REPOSITORY = False
MODULES_ALLOW_SCRATCH = False

ALLOWED_GROUPS = set([
'packager',
# 'modularity-wg',
])
ALLOWED_GROUPS = set(["packager"])

ALLOWED_GROUPS_TO_IMPORT_MODULE = set()

# Available backends are: console and file
LOG_BACKEND = 'console'
LOG_BACKEND = "console"

# Path to log file when LOG_BACKEND is set to "file".
LOG_FILE = 'module_build_service.log'
LOG_FILE = "module_build_service.log"

# Available log levels are: debug, info, warn, error.
LOG_LEVEL = 'info'
LOG_LEVEL = "info"

# Settings for Kerberos
KRB_KEYTAB = None
@@ -81,31 +76,32 @@ class BaseConfiguration(object):

# AMQ prefixed variables are required only while using 'amq' as messaging backend
# Addresses to listen to
AMQ_RECV_ADDRESSES = ['amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.koji',
('amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.'
'module_build_service')]
AMQ_RECV_ADDRESSES = [
"amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.koji",
"amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.module_build_service",
]
# Address for sending messages
AMQ_DEST_ADDRESS = ('amqps://messaging.mydomain.com/Consumer.m8y.'
'VirtualTopic.eng.module_build_service')
AMQ_CERT_FILE = '/etc/module_build_service/msg-m8y-client.crt'
AMQ_PRIVATE_KEY_FILE = '/etc/module_build_service/msg-m8y-client.key'
AMQ_TRUSTED_CERT_FILE = '/etc/module_build_service/Root-CA.crt'
AMQ_DEST_ADDRESS = \
"amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.module_build_service"
AMQ_CERT_FILE = "/etc/module_build_service/msg-m8y-client.crt"
AMQ_PRIVATE_KEY_FILE = "/etc/module_build_service/msg-m8y-client.key"
AMQ_TRUSTED_CERT_FILE = "/etc/module_build_service/Root-CA.crt"

# Disable Client Authorization
NO_AUTH = False

CACHE_DIR = '~/modulebuild/cache'
CACHE_DIR = "~/modulebuild/cache"


class TestConfiguration(BaseConfiguration):
BUILD_LOGS_DIR = '/tmp'
BUILD_LOGS_NAME_FORMAT = 'build-{id}.log'
LOG_BACKEND = 'console'
LOG_LEVEL = 'debug'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
BUILD_LOGS_DIR = "/tmp"
BUILD_LOGS_NAME_FORMAT = "build-{id}.log"
LOG_BACKEND = "console"
LOG_LEVEL = "debug"
SQLALCHEMY_DATABASE_URI = "sqlite://"
DEBUG = True
MESSAGING = 'in_memory'
PDC_URL = 'https://pdc.fedoraproject.org/rest_api/v1'
MESSAGING = "in_memory"
PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"

# Global network-related values, in seconds
NET_TIMEOUT = 3
@@ -114,19 +110,19 @@ class TestConfiguration(BaseConfiguration):
SCM_NET_TIMEOUT = 0.1
SCM_NET_RETRY_INTERVAL = 0.1

KOJI_CONFIG = './conf/koji.conf'
KOJI_PROFILE = 'staging'
SERVER_NAME = 'localhost'
KOJI_CONFIG = "./conf/koji.conf"
KOJI_PROFILE = "staging"
SERVER_NAME = "localhost"

KOJI_REPOSITORY_URL = 'https://kojipkgs.stg.fedoraproject.org/repos'
KOJI_REPOSITORY_URL = "https://kojipkgs.stg.fedoraproject.org/repos"
SCMURLS = ["https://src.stg.fedoraproject.org/modules/"]
AUTH_METHOD = 'oidc'
RESOLVER = 'db'
AUTH_METHOD = "oidc"
RESOLVER = "db"

ALLOWED_GROUPS_TO_IMPORT_MODULE = set(['mbs-import-module'])
GREENWAVE_DECISION_CONTEXT = 'osci_compose_gate_modules'
ALLOWED_GROUPS_TO_IMPORT_MODULE = set(["mbs-import-module"])
GREENWAVE_DECISION_CONTEXT = "osci_compose_gate_modules"

STREAM_SUFFIXES = {r'^el\d+\.\d+\.\d+\.z$': 0.1}
STREAM_SUFFIXES = {r"^el\d+\.\d+\.\d+\.z$": 0.1}


class ProdConfiguration(BaseConfiguration):
@@ -134,22 +130,22 @@ class ProdConfiguration(BaseConfiguration):


class LocalBuildConfiguration(BaseConfiguration):
LOG_LEVEL = 'debug'
MESSAGING = 'in_memory'
LOG_LEVEL = "debug"
MESSAGING = "in_memory"

ARCH_AUTODETECT = True
ARCH_FALLBACK = 'x86_64'
ARCH_FALLBACK = "x86_64"

ALLOW_CUSTOM_SCMURLS = True
RESOLVER = 'mbs'
RESOLVER = "mbs"
RPMS_ALLOW_REPOSITORY = True
MODULES_ALLOW_REPOSITORY = True


class OfflineLocalBuildConfiguration(LocalBuildConfiguration):
RESOLVER = 'local'
RESOLVER = "local"


class DevConfiguration(LocalBuildConfiguration):
DEBUG = True
LOG_BACKEND = 'console'
LOG_BACKEND = "console"

@@ -2,16 +2,8 @@ config = dict(
logging=dict(
loggers=dict(
# Quiet this guy down...
requests={
"level": "WARNING",
"propagate": True,
"handlers": ["console"],
},
module_build_service={
"level": "INFO",
"propagate": True,
"handlers": ["console"],
},
),
),
requests={"level": "WARNING", "propagate": True, "handlers": ["console"]},
module_build_service={"level": "INFO", "propagate": True, "handlers": ["console"]},
)
)
)

@@ -1,4 +1 @@
config = {
'mbsconsumer': True,
'mbspoller': True,
}
config = {"mbsconsumer": True, "mbspoller": True}

@@ -3,10 +3,8 @@ import os
config = {
# Just for dev.
"validate_signatures": False,

# Talk to the relay, so things also make it to composer.stg in our dev env
"active": True,

# Since we're in active mode, we don't need to declare any of our own
# passive endpoints. This placeholder value needs to be here for the tests
# to pass in Jenkins, though. \o/
@@ -14,10 +12,9 @@ config = {
"fedora-infrastructure": [
# Just listen to staging for now, not to production (spam!)
# "tcp://hub.fedoraproject.org:9940",
"tcp://stg.fedoraproject.org:9940",
],
"tcp://stg.fedoraproject.org:9940"
]
},

# Start of code signing configuration
# 'sign_messages': True,
# 'validate_signatures': True,
@@ -37,12 +34,11 @@ config = {
}

# developer's instance
if 'MODULE_BUILD_SERVICE_DEVELOPER_ENV' in os.environ and \
os.environ['MODULE_BUILD_SERVICE_DEVELOPER_ENV'].lower() in (
'1', 'on', 'true', 'y', 'yes'):
config['endpoints']['relay_outbound'] = ["tcp://fedmsg-relay:2001"]
config['relay_inbound'] = ["tcp://fedmsg-relay:2003"]
true_options = ("1", "on", "true", "y", "yes")
if os.environ.get("MODULE_BUILD_SERVICE_DEVELOPER_ENV", "").lower() in true_options:
config["endpoints"]["relay_outbound"] = ["tcp://fedmsg-relay:2001"]
config["relay_inbound"] = ["tcp://fedmsg-relay:2003"]
else:
# These configuration values are reasonable for most other configurations.
config['endpoints']['relay_outbound'] = ["tcp://127.0.0.1:4001"]
config['relay_inbound'] = ["tcp://127.0.0.1:2003"]
config["endpoints"]["relay_outbound"] = ["tcp://127.0.0.1:4001"]
config["relay_inbound"] = ["tcp://127.0.0.1:2003"]

@@ -46,11 +46,10 @@ from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.pool import StaticPool
from logging import getLogger
import gi # noqa
gi.require_version('Modulemd', '1.0') # noqa
gi.require_version("Modulemd", "1.0") # noqa
from gi.repository import Modulemd # noqa

from module_build_service.logger import (
init_logging, ModuleBuildLogs, level_flags, MBSLogger)
from module_build_service.logger import init_logging, ModuleBuildLogs, level_flags, MBSLogger

from module_build_service.errors import (
ValidationError, Unauthorized, UnprocessableEntity, Conflict, NotFound,
@@ -59,9 +58,9 @@ from module_build_service.config import init_config
from module_build_service.proxy import ReverseProxy

try:
version = pkg_resources.get_distribution('module-build-service').version
version = pkg_resources.get_distribution("module-build-service").version
except pkg_resources.DistributionNotFound:
version = 'unknown'
version = "unknown"
api_version = 2

app = Flask(__name__)
@@ -77,12 +76,13 @@ class MBSSQLAlchemy(SQLAlchemy):

This is used *only* during tests to make them faster.
"""

def apply_driver_hacks(self, app, info, options):
if info.drivername == 'sqlite' and info.database in (None, '', ':memory:'):
options['poolclass'] = StaticPool
options['connect_args'] = {'check_same_thread': False}
if info.drivername == "sqlite" and info.database in (None, "", ":memory:"):
options["poolclass"] = StaticPool
options["connect_args"] = {"check_same_thread": False}
try:
del options['pool_size']
del options["pool_size"]
except KeyError:
pass

@@ -107,59 +107,56 @@ def create_app(debug=False, verbose=False, quiet=False):

def load_views():
from module_build_service import views

assert views


@app.errorhandler(ValidationError)
def validationerror_error(e):
"""Flask error handler for ValidationError exceptions"""
return json_error(400, 'Bad Request', str(e))
return json_error(400, "Bad Request", str(e))


@app.errorhandler(Unauthorized)
def unauthorized_error(e):
"""Flask error handler for NotAuthorized exceptions"""
return json_error(401, 'Unauthorized', str(e))
return json_error(401, "Unauthorized", str(e))


@app.errorhandler(Forbidden)
def forbidden_error(e):
"""Flask error handler for Forbidden exceptions"""
return json_error(403, 'Forbidden', str(e))
return json_error(403, "Forbidden", str(e))


@app.errorhandler(RuntimeError)
def runtimeerror_error(e):
"""Flask error handler for RuntimeError exceptions"""
log.exception("RuntimeError exception raised")
return json_error(500, 'Internal Server Error', str(e))
return json_error(500, "Internal Server Error", str(e))


@app.errorhandler(UnprocessableEntity)
def unprocessableentity_error(e):
"""Flask error handler for UnprocessableEntity exceptions"""
return json_error(422, 'Unprocessable Entity', str(e))
return json_error(422, "Unprocessable Entity", str(e))


@app.errorhandler(Conflict)
def conflict_error(e):
"""Flask error handler for Conflict exceptions"""
return json_error(409, 'Conflict', str(e))
return json_error(409, "Conflict", str(e))


@app.errorhandler(NotFound)
def notfound_error(e):
"""Flask error handler for Conflict exceptions"""
return json_error(404, 'Not Found', str(e))
return json_error(404, "Not Found", str(e))


init_logging(conf)
log = MBSLogger()
build_logs = ModuleBuildLogs(
conf.build_logs_dir,
conf.build_logs_name_format,
conf.log_level,
)
build_logs = ModuleBuildLogs(conf.build_logs_dir, conf.build_logs_name_format, conf.log_level)


def get_url_for(*args, **kwargs):
@@ -171,11 +168,13 @@ def get_url_for(*args, **kwargs):

# Localhost is right URL only when the scheduler runs on the same
# system as the web views.
app.config['SERVER_NAME'] = 'localhost'
app.config["SERVER_NAME"] = "localhost"
with app.app_context():
log.debug("WARNING: get_url_for() has been called without the Flask "
"app_context. That can lead to SQLAlchemy errors caused by "
"multiple session being used in the same time.")
log.debug(
"WARNING: get_url_for() has been called without the Flask "
"app_context. That can lead to SQLAlchemy errors caused by "
"multiple session being used in the same time."
)
return url_for(*args, **kwargs)



@@ -31,6 +31,7 @@ import ssl
import requests
import kerberos
from flask import Response, g

# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
@@ -50,12 +51,12 @@ except ImportError:


client_secrets = None
region = make_region().configure('dogpile.cache.memory')
region = make_region().configure("dogpile.cache.memory")


def _json_loads(content):
if not isinstance(content, str):
content = content.decode('utf-8')
content = content.decode("utf-8")
return json.loads(content)


@@ -67,8 +68,7 @@ def _load_secrets():
if "OIDC_CLIENT_SECRETS" not in app.config:
raise Forbidden("OIDC_CLIENT_SECRETS must be set in server config.")

secrets = _json_loads(open(app.config['OIDC_CLIENT_SECRETS'],
'r').read())
secrets = _json_loads(open(app.config["OIDC_CLIENT_SECRETS"], "r").read())
client_secrets = list(secrets.values())[0]


@@ -79,13 +79,15 @@ def _get_token_info(token):
if not client_secrets:
return None

request = {'token': token,
'token_type_hint': 'Bearer',
'client_id': client_secrets['client_id'],
'client_secret': client_secrets['client_secret']}
headers = {'Content-type': 'application/x-www-form-urlencoded'}
request = {
"token": token,
"token_type_hint": "Bearer",
"client_id": client_secrets["client_id"],
"client_secret": client_secrets["client_secret"],
}
headers = {"Content-type": "application/x-www-form-urlencoded"}

resp = requests.post(client_secrets['token_introspection_uri'], data=request, headers=headers)
resp = requests.post(client_secrets["token_introspection_uri"], data=request, headers=headers)
return resp.json()


@@ -96,8 +98,8 @@ def _get_user_info(token):
if not client_secrets:
return None

headers = {'authorization': 'Bearer ' + token}
resp = requests.get(client_secrets['userinfo_uri'], headers=headers)
headers = {"authorization": "Bearer " + token}
resp = requests.get(client_secrets["userinfo_uri"], headers=headers)
return resp.json()


@@ -110,8 +112,8 @@ def get_user_oidc(request):
if "authorization" not in request.headers:
raise Unauthorized("No 'authorization' header found.")

header = request.headers['authorization'].strip()
prefix = 'Bearer '
header = request.headers["authorization"].strip()
prefix = "Bearer "
if not header.startswith(prefix):
raise Unauthorized("Authorization headers must start with %r" % prefix)

@@ -129,16 +131,15 @@ def get_user_oidc(request):
if "OIDC_REQUIRED_SCOPE" not in app.config:
raise Forbidden("OIDC_REQUIRED_SCOPE must be set in server config.")

presented_scopes = data['scope'].split(' ')
presented_scopes = data["scope"].split(" ")
required_scopes = [
'openid',
'https://id.fedoraproject.org/scope/groups',
"openid",
"https://id.fedoraproject.org/scope/groups",
app.config["OIDC_REQUIRED_SCOPE"],
]
for scope in required_scopes:
if scope not in presented_scopes:
raise Unauthorized("Required OIDC scope %r not present: %r" % (
scope, presented_scopes))
raise Unauthorized("Required OIDC scope %r not present: %r" % (scope, presented_scopes))

try:
extended_data = _get_user_info(token)
@@ -153,7 +154,7 @@ def get_user_oidc(request):
groups = set()
else:
try:
groups = set(extended_data['groups'])
groups = set(extended_data["groups"])
except Exception as e:
error = "Could not find groups in UserInfo from OIDC %s" % str(e)
log.exception(extended_data)
@@ -175,19 +176,20 @@ class KerberosAuthenticate(object):
# If the config specifies a keytab to use, then override the KRB5_KTNAME
# environment variable
if conf.kerberos_keytab:
os.environ['KRB5_KTNAME'] = conf.kerberos_keytab
os.environ["KRB5_KTNAME"] = conf.kerberos_keytab

if 'KRB5_KTNAME' in os.environ:
if "KRB5_KTNAME" in os.environ:
try:
principal = kerberos.getServerPrincipalDetails('HTTP', hostname)
principal = kerberos.getServerPrincipalDetails("HTTP", hostname)
except kerberos.KrbError as error:
raise Unauthorized(
'Kerberos: authentication failed with "{0}"'.format(str(error)))
raise Unauthorized('Kerberos: authentication failed with "{0}"'.format(str(error)))

log.debug('Kerberos: server is identifying as "{0}"'.format(principal))
else:
raise Unauthorized('Kerberos: set the config value of "KERBEROS_KEYTAB" or the '
'environment variable "KRB5_KTNAME" to your keytab file')
raise Unauthorized(
'Kerberos: set the config value of "KERBEROS_KEYTAB" or the '
'environment variable "KRB5_KTNAME" to your keytab file'
)

def _gssapi_authenticate(self, token):
"""
@@ -201,23 +203,23 @@ class KerberosAuthenticate(object):
try:
rc, state = kerberos.authGSSServerInit(self.service_name)
if rc != kerberos.AUTH_GSS_COMPLETE:
log.error('Kerberos: unable to initialize server context')
log.error("Kerberos: unable to initialize server context")
return None

rc = kerberos.authGSSServerStep(state, token)
if rc == kerberos.AUTH_GSS_COMPLETE:
log.debug('Kerberos: completed GSSAPI negotiation')
log.debug("Kerberos: completed GSSAPI negotiation")
ctx.kerberos_token = kerberos.authGSSServerResponse(state)
ctx.kerberos_user = kerberos.authGSSServerUserName(state)
return rc
elif rc == kerberos.AUTH_GSS_CONTINUE:
log.debug('Kerberos: continuing GSSAPI negotiation')
log.debug("Kerberos: continuing GSSAPI negotiation")
return kerberos.AUTH_GSS_CONTINUE
else:
log.debug('Kerberos: unable to step server context')
log.debug("Kerberos: unable to step server context")
return None
except kerberos.GSSError as error:
log.error('Kerberos: unable to authenticate: {0}'.format(str(error)))
log.error("Kerberos: unable to authenticate: {0}".format(str(error)))
return None
finally:
if state:
@@ -235,25 +237,25 @@ class KerberosAuthenticate(object):
kerberos_user = ctx.kerberos_user
kerberos_token = ctx.kerberos_token
elif rc != kerberos.AUTH_GSS_CONTINUE:
raise Forbidden('Invalid Kerberos ticket')
raise Forbidden("Invalid Kerberos ticket")

return kerberos_user, kerberos_token


def get_user_kerberos(request):
user = None
if 'Authorization' not in request.headers:
response = Response('Unauthorized', 401, {'WWW-Authenticate': 'Negotiate'})
if "Authorization" not in request.headers:
response = Response("Unauthorized", 401, {"WWW-Authenticate": "Negotiate"})
exc = FlaskUnauthorized()
# For some reason, certain versions of werkzeug raise an exception when passing `response`
# in the constructor. This is a work-around.
exc.response = response
raise exc
header = request.headers.get('Authorization')
token = ''.join(header.strip().split()[1:])
header = request.headers.get("Authorization")
token = "".join(header.strip().split()[1:])
user, kerberos_token = KerberosAuthenticate().process_request(token)
# Remove the realm
user = user.split('@')[0]
user = user.split("@")[0]
# If the user is part of the whitelist, then the group membership check is skipped
if user in conf.allowed_users:
groups = []
@@ -275,20 +277,21 @@ def get_ldap_group_membership(uid):
class Ldap(object):
""" A class that handles LDAP connections and queries
"""

connection = None
base_dn = None

def __init__(self):
if not conf.ldap_uri:
raise Forbidden('LDAP_URI must be set in server config.')
raise Forbidden("LDAP_URI must be set in server config.")
if conf.ldap_groups_dn:
self.base_dn = conf.ldap_groups_dn
else:
raise Forbidden('LDAP_GROUPS_DN must be set in server config.')
raise Forbidden("LDAP_GROUPS_DN must be set in server config.")

if conf.ldap_uri.startswith('ldaps://'):
tls = ldap3.Tls(ca_certs_file='/etc/pki/tls/certs/ca-bundle.crt',
validate=ssl.CERT_REQUIRED)
if conf.ldap_uri.startswith("ldaps://"):
tls = ldap3.Tls(
ca_certs_file="/etc/pki/tls/certs/ca-bundle.crt", validate=ssl.CERT_REQUIRED)
server = ldap3.Server(conf.ldap_uri, use_ssl=True, tls=tls)
else:
server = ldap3.Server(conf.ldap_uri)
@@ -296,26 +299,28 @@ class Ldap(object):
try:
self.connection.open()
except ldap3.core.exceptions.LDAPSocketOpenError as error:
log.error('The connection to "{0}" failed. The following error was raised: {1}'
.format(conf.ldap_uri, str(error)))
raise Forbidden('The connection to the LDAP server failed. Group membership '
'couldn\'t be obtained.')
log.error(
'The connection to "{0}" failed. The following error was raised: {1}'.format(
conf.ldap_uri, str(error)))
raise Forbidden(
"The connection to the LDAP server failed. Group membership couldn't be obtained.")

def get_user_membership(self, uid):
""" Gets the group membership of a user
:param uid: a string of the uid of the user
:return: a list of common names of the posixGroups the user is a member of
"""
ldap_filter = '(memberUid={0})'.format(uid)
ldap_filter = "(memberUid={0})".format(uid)
# Only get the groups in the base container/OU
self.connection.search(self.base_dn, ldap_filter, search_scope=ldap3.LEVEL,
attributes=['cn'])
self.connection.search(
self.base_dn, ldap_filter, search_scope=ldap3.LEVEL, attributes=["cn"])
groups = self.connection.response
try:
return [group['attributes']['cn'][0] for group in groups]
return [group["attributes"]["cn"][0] for group in groups]
except KeyError:
log.exception('The LDAP groups could not be determined based on the search results '
'of "{0}"'.format(str(groups)))
log.exception(
"The LDAP groups could not be determined based on the search results "
'of "{0}"'.format(str(groups)))
return []


@@ -326,11 +331,11 @@ def get_user(request):
membership such as ('mprahl', {'factory2', 'devel'})
"""
if conf.no_auth is True:
log.debug('Authorization is disabled.')
return 'anonymous', {'packager'}
log.debug("Authorization is disabled.")
return "anonymous", {"packager"}

if "user" not in g and "groups" not in g:
get_user_func_name = 'get_user_{0}'.format(conf.auth_method)
get_user_func_name = "get_user_{0}".format(conf.auth_method)
get_user_func = globals().get(get_user_func_name)
if not get_user_func:
raise RuntimeError('The function "{0}" is not implemented'.format(get_user_func_name))

@@ -33,11 +33,11 @@ def jsonify(*args, **kwargs):
# input only since 0.11, but RHEL7 contains 0.10.1.
# https://github.com/pallets/flask/commit/daceb3e3a028b4b408c4bbdbdef0047f1de3a7c9
indent = None
separators = (',', ':')
separators = (",", ":")

if module_build_service.app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not request.is_xhr:
if module_build_service.app.config["JSONIFY_PRETTYPRINT_REGULAR"] and not request.is_xhr:
indent = 2
separators = (', ', ': ')
separators = (", ", ": ")

if args and kwargs:
raise TypeError("jsonify() behavior undefined when passed both args and kwargs")
@@ -51,6 +51,5 @@ def jsonify(*args, **kwargs):
# Note that we add '\n' to end of response
# (see https://github.com/mitsuhiko/flask/pull/1262)
rv = module_build_service.app.response_class(
(dumps(data, indent=indent, separators=separators), '\n'),
mimetype='application/json')
(dumps(data, indent=indent, separators=separators), "\n"), mimetype="application/json")
return rv

@@ -52,6 +52,7 @@ logging.basicConfig(level=logging.DEBUG)

def get_session(config, login=True):
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

return KojiModuleBuilder.get_session(config, login=login)


@@ -69,7 +70,7 @@ def strip_suffixes(s, suffixes):
"""
for suffix in suffixes:
if s.endswith(suffix):
s = s[:-len(suffix)]
s = s[: -len(suffix)]
break
return s

@@ -79,8 +80,9 @@ def koji_retrying_multicall_map(*args, **kwargs):
Wrapper around KojiModuleBuilder.koji_retrying_multicall_map, because
we cannot import that method normally because of import loop.
"""
from module_build_service.builder.KojiModuleBuilder import \
koji_retrying_multicall_map as multicall
from module_build_service.builder.KojiModuleBuilder import (
koji_retrying_multicall_map as multicall,)

return multicall(*args, **kwargs)


@@ -109,7 +111,7 @@ class KojiContentGenerator(object):
return "<KojiContentGenerator module: %s>" % (self.module_name)

@staticmethod
def parse_rpm_output(output, tags, separator=';'):
def parse_rpm_output(output, tags, separator=";"):
"""
Copied from:
https://github.com/projectatomic/atomic-reactor/blob/master/atomic_reactor/plugins/exit_koji_promote.py
@@ -130,42 +132,42 @@ class KojiContentGenerator(object):
except ValueError:
return None

if value == '(none)':
if value == "(none)":
return None

return value

components = []
sigmarker = 'Key ID '
sigmarker = "Key ID "
for rpm in output:
fields = rpm.rstrip('\n').split(separator)
fields = rpm.rstrip("\n").split(separator)
if len(fields) < len(tags):
continue

signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
signature = field("SIGPGP:pgpsig") or field("SIGGPG:pgpsig")
if signature:
parts = signature.split(sigmarker, 1)
if len(parts) > 1:
signature = parts[1]

component_rpm = {
u'type': u'rpm',
u'name': field('NAME'),
u'version': field('VERSION'),
u'release': field('RELEASE'),
u'arch': field('ARCH'),
u'sigmd5': field('SIGMD5'),
u'signature': signature,
u"type": u"rpm",
u"name": field("NAME"),
u"version": field("VERSION"),
u"release": field("RELEASE"),
u"arch": field("ARCH"),
u"sigmd5": field("SIGMD5"),
u"signature": signature,
}

# Special handling for epoch as it must be an integer or None
epoch = field('EPOCH')
epoch = field("EPOCH")
if epoch is not None:
epoch = int(epoch)

component_rpm[u'epoch'] = epoch
component_rpm[u"epoch"] = epoch

if component_rpm['name'] != 'gpg-pubkey':
if component_rpm["name"] != "gpg-pubkey":
components.append(component_rpm)

return components
@@ -177,28 +179,25 @@ class KojiContentGenerator(object):

Build a list of installed RPMs in the format required for the
metadata.
""" # noqa
""" # noqa

tags = [
'NAME',
'VERSION',
'RELEASE',
'ARCH',
'EPOCH',
'SIGMD5',
'SIGPGP:pgpsig',
'SIGGPG:pgpsig',
"NAME",
"VERSION",
"RELEASE",
"ARCH",
"EPOCH",
"SIGMD5",
"SIGPGP:pgpsig",
"SIGGPG:pgpsig",
]

sep = ';'
sep = ";"
fmt = sep.join(["%%{%s}" % tag for tag in tags])
cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
with open('/dev/null', 'r+') as devnull:
p = subprocess.Popen(cmd,
shell=True,
stdin=devnull,
stdout=subprocess.PIPE,
stderr=devnull)
with open("/dev/null", "r+") as devnull:
p = subprocess.Popen(
cmd, shell=True, stdin=devnull, stdout=subprocess.PIPE, stderr=devnull)

(stdout, stderr) = p.communicate()
status = p.wait()
@@ -216,16 +215,12 @@ class KojiContentGenerator(object):
# TODO: In libmodulemd v1.5, there'll be a property we can check instead
# of using RPM
try:
libmodulemd_version = subprocess.check_output(
['rpm', '--queryformat', '%{VERSION}', '-q', 'libmodulemd'],
universal_newlines=True).strip()
cmd = ["rpm", "--queryformat", "%{VERSION}", "-q", "libmodulemd"]
libmodulemd_version = subprocess.check_output(cmd, universal_newlines=True).strip()
except subprocess.CalledProcessError:
libmodulemd_version = 'unknown'
libmodulemd_version = "unknown"

return [{
'name': 'libmodulemd',
'version': libmodulemd_version
}]
return [{"name": "libmodulemd", "version": libmodulemd_version}]

def _koji_rpms_in_tag(self, tag):
""" Return the list of koji rpms in a tag. """
@@ -257,17 +252,20 @@ class KojiContentGenerator(object):
# Prepare the arguments for Koji multicall.
# We will call session.getRPMHeaders(...) for each SRC RPM to get exclusivearch,
# excludearch and license headers.
multicall_kwargs = [{"rpmID": rpm_id,
"headers": ["exclusivearch", "excludearch", "license"]}
for rpm_id in src_rpms.keys()]
multicall_kwargs = [
{"rpmID": rpm_id, "headers": ["exclusivearch", "excludearch", "license"]}
for rpm_id in src_rpms.keys()
]
# For each binary RPM, we only care about the "license" header.
multicall_kwargs += [{"rpmID": rpm_id, "headers": ["license"]}
for rpm_id in binary_rpms.keys()]
multicall_kwargs += [
{"rpmID": rpm_id, "headers": ["license"]} for rpm_id in binary_rpms.keys()
]
rpms_headers = koji_retrying_multicall_map(
session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs)
session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs
)

# Temporary dict with build_id as a key to find builds easily.
builds = {build['build_id']: build for build in builds}
builds = {build["build_id"]: build for build in builds}

# Create a mapping of build IDs to SRPM NEVRAs so that the for loop below can directly
# access these values when adding the `srpm_nevra` key to the returned RPMs
@@ -280,8 +278,7 @@ class KojiContentGenerator(object):
# also other useful data from the Build associated with the RPM.
for rpm, headers in zip(chain(src_rpms.values(), binary_rpms.values()), rpms_headers):
if not headers:
raise RuntimeError(
"No RPM headers received from Koji for RPM %s" % rpm["name"])
raise RuntimeError("No RPM headers received from Koji for RPM %s" % rpm["name"])
if "license" not in headers:
raise RuntimeError(
"No RPM 'license' header received from Koji for RPM %s" % rpm["name"])
@@ -291,44 +288,42 @@ class KojiContentGenerator(object):
build["excludearch"] = headers["excludearch"]

rpm["license"] = headers["license"]
rpm['srpm_name'] = build['name']
rpm['srpm_nevra'] = build_id_to_srpm_nevra[rpm["build_id"]]
rpm['exclusivearch'] = build['exclusivearch']
rpm['excludearch'] = build['excludearch']
rpm["srpm_name"] = build["name"]
rpm["srpm_nevra"] = build_id_to_srpm_nevra[rpm["build_id"]]
rpm["exclusivearch"] = build["exclusivearch"]
rpm["excludearch"] = build["excludearch"]

return rpms

def _get_build(self):
ret = {}
ret[u'name'] = self.module.name
ret[u"name"] = self.module.name
if self.devel:
ret['name'] += "-devel"
ret[u'version'] = self.module.stream.replace("-", "_")
ret["name"] += "-devel"
ret[u"version"] = self.module.stream.replace("-", "_")
# Append the context to the version to make NVRs of modules unique in the event of
# module stream expansion
ret[u'release'] = '{0}.{1}'.format(self.module.version, self.module.context)
ret[u'source'] = self.module.scmurl
ret[u'start_time'] = calendar.timegm(
self.module.time_submitted.utctimetuple())
ret[u'end_time'] = calendar.timegm(
self.module.time_completed.utctimetuple())
ret[u'extra'] = {
ret[u"release"] = "{0}.{1}".format(self.module.version, self.module.context)
ret[u"source"] = self.module.scmurl
ret[u"start_time"] = calendar.timegm(self.module.time_submitted.utctimetuple())
ret[u"end_time"] = calendar.timegm(self.module.time_completed.utctimetuple())
ret[u"extra"] = {
u"typeinfo": {
u"module": {
u"module_build_service_id": self.module.id,
u"content_koji_tag": self.module.koji_tag,
u"modulemd_str": self.module.modulemd,
u"name": ret['name'],
u"name": ret["name"],
u"stream": self.module.stream,
u"version": self.module.version,
u"context": self.module.context
u"context": self.module.context,
}
}
}
session = get_session(self.config, login=False)
# Only add the CG build owner if the user exists in Koji
if session.getUser(self.owner):
ret[u'owner'] = self.owner
ret[u"owner"] = self.owner
return ret

def _get_buildroot(self):
@@ -338,18 +333,15 @@ class KojiContentGenerator(object):
u"id": 1,
u"host": {
u"arch": text_type(platform.machine()),
u'os': u"%s %s" % (distro[0], distro[1])
u"os": u"%s %s" % (distro[0], distro[1]),
},
u"content_generator": {
u"name": u"module-build-service",
u"version": text_type(version)
},
u"container": {
u"arch": text_type(platform.machine()),
u"type": u"none"
u"version": text_type(version),
},
u"container": {u"arch": text_type(platform.machine()), u"type": u"none"},
u"components": self.__get_rpms(),
u"tools": self.__get_tools()
u"tools": self.__get_tools(),
}
return ret

@@ -368,7 +360,7 @@ class KojiContentGenerator(object):
u"arch": rpm["arch"],
u"epoch": rpm["epoch"],
u"sigmd5": rpm["payloadhash"],
u"type": u"rpm"
u"type": u"rpm",
}

def _get_arch_mmd_output(self, output_path, arch):
@@ -385,15 +377,11 @@ class KojiContentGenerator(object):
:return: Dictionary with record in "output" list.
"""
ret = {
'buildroot_id': 1,
'arch': arch,
'type': 'file',
'extra': {
'typeinfo': {
'module': {}
}
},
'checksum_type': 'md5',
"buildroot_id": 1,
"arch": arch,
"type": "file",
"extra": {"typeinfo": {"module": {}}},
"checksum_type": "md5",
}

# Noarch architecture represents "generic" modulemd.txt.
@@ -406,13 +394,13 @@ class KojiContentGenerator(object):
# parse it to get the Modulemd instance.
mmd_path = os.path.join(output_path, mmd_filename)
try:
with open(mmd_path, 'rb') as mmd_f:
with open(mmd_path, "rb") as mmd_f:
raw_data = mmd_f.read()
data = to_text_type(raw_data)
mmd = load_mmd(data)
ret['filename'] = mmd_filename
ret['filesize'] = len(raw_data)
ret['checksum'] = hashlib.md5(raw_data).hexdigest()
ret["filename"] = mmd_filename
ret["filesize"] = len(raw_data)
ret["checksum"] = hashlib.md5(raw_data).hexdigest()
except IOError:
if arch == "src":
# This might happen in case the Module is submitted directly
@@ -428,8 +416,7 @@ class KojiContentGenerator(object):
if arch in ["noarch", "src"]:
# For generic noarch/src modulemd, include all the RPMs.
for rpm in self.rpms:
components.append(
self._koji_rpm_to_component_record(rpm))
components.append(self._koji_rpm_to_component_record(rpm))
else:
# Check the RPM artifacts built for this architecture in modulemd file,
# find the matching RPM in the `rpms_dict` coming from Koji and use it
@@ -438,11 +425,10 @@ class KojiContentGenerator(object):
# RPM sigmd5 signature is not stored in MMD.
for rpm in mmd.get_rpm_artifacts().get():
if rpm not in self.rpms_dict:
raise RuntimeError("RPM %s found in the final modulemd but not "
"in Koji tag." % rpm)
raise RuntimeError(
"RPM %s found in the final modulemd but not in Koji tag." % rpm)
tag_rpm = self.rpms_dict[rpm]
components.append(
self._koji_rpm_to_component_record(tag_rpm))
components.append(self._koji_rpm_to_component_record(tag_rpm))
ret["components"] = components
return ret

@@ -455,18 +441,18 @@ class KojiContentGenerator(object):

try:
log_path = os.path.join(output_path, "build.log")
with open(log_path, 'rb') as build_log:
with open(log_path, "rb") as build_log:
checksum = hashlib.md5(build_log.read()).hexdigest()
stat = os.stat(log_path)
ret.append(
{
u'buildroot_id': 1,
u'arch': u'noarch',
u'type': u'log',
u'filename': u'build.log',
u'filesize': stat.st_size,
u'checksum_type': u'md5',
u'checksum': checksum
u"buildroot_id": 1,
u"arch": u"noarch",
u"type": u"log",
u"filename": u"build.log",
u"filesize": stat.st_size,
u"checksum_type": u"md5",
u"checksum": checksum,
}
)
except IOError:
@@ -480,7 +466,7 @@ class KojiContentGenerator(object):
u"metadata_version": 0,
u"buildroots": [self._get_buildroot()],
u"build": self._get_build(),
u"output": self._get_output(output_path)
u"output": self._get_output(output_path),
}

return ret
@@ -567,12 +553,10 @@ class KojiContentGenerator(object):
# For example:
# "x86_64" -> ['athlon', 'i386', 'i586', 'i486', 'i686']
# "i686" -> []
multilib_arches = set(compatible_arches) - set(
pungi.arch.get_compatible_arches(arch))
multilib_arches = set(compatible_arches) - set(pungi.arch.get_compatible_arches(arch))
# List of architectures that should be in ExclusiveArch tag or missing
# from ExcludeArch tag. Multilib should not be enabled here.
exclusive_arches = pungi.arch.get_valid_arches(
arch, multilib=False, add_noarch=False)
exclusive_arches = pungi.arch.get_valid_arches(arch, multilib=False, add_noarch=False)

# Modulemd.SimpleSet into which we will add the RPMs.
rpm_artifacts = Modulemd.SimpleSet()
@@ -605,8 +589,7 @@ class KojiContentGenerator(object):
# - the architecture of an RPM is not multilib architecture for `arch`.
# - the architecture of an RPM is not the final mmd architecture.
# - the architecture of an RPM is not "noarch" or "src".
if (rpm["arch"] not in multilib_arches and
rpm["arch"] not in [arch, "noarch", "src"]):
if rpm["arch"] not in multilib_arches and rpm["arch"] not in [arch, "noarch", "src"]:
continue

# Skip the RPM if it is excluded on this arch or exclusive
@@ -728,8 +711,7 @@ class KojiContentGenerator(object):
commit = xmd.get("mbs", {}).get("commit")
scmurl = xmd.get("mbs", {}).get("scmurl")
if not commit or not scmurl:
log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.",
self.module)
log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.", self.module)
return

td = None
@@ -747,9 +729,7 @@ class KojiContentGenerator(object):
if td is not None:
shutil.rmtree(td)
except Exception as e:
log.warning(
"Failed to remove temporary directory {!r}: {}".format(
td, str(e)))
log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))

def _prepare_file_directory(self):
""" Creates a temporary directory that will contain all the files
@@ -787,10 +767,10 @@ class KojiContentGenerator(object):
Uploads output files to Koji hub.
"""
to_upload = []
for info in metadata['output']:
if info.get('metadata_only', False):
for info in metadata["output"]:
if info.get("metadata_only", False):
continue
localpath = os.path.join(file_dir, info['filename'])
localpath = os.path.join(file_dir, info["filename"])
if not os.path.exists(localpath):
err = "Cannot upload %s to Koji. No such file." % localpath
log.error(err)
@@ -799,7 +779,7 @@ class KojiContentGenerator(object):
to_upload.append([localpath, info])

# Create unique server directory.
serverdir = 'mbs/%r.%d' % (time.time(), self.module.id)
serverdir = "mbs/%r.%d" % (time.time(), self.module.id)

for localpath, info in to_upload:
log.info("Uploading %s to Koji" % localpath)
@@ -816,8 +796,8 @@ class KojiContentGenerator(object):

tag_name = self.module.cg_build_koji_tag
if not tag_name:
log.info("%r: Not tagging Content Generator build, no "
"cg_build_koji_tag set", self.module)
log.info(
"%r: Not tagging Content Generator build, no cg_build_koji_tag set", self.module)
return

tag_names_to_try = [tag_name, self.config.koji_cg_default_build_tag]
@@ -827,20 +807,19 @@ class KojiContentGenerator(object):
if tag_info:
break

log.info("%r: Tag %s not found in Koji, trying next one.",
self.module, tag)
log.info("%r: Tag %s not found in Koji, trying next one.", self.module, tag)

if not tag_info:
log.warning(
"%r:, Not tagging Content Generator build, no available tag"
" found, tried %r", self.module, tag_names_to_try)
"%r:, Not tagging Content Generator build, no available tag found, tried %r",
self.module, tag_names_to_try,
)
return

build = self._get_build()
nvr = "%s-%s-%s" % (build["name"], build["version"], build["release"])

log.info("Content generator build %s will be tagged as %s in "
"Koji", nvr, tag)
log.info("Content generator build %s will be tagged as %s in Koji", nvr, tag)
session.tagBuild(tag_info["id"], nvr)

def _load_koji_tag(self, koji_session):
@@ -879,7 +858,7 @@ class KojiContentGenerator(object):
except koji.GenericError as e:
if "Build already exists" not in str(e):
raise
log.warning('Failed to import content generator')
log.warning("Failed to import content generator")
build_info = None
if conf.koji_cg_tag_build:
self._tag_cg_build()

@@ -76,8 +76,10 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
if list_of_args is None and list_of_kwargs is None:
raise ProgrammingError("One of list_of_args or list_of_kwargs must be set.")

if (type(list_of_args) not in [type(None), list] or
type(list_of_kwargs) not in [type(None), list]):
if (
type(list_of_args) not in [type(None), list]
or type(list_of_kwargs) not in [type(None), list]
):
raise ProgrammingError("list_of_args and list_of_kwargs must be list or None.")

if list_of_kwargs is None:
@@ -99,16 +101,19 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
try:
responses = koji_session.multiCall(strict=True)
except Exception:
log.exception("Exception raised for multicall of method %r with args %r, %r:",
koji_session_fnc, args, kwargs)
log.exception(
"Exception raised for multicall of method %r with args %r, %r:",
koji_session_fnc, args, kwargs,
)
return None

if not responses:
log.error("Koji did not return response for multicall of %r", koji_session_fnc)
return None
if type(responses) != list:
log.error("Fault element was returned for multicall of method %r: %r",
koji_session_fnc, responses)
log.error(
"Fault element was returned for multicall of method %r: %r", koji_session_fnc, responses
)
return None

results = []
@@ -122,13 +127,17 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
if type(response) == list:
if not response:
log.error("Empty list returned for multicall of method %r with args %r, %r",
koji_session_fnc, args, kwargs)
log.error(
"Empty list returned for multicall of method %r with args %r, %r",
koji_session_fnc, args, kwargs
)
return None
results.append(response[0])
else:
log.error("Unexpected data returned for multicall of method %r with args %r, %r: %r",
koji_session_fnc, args, kwargs, response)
log.error(
"Unexpected data returned for multicall of method %r with args %r, %r: %r",
koji_session_fnc, args, kwargs, response
)
return None

return results
@@ -150,9 +159,9 @@ class KojiModuleBuilder(GenericBuilder):

backend = "koji"
_build_lock = threading.Lock()
region = dogpile.cache.make_region().configure('dogpile.cache.memory')
region = dogpile.cache.make_region().configure("dogpile.cache.memory")

@module_build_service.utils.validate_koji_tag('tag_name')
@module_build_service.utils.validate_koji_tag("tag_name")
def __init__(self, owner, module, config, tag_name, components):
"""
:param owner: a string representing who kicked off the builds
@@ -186,12 +195,11 @@ class KojiModuleBuilder(GenericBuilder):
self.components = components

def __repr__(self):
return "<KojiModuleBuilder module: %s, tag: %s>" % (
self.module_str, self.tag_name)
return "<KojiModuleBuilder module: %s, tag: %s>" % (self.module_str, self.tag_name)

@region.cache_on_arguments()
def getPerms(self):
return dict([(p['name'], p['id']) for p in self.koji_session.getAllPerms()])
return dict([(p["name"], p["id"]) for p in self.koji_session.getAllPerms()])

@module_build_service.utils.retry(wait_on=(IOError, koji.GenericError))
def buildroot_ready(self, artifacts=None):
@@ -201,24 +209,22 @@ class KojiModuleBuilder(GenericBuilder):
"""
assert self.module_target, "Invalid build target"

tag_id = self.module_target['build_tag']
tag_id = self.module_target["build_tag"]
repo = self.koji_session.getRepo(tag_id)
builds = [self.koji_session.getBuild(a, strict=True) for a in artifacts or []]
log.info("%r checking buildroot readiness for "
"repo: %r, tag_id: %r, artifacts: %r, builds: %r" % (
self, repo, tag_id, artifacts, builds))
log.info(
"%r checking buildroot readiness for repo: %r, tag_id: %r, artifacts: %r, builds: %r"
% (self, repo, tag_id, artifacts, builds)
)

if not repo:
log.info("Repo is not generated yet, buildroot is not ready yet.")
return False

ready = bool(koji.util.checkForBuilds(
self.koji_session,
tag_id,
builds,
repo['create_event'],
latest=True,
))
ready = bool(
koji.util.checkForBuilds(
self.koji_session, tag_id, builds, repo["create_event"], latest=True)
)
if ready:
log.info("%r buildroot is ready" % self)
else:
@@ -239,19 +245,22 @@ class KojiModuleBuilder(GenericBuilder):
# Get all the RPMs and builds of the reusable module in Koji
rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)
# Convert the list to a dict where each key is the build_id
builds = {build['build_id']: build for build in builds}
builds = {build["build_id"]: build for build in builds}
# Create a mapping of package (SRPM) to the RPMs in NVR format
package_to_rpms = {}
for rpm in rpms:
package = builds[rpm['build_id']]['name']
package = builds[rpm["build_id"]]["name"]
if package not in package_to_rpms:
package_to_rpms[package] = []
package_to_rpms[package].append(kobo.rpmlib.make_nvr(rpm))

components_in_module = [c.package for c in module_build.component_builds]
reusable_components = get_reusable_components(
db_session, module_build, components_in_module,
previous_module_build=reusable_module)
db_session,
module_build,
components_in_module,
previous_module_build=reusable_module,
)
# Loop through all the reusable components to find if any of their RPMs are
# being filtered
for reusable_component in reusable_components:
@@ -261,7 +270,7 @@ class KojiModuleBuilder(GenericBuilder):
# We must get the component name from the NVR and not from
# reusable_component.package because macros such as those used
# by SCLs can change the name of the underlying build
component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)['name']
component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]

if component_name not in package_to_rpms:
continue
@@ -270,13 +279,13 @@ class KojiModuleBuilder(GenericBuilder):
for nvr in package_to_rpms[component_name]:
parsed_nvr = kobo.rpmlib.parse_nvr(nvr)
# Don't compare with the epoch
parsed_nvr['epoch'] = None
parsed_nvr["epoch"] = None
# Loop through all the filtered RPMs to find a match with the reusable
# component's RPMs.
for nvr2 in list(filtered_rpms):
parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)
# Don't compare with the epoch
parsed_nvr2['epoch'] = None
parsed_nvr2["epoch"] = None
# Only remove the filter if we are going to reuse a component with
# the same exact NVR
if parsed_nvr == parsed_nvr2:
@@ -299,10 +308,10 @@ class KojiModuleBuilder(GenericBuilder):
|
||||
# Taken from Karsten's create-distmacro-pkg.sh
|
||||
# - however removed any provides to system-release/redhat-release
|
||||
|
||||
name = 'module-build-macros'
|
||||
name = "module-build-macros"
|
||||
version = "0.1"
|
||||
release = "1"
|
||||
today = datetime.date.today().strftime('%a %b %d %Y')
|
||||
today = datetime.date.today().strftime("%a %b %d %Y")
|
||||
mmd = module_build.mmd()
|
||||
|
||||
# Generate "Conflicts: name = version-release". This is workaround for
|
||||
@@ -320,19 +329,20 @@ class KojiModuleBuilder(GenericBuilder):
|
||||
module_build, req_data["filtered_rpms"])
|
||||
else:
|
||||
filtered_rpms = req_data["filtered_rpms"]
|
||||
filter_conflicts.extend(map(
|
||||
KojiModuleBuilder.format_conflicts_line, filtered_rpms))
|
||||
filter_conflicts.extend(map(KojiModuleBuilder.format_conflicts_line, filtered_rpms))
|
||||
|
||||
if req_name in conf.base_module_names and 'ursine_rpms' in req_data:
|
||||
if req_name in conf.base_module_names and "ursine_rpms" in req_data:
|
||||
comments = (
|
||||
'# Filter out RPMs from stream collision modules found from ursine content'
|
||||
' for base module {}:'.format(req_name),
|
||||
'# ' + ', '.join(req_data['stream_collision_modules']),
|
||||
("# Filter out RPMs from stream collision modules found from ursine content"
|
||||
" for base module {}:".format(req_name)),
|
||||
"# " + ", ".join(req_data["stream_collision_modules"]),
|
||||
)
|
||||
filter_conflicts.extend(
|
||||
chain(
|
||||
comments,
|
||||
map(KojiModuleBuilder.format_conflicts_line, req_data["ursine_rpms"]),
|
||||
)
|
||||
)
|
||||
filter_conflicts.extend(chain(
|
||||
comments,
|
||||
map(KojiModuleBuilder.format_conflicts_line, req_data['ursine_rpms'])
|
||||
))
|
||||
|
||||
spec_content = textwrap.dedent("""
|
||||
%global dist {disttag}
|
||||
@@ -433,11 +443,20 @@ class KojiModuleBuilder(GenericBuilder):
log.debug("Building %s.spec" % name)

# We are not interested in the rpmbuild stdout...
null_fd = open(os.devnull, 'w')
execute_cmd(['rpmbuild', '-bs', '%s.spec' % name,
'--define', '_topdir %s' % td,
'--define', '_sourcedir %s' % sources_dir],
cwd=td, stdout=null_fd)
null_fd = open(os.devnull, "w")
execute_cmd(
[
"rpmbuild",
"-bs",
"%s.spec" % name,
"--define",
"_topdir %s" % td,
"--define",
"_sourcedir %s" % sources_dir,
],
cwd=td,
stdout=null_fd,
)
null_fd.close()
sdir = os.path.join(td, "SRPMS")
srpm_paths = glob.glob("%s/*.src.rpm" % sdir)
@@ -458,10 +477,8 @@ class KojiModuleBuilder(GenericBuilder):
:return: the Koji session object.
:rtype: :class:`koji.ClientSession`
"""
koji_config = munch.Munch(koji.read_config(
profile_name=config.koji_profile,
user_config=config.koji_config,
))
koji_config = munch.Munch(
koji.read_config(profile_name=config.koji_profile, user_config=config.koji_config))
# Timeout after 10 minutes. The default is 12 hours.
koji_config["timeout"] = 60 * 10

@@ -494,9 +511,7 @@ class KojiModuleBuilder(GenericBuilder):
koji_session.krb_login(principal=principal, keytab=keytab, ctx=ctx, ccache=ccache)
elif authtype == "ssl":
koji_session.ssl_login(
os.path.expanduser(koji_config.cert),
None,
os.path.expanduser(koji_config.serverca)
os.path.expanduser(koji_config.cert), None, os.path.expanduser(koji_config.serverca)
)
else:
raise ValueError("Unrecognized koji authtype %r" % authtype)
@@ -512,8 +527,7 @@ class KojiModuleBuilder(GenericBuilder):

# Create or update individual tags
# the main tag needs arches so pungi can dump it
self.module_tag = self._koji_create_tag(
self.tag_name, self.arches, perm="admin")
self.module_tag = self._koji_create_tag(self.tag_name, self.arches, perm="admin")
self.module_build_tag = self._koji_create_tag(
self.tag_name + "-build", self.arches, perm="admin")

@@ -530,19 +544,23 @@ class KojiModuleBuilder(GenericBuilder):

@module_build_service.utils.retry(wait_on=SysCallError, interval=5)
def add_groups():
return self._koji_add_groups_to_tag(
dest_tag=self.module_build_tag,
groups=groups,
)
return self._koji_add_groups_to_tag(dest_tag=self.module_build_tag, groups=groups)

add_groups()

# Koji targets can only be 50 characters long, but the generate_koji_tag function
# checks the length with '-build' at the end, but we know we will never append '-build',
# so we can safely have the name check be more characters
target_length = 50 + len('-build')
target_length = 50 + len("-build")
target = module_build_service.utils.generate_koji_tag(
self.module.name, self.module.stream, self.module.version, self.module.context,
target_length, scratch=self.module.scratch, scratch_id=self.module.id)
self.module.name,
self.module.stream,
self.module.version,
self.module.context,
target_length,
scratch=self.module.scratch,
scratch_id=self.module.id,
)
# Add main build target.
self.module_target = self._koji_add_target(target, self.module_build_tag, self.module_tag)
@@ -570,17 +588,19 @@ class KojiModuleBuilder(GenericBuilder):
This method is safe to call multiple times.
"""
log.info("%r adding artifacts %r" % (self, artifacts))
build_tag = self._get_tag(self.module_build_tag)['id']
build_tag = self._get_tag(self.module_build_tag)["id"]

xmd = self.mmd.get_xmd()
if "mbs_options" in xmd.keys() and "blocked_packages" in xmd["mbs_options"].keys():
packages = [kobo.rpmlib.parse_nvr(nvr)["name"] for nvr in artifacts]
packages = [package for package in packages
if package in xmd["mbs_options"]["blocked_packages"]]
packages = [
package for package in packages
if package in xmd["mbs_options"]["blocked_packages"]
]
if packages:
self._koji_unblock_packages(packages)

tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag['name'])
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag["name"])

self.koji_session.multicall = True
for nvr in artifacts:
@@ -593,8 +613,8 @@ class KojiModuleBuilder(GenericBuilder):
if not install:
continue

for group in ('srpm-build', 'build'):
name = kobo.rpmlib.parse_nvr(nvr)['name']
for group in ("srpm-build", "build"):
name = kobo.rpmlib.parse_nvr(nvr)["name"]
log.info("%r adding %s to group %s" % (self, name, group))
self.koji_session.groupPackageListAdd(build_tag, group, name)
self.koji_session.multiCall(strict=True)
@@ -606,11 +626,11 @@ class KojiModuleBuilder(GenericBuilder):
:return: None
"""
if dest_tag:
tag = self._get_tag(self.module_tag)['id']
tagged_nvrs = self._get_tagged_nvrs(self.module_tag['name'])
tag = self._get_tag(self.module_tag)["id"]
tagged_nvrs = self._get_tagged_nvrs(self.module_tag["name"])
else:
tag = self._get_tag(self.module_build_tag)['id']
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag['name'])
tag = self._get_tag(self.module_build_tag)["id"]
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag["name"])

self.koji_session.multicall = True
for nvr in artifacts:
@@ -626,18 +646,18 @@ class KojiModuleBuilder(GenericBuilder):
:param artifacts: a list of NVRs to untag
:return: None
"""
build_tag_name = self.tag_name + '-build'
build_tag_name = self.tag_name + "-build"
dest_tag = self._get_tag(self.tag_name, strict=False)
build_tag = self._get_tag(build_tag_name, strict=False)
# Get the NVRs in the tags to make sure the builds exist and they're tagged before
# untagging them
if dest_tag:
dest_tagged_nvrs = self._get_tagged_nvrs(dest_tag['name'])
dest_tagged_nvrs = self._get_tagged_nvrs(dest_tag["name"])
else:
log.info('The tag "{0}" doesn\'t exist'.format(self.tag_name))
dest_tagged_nvrs = []
if build_tag:
build_tagged_nvrs = self._get_tagged_nvrs(build_tag['name'])
build_tagged_nvrs = self._get_tagged_nvrs(build_tag["name"])
else:
log.info('The tag "{0}" doesn\'t exist'.format(build_tag_name))
build_tagged_nvrs = []
@@ -649,11 +669,11 @@ class KojiModuleBuilder(GenericBuilder):
self.koji_session.multicall = True
for nvr in artifacts:
if nvr in dest_tagged_nvrs:
log.info("%r untagging %r from %r" % (self, nvr, dest_tag['id']))
self.koji_session.untagBuild(dest_tag['id'], nvr)
log.info("%r untagging %r from %r" % (self, nvr, dest_tag["id"]))
self.koji_session.untagBuild(dest_tag["id"], nvr)
if nvr in build_tagged_nvrs:
log.info("%r untagging %r from %r" % (self, nvr, build_tag['id']))
self.koji_session.untagBuild(build_tag['id'], nvr)
log.info("%r untagging %r from %r" % (self, nvr, build_tag["id"]))
self.koji_session.untagBuild(build_tag["id"], nvr)
self.koji_session.multiCall(strict=True)

def wait_task(self, task_id):
@@ -683,12 +703,12 @@ class KojiModuleBuilder(GenericBuilder):
:param component_build: a ComponentBuild object
:return: a list of msgs that MBS needs to process
"""
opts = {'latest': True, 'package': component_build.package, 'inherit': False}
build_tagged = self.koji_session.listTagged(self.module_build_tag['name'], **opts)
opts = {"latest": True, "package": component_build.package, "inherit": False}
build_tagged = self.koji_session.listTagged(self.module_build_tag["name"], **opts)
dest_tagged = None
# Only check the destination tag if the component is not a build_time_only component
if not component_build.build_time_only:
dest_tagged = self.koji_session.listTagged(self.module_tag['name'], **opts)
dest_tagged = self.koji_session.listTagged(self.module_tag["name"], **opts)
for rv in [build_tagged, dest_tagged]:
if rv and len(rv) != 1:
raise ValueError("Expected exactly one item in list. Got %s" % rv)
@@ -716,33 +736,48 @@ class KojiModuleBuilder(GenericBuilder):
return further_work

# Start setting up MBS' database to use the existing build
log.info('Skipping build of "{0}" since it already exists.'.format(build['nvr']))
log.info('Skipping build of "{0}" since it already exists.'.format(build["nvr"]))
# Set it to COMPLETE so it doesn't count towards the concurrent component threshold
component_build.state = koji.BUILD_STATES['COMPLETE']
component_build.nvr = build['nvr']
component_build.task_id = build['task_id']
component_build.state_reason = 'Found existing build'
component_build.state = koji.BUILD_STATES["COMPLETE"]
component_build.nvr = build["nvr"]
component_build.task_id = build["task_id"]
component_build.state_reason = "Found existing build"
nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr)
# Trigger a completed build message
further_work.append(module_build_service.messaging.KojiBuildChange(
'recover_orphaned_artifact: fake message', build['build_id'],
build['task_id'], koji.BUILD_STATES['COMPLETE'], component_build.package,
nvr_dict['version'], nvr_dict['release'], component_build.module_build.id))
further_work.append(
module_build_service.messaging.KojiBuildChange(
"recover_orphaned_artifact: fake message",
build["build_id"],
build["task_id"],
koji.BUILD_STATES["COMPLETE"],
component_build.package,
nvr_dict["version"],
nvr_dict["release"],
component_build.module_build.id,
)
)

component_tagged_in = []
if build_tagged:
component_tagged_in.append(self.module_build_tag['name'])
component_tagged_in.append(self.module_build_tag["name"])
else:
# Tag it in the build tag if it's not there
self.tag_artifacts([component_build.nvr], dest_tag=False)
if dest_tagged:
component_tagged_in.append(self.module_tag['name'])
component_tagged_in.append(self.module_tag["name"])
for tag in component_tagged_in:
log.info('The build being skipped isn\'t tagged in the "{0}" tag. Will send a '
'message to the tag handler'.format(tag))
further_work.append(module_build_service.messaging.KojiTagChange(
'recover_orphaned_artifact: fake message', tag, component_build.package,
component_build.nvr))
log.info(
'The build being skipped isn\'t tagged in the "{0}" tag. Will send a message to '
"the tag handler".format(tag)
)
further_work.append(
module_build_service.messaging.KojiTagChange(
"recover_orphaned_artifact: fake message",
tag,
component_build.package,
component_build.nvr,
)
)
return further_work

def build(self, artifact_name, source):
@@ -768,21 +803,23 @@ class KojiModuleBuilder(GenericBuilder):
# For some reason repr(time.time()) includes 4 or 5
# more digits of precision than str(time.time())
# Unnamed Engineer: Guido v. R., I am disappoint
return '%s/%r.%s' % (prefix, time.time(),
''.join([random.choice(string.ascii_letters)
for i in range(8)]))
return "%s/%r.%s" % (
prefix,
time.time(),
"".join([random.choice(string.ascii_letters) for i in range(8)]),
)

if not self.__prep:
raise RuntimeError("Buildroot is not prep-ed")

self._koji_whitelist_packages([artifact_name])

if source.startswith('cli-build/'):
if source.startswith("cli-build/"):
# treat source as a custom srpm that has already been uploaded to koji
pass
elif '://' not in source:
elif "://" not in source:
# treat source as an srpm and upload it
serverdir = _unique_path('cli-build')
serverdir = _unique_path("cli-build")
callback = None
self.koji_session.uploadWrapper(source, serverdir, callback=callback)
source = "%s/%s" % (serverdir, os.path.basename(source))
@@ -792,32 +829,30 @@ class KojiModuleBuilder(GenericBuilder):
# The reason is that it is faster to build this RPM in
# already existing shared target, because Koji does not need to do
# repo-regen.
if (artifact_name == "module-build-macros" and
self.config.koji_build_macros_target):
if artifact_name == "module-build-macros" and self.config.koji_build_macros_target:
module_target = self.config.koji_build_macros_target
else:
module_target = self.module_target['name']
module_target = self.module_target["name"]

build_opts = {
"skip_tag": True,
"mbs_artifact_name": artifact_name,
"mbs_module_target": module_target
"mbs_module_target": module_target,
}

# disabled by default, wouldn't work until Koji issue #1158 is done
if conf.allow_arch_override:
build_opts['arch_override'] = \
self.mmd.get_rpm_components()[artifact_name].get_arches().get()
build_opts["arch_override"] = (
self.mmd.get_rpm_components()[artifact_name].get_arches().get())

task_id = self.koji_session.build(source, module_target, build_opts,
priority=self.build_priority)
log.info("submitted build of %s (task_id=%s), via %s" % (
source, task_id, self))
task_id = self.koji_session.build(
source, module_target, build_opts, priority=self.build_priority)
log.info("submitted build of %s (task_id=%s), via %s" % (source, task_id, self))
if task_id:
state = koji.BUILD_STATES['BUILDING']
state = koji.BUILD_STATES["BUILDING"]
reason = "Submitted %s to Koji" % (artifact_name)
else:
state = koji.BUILD_STATES['FAILED']
state = koji.BUILD_STATES["FAILED"]
reason = "Failed to submit artifact %s to Koji" % (artifact_name)
return task_id, state, reason, None

@@ -825,8 +860,10 @@ class KojiModuleBuilder(GenericBuilder):
try:
self.koji_session.cancelTask(task_id)
except Exception as error:
log.error('Failed to cancel task ID {0} in Koji. The error '
'message was: {1}'.format(task_id, str(error)))
log.error(
"Failed to cancel task ID {0} in Koji. The error "
"message was: {1}".format(task_id, str(error))
)

@classmethod
def repo_from_tag(cls, config, tag_name, arch):
@@ -840,52 +877,52 @@ class KojiModuleBuilder(GenericBuilder):
"""
return "%s/%s/latest/%s" % (config.koji_repository_url, tag_name, arch)

@module_build_service.utils.validate_koji_tag('tag', post='')
@module_build_service.utils.validate_koji_tag("tag", post="")
def _get_tag(self, tag, strict=True):
if isinstance(tag, dict):
tag = tag['name']
tag = tag["name"]
taginfo = self.koji_session.getTag(tag)
if not taginfo:
if strict:
raise SystemError("Unknown tag: %s" % tag)
return taginfo

@module_build_service.utils.validate_koji_tag(['tag_name'], post='')
@module_build_service.utils.validate_koji_tag(["tag_name"], post="")
def _koji_add_many_tag_inheritance(self, tag_name, parent_tags):
tag = self._get_tag(tag_name)
# highest priority num is at the end
inheritance_data = sorted(self.koji_session.getInheritanceData(tag['name']) or
[], key=lambda k: k['priority'])
inheritance_data = sorted(
self.koji_session.getInheritanceData(tag["name"]) or [], key=lambda k: k["priority"])
# Set initial priority to last record in inheritance data or 0
priority = 0
if inheritance_data:
priority = inheritance_data[-1]['priority'] + 10
priority = inheritance_data[-1]["priority"] + 10

def record_exists(parent_id, data):
for item in data:
if parent_id == item['parent_id']:
if parent_id == item["parent_id"]:
return True
return False

for parent in parent_tags: # We expect that they're sorted
parent = self._get_tag(parent)
if record_exists(parent['id'], inheritance_data):
if record_exists(parent["id"], inheritance_data):
continue

parent_data = {}
parent_data['parent_id'] = parent['id']
parent_data['priority'] = priority
parent_data['maxdepth'] = None
parent_data['intransitive'] = False
parent_data['noconfig'] = False
parent_data['pkg_filter'] = ''
parent_data["parent_id"] = parent["id"]
parent_data["priority"] = priority
parent_data["maxdepth"] = None
parent_data["intransitive"] = False
parent_data["noconfig"] = False
parent_data["pkg_filter"] = ""
inheritance_data.append(parent_data)
priority += 10

if inheritance_data:
self.koji_session.setInheritanceData(tag['id'], inheritance_data)
self.koji_session.setInheritanceData(tag["id"], inheritance_data)

@module_build_service.utils.validate_koji_tag('dest_tag')
@module_build_service.utils.validate_koji_tag("dest_tag")
def _koji_add_groups_to_tag(self, dest_tag, groups):
"""Add groups to a tag as well as packages listed by group
@@ -899,17 +936,17 @@ class KojiModuleBuilder(GenericBuilder):
log.debug("Adding groups=%s to tag=%s" % (list(groups), dest_tag))
if groups and not isinstance(groups, dict):
raise ValueError("Expected dict {'group' : [str(package1), ...]")
dest_tag = self._get_tag(dest_tag)['name']
existing_groups = dict([(p['name'], p['group_id'])
for p
in self.koji_session.getTagGroups(dest_tag, inherit=False)
])
dest_tag = self._get_tag(dest_tag)["name"]
existing_groups = dict([
(p["name"], p["group_id"])
for p in self.koji_session.getTagGroups(dest_tag, inherit=False)
])

for group, packages in groups.items():
group_id = existing_groups.get(group, None)
if group_id is not None:
log.debug("Group %s already exists for tag %s. Skipping creation."
% (group, dest_tag))
log.debug(
"Group %s already exists for tag %s. Skipping creation." % (group, dest_tag))
continue

self.koji_session.groupListAdd(dest_tag, group)
@@ -919,7 +956,7 @@ class KojiModuleBuilder(GenericBuilder):
for pkg in packages:
self.koji_session.groupPackageListAdd(dest_tag, group, pkg)

@module_build_service.utils.validate_koji_tag('tag_name')
@module_build_service.utils.validate_koji_tag("tag_name")
def _koji_create_tag(self, tag_name, arches=None, perm=None):
"""Create a tag in Koji

@@ -945,16 +982,16 @@ class KojiModuleBuilder(GenericBuilder):
raise ValueError("Expected list or None on input got %s" % type(arches))

current_arches = []
if taginfo['arches']: # None if none
current_arches = taginfo['arches'].split() # string separated by empty spaces
if taginfo["arches"]: # None if none
current_arches = taginfo["arches"].split() # string separated by empty spaces

if set(arches) != set(current_arches):
opts['arches'] = " ".join(arches)
opts["arches"] = " ".join(arches)

if perm:
if taginfo['locked']:
raise SystemError("Tag %s: master lock already set. Can't edit tag"
% taginfo['name'])
if taginfo["locked"]:
raise SystemError(
"Tag %s: master lock already set. Can't edit tag" % taginfo["name"])

perm_ids = self.getPerms()

@@ -962,15 +999,15 @@ class KojiModuleBuilder(GenericBuilder):
raise ValueError("Unknown permissions %s" % perm)

perm_id = perm_ids[perm]
if taginfo['perm'] not in (perm_id, perm): # check either id or the string
opts['perm'] = perm_id
if taginfo["perm"] not in (perm_id, perm): # check either id or the string
opts["perm"] = perm_id

# Create deepcopy of conf dict, because we are going to change it later.
opts['extra'] = copy.deepcopy(conf.koji_tag_extra_opts)
opts["extra"] = copy.deepcopy(conf.koji_tag_extra_opts)

xmd = self.mmd.get_xmd()
if "mbs_options" in xmd.keys() and "repo_include_all" in xmd["mbs_options"].keys():
opts['extra']['repo_include_all'] = xmd["mbs_options"]["repo_include_all"]
opts["extra"]["repo_include_all"] = xmd["mbs_options"]["repo_include_all"]

# edit tag with opts
self.koji_session.editTag2(tag_name, **opts)
@@ -983,18 +1020,20 @@ class KojiModuleBuilder(GenericBuilder):
# This will help with potential resubmitting of failed builds
pkglists = {}
for tag in tags:
pkglists[tag['id']] = dict([(p['package_name'], p['package_id'])
for p in self.koji_session.listPackages(tagID=tag['id'])])
pkglists[tag["id"]] = dict([
(p["package_name"], p["package_id"])
for p in self.koji_session.listPackages(tagID=tag["id"])
])

self.koji_session.multicall = True
for tag in tags:
pkglist = pkglists[tag['id']]
pkglist = pkglists[tag["id"]]
for package in packages:
if pkglist.get(package, None):
log.debug("%s Package %s is already whitelisted." % (self, package))
continue

self.koji_session.packageListAdd(tag['name'], package, self.owner)
self.koji_session.packageListAdd(tag["name"], package, self.owner)
self.koji_session.multiCall(strict=True)

def _koji_block_packages(self, packages):
@@ -1013,7 +1052,7 @@ class KojiModuleBuilder(GenericBuilder):
args = [[self.module_build_tag["name"], package] for package in packages]
koji_multicall_map(self.koji_session, self.koji_session.packageListUnblock, args)

@module_build_service.utils.validate_koji_tag(['build_tag', 'dest_tag'])
@module_build_service.utils.validate_koji_tag(["build_tag", "dest_tag"])
def _koji_add_target(self, name, build_tag, dest_tag):
"""Add build target if it doesn't exist or validate the existing one
@@ -1036,25 +1075,29 @@ class KojiModuleBuilder(GenericBuilder):
target_info = self.koji_session.getBuildTarget(name)

barches = build_tag.get("arches", None)
assert barches, "Build tag %s has no arches defined." % build_tag['name']
assert barches, "Build tag %s has no arches defined." % build_tag["name"]

if not target_info:
target_info = self.koji_session.createBuildTarget(name, build_tag['name'],
dest_tag['name'])
target_info = self.koji_session.createBuildTarget(
name, build_tag["name"], dest_tag["name"])

else: # verify whether build and destination tag matches
if build_tag['name'] != target_info['build_tag_name']:
raise SystemError(("Target references unexpected build_tag_name. "
"Got '%s', expected '%s'. Please contact administrator.")
% (target_info['build_tag_name'], build_tag['name']))
if dest_tag['name'] != target_info['dest_tag_name']:
raise SystemError(("Target references unexpected dest_tag_name. "
"Got '%s', expected '%s'. Please contact administrator.")
% (target_info['dest_tag_name'], dest_tag['name']))
if build_tag["name"] != target_info["build_tag_name"]:
raise SystemError(
"Target references unexpected build_tag_name. "
"Got '%s', expected '%s'. Please contact administrator."
% (target_info["build_tag_name"], build_tag["name"])
)
if dest_tag["name"] != target_info["dest_tag_name"]:
raise SystemError(
"Target references unexpected dest_tag_name. "
"Got '%s', expected '%s'. Please contact administrator."
% (target_info["dest_tag_name"], dest_tag["name"])
)

return self.koji_session.getBuildTarget(name)

def list_tasks_for_components(self, component_builds=None, state='active'):
def list_tasks_for_components(self, component_builds=None, state="active"):
"""
:param component_builds: list of component builds which we want to check
:param state: limit the check only for Koji tasks in the given state
@@ -1064,33 +1107,36 @@ class KojiModuleBuilder(GenericBuilder):
"""

component_builds = component_builds or []
if state == 'active':
states = [koji.TASK_STATES['FREE'],
koji.TASK_STATES['OPEN'],
koji.TASK_STATES['ASSIGNED']]
if state == "active":
states = [
koji.TASK_STATES["FREE"],
koji.TASK_STATES["OPEN"],
koji.TASK_STATES["ASSIGNED"],
]
elif state.upper() in koji.TASK_STATES:
states = [koji.TASK_STATES[state.upper()]]
else:
raise ValueError("State {} is not valid within Koji task states."
.format(state))
raise ValueError("State {} is not valid within Koji task states.".format(state))

tasks = []
for task in self.koji_session.listTasks(opts={'state': states,
'decode': True,
'method': 'build'}):
task_opts = task['request'][-1]
for task in self.koji_session.listTasks(
opts={"state": states, "decode": True, "method": "build"}
):
task_opts = task["request"][-1]
assert isinstance(task_opts, dict), "Task options shall be a dict."
if 'scratch' in task_opts and task_opts['scratch']:
if "scratch" in task_opts and task_opts["scratch"]:
continue
if 'mbs_artifact_name' not in task_opts:
task_opts['mbs_artifact_name'] = None
if 'mbs_module_target' not in task_opts:
task_opts['mbs_module_target'] = None
if "mbs_artifact_name" not in task_opts:
task_opts["mbs_artifact_name"] = None
if "mbs_module_target" not in task_opts:
task_opts["mbs_module_target"] = None
for c in component_builds:
# TODO: https://pagure.io/fm-orchestrator/issue/397
# Subj: Do not mix target/tag when looking for component builds
if (c.package == task_opts['mbs_artifact_name'] and
c.module_build.koji_tag == task_opts['mbs_module_target']):
if (
c.package == task_opts["mbs_artifact_name"]
and c.module_build.koji_tag == task_opts["mbs_module_target"]
):
tasks.append(task)

return tasks
@@ -1143,7 +1189,8 @@ class KojiModuleBuilder(GenericBuilder):
"packageID": component_id,
"userID": mbs_user_id,
"state": koji.BUILD_STATES["COMPLETE"],
"queryOpts": {"order": "-build_id", "limit": 1}})
"queryOpts": {"order": "-build_id", "limit": 1},
})

# Get the latest Koji build created by MBS for every component in single Koji call.
builds_per_component = koji_retrying_multicall_map(
@@ -1209,8 +1256,7 @@ class KojiModuleBuilder(GenericBuilder):
"""
with models.make_session(conf) as db_session:
build = models.ModuleBuild.get_build_from_nsvc(
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(),
mmd.get_context())
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context())
koji_session = KojiModuleBuilder.get_session(conf, login=False)
rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]
nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)
@@ -1218,9 +1264,11 @@ class KojiModuleBuilder(GenericBuilder):

def finalize(self, succeeded=True):
# Only import to koji CG if the module is "build" and not scratch.
if (not self.module.scratch and
self.config.koji_enable_content_generator and
self.module.state == models.BUILD_STATES['build']):
if (
not self.module.scratch
and self.config.koji_enable_content_generator
and self.module.state == models.BUILD_STATES["build"]
):
cg = KojiContentGenerator(self.module, self.config)
cg.koji_import()
if conf.koji_cg_devel_module:
@@ -1244,8 +1292,10 @@ class KojiModuleBuilder(GenericBuilder):
tags = []
koji_tags = session.listTags(rpm_md["build_id"])
for t in koji_tags:
if (not t["name"].endswith("-build") and
t["name"].startswith(tuple(conf.koji_tag_prefixes))):
if (
not t["name"].endswith("-build")
and t["name"].startswith(tuple(conf.koji_tag_prefixes))
):
tags.append(t["name"])

return tags

@@ -43,7 +43,7 @@ from module_build_service.builder.utils import (
create_local_repo_from_koji_tag,
execute_cmd,
find_srpm,
get_koji_config
get_koji_config,
)
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

@@ -68,8 +68,7 @@ class MockModuleBuilder(GenericBuilder):
except IOError:
pass
else:
raise IOError("None of {} mock config files found."
.format(conf.mock_config_file))
raise IOError("None of {} mock config files found.".format(conf.mock_config_file))

# Load yum config file template
for cf in conf.yum_config_file:
@@ -80,10 +79,9 @@ class MockModuleBuilder(GenericBuilder):
except IOError:
pass
else:
raise IOError("None of {} yum config files found."
.format(conf.yum_config_file))
raise IOError("None of {} yum config files found.".format(conf.yum_config_file))

@module_build_service.utils.validate_koji_tag('tag_name')
@module_build_service.utils.validate_koji_tag("tag_name")
def __init__(self, owner, module, config, tag_name, components):
self.module_str = module.name
self.module = module
@@ -101,8 +99,7 @@ class MockModuleBuilder(GenericBuilder):
if arch_detected:
self.arch = arch_detected
else:
log.warning("Couldn't determine machine arch. Falling back "
"to configured arch.")
log.warning("Couldn't determine machine arch. Falling back to configured arch.")
self.arch = conf.arch_fallback
else:
self.arch = conf.arch_fallback
@@ -144,8 +141,8 @@ class MockModuleBuilder(GenericBuilder):
for name in os.listdir(self.configdir):
os.remove(os.path.join(self.configdir, name))

log.info("MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" %
(tag_name, self.tag_dir))
log.info(
"MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" % (tag_name, self.tag_dir))

@property
def module_build_tag(self):
@@ -175,18 +172,21 @@ class MockModuleBuilder(GenericBuilder):
m1_mmd = self.module.mmd()
artifacts = Modulemd.SimpleSet()

rpm_files = [f
for f in os.listdir(self.resultsdir)
if f.endswith(".rpm")]
rpm_files = [f for f in os.listdir(self.resultsdir) if f.endswith(".rpm")]

if rpm_files:
output = subprocess.check_output(['rpm',
'--queryformat',
'%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{ARCH}\n',
'-qp'] + rpm_files,
cwd=self.resultsdir,
universal_newlines=True)
nevras = output.strip().split('\n')
output = subprocess.check_output(
[
"rpm",
"--queryformat",
"%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{ARCH}\n",
"-qp",
]
+ rpm_files,
cwd=self.resultsdir,
universal_newlines=True,
)
nevras = output.strip().split("\n")
if len(nevras) != len(rpm_files):
raise RuntimeError("rpm -qp returned an unexpected number of lines")

@@ -198,20 +198,20 @@ class MockModuleBuilder(GenericBuilder):
if name in m1_mmd.get_rpm_filter().get():
continue

pkglist_f.write(rpm_file + '\n')
artifacts.add('{}-{}:{}-{}.{}'.format(name, epoch, version, release, arch))
pkglist_f.write(rpm_file + "\n")
artifacts.add("{}-{}:{}-{}.{}".format(name, epoch, version, release, arch))

pkglist_f.close()
m1_mmd.set_rpm_artifacts(artifacts)

# Generate repo.
execute_cmd(['/usr/bin/createrepo_c', '--pkglist', pkglist, path])
execute_cmd(["/usr/bin/createrepo_c", "--pkglist", pkglist, path])

# ...and inject modules.yaml there if asked.
if include_module_yaml:
mmd_path = os.path.join(path, "modules.yaml")
m1_mmd.dump(mmd_path)
execute_cmd(['/usr/bin/modifyrepo_c', '--mdtype=modules', mmd_path, repodata_path])
execute_cmd(["/usr/bin/modifyrepo_c", "--mdtype=modules", mmd_path, repodata_path])

def _add_repo(self, name, baseurl, extra=""):
"""
@@ -247,18 +247,18 @@ class MockModuleBuilder(GenericBuilder):

with MockModuleBuilder._config_lock:
infile = os.path.join(self.configdir, "mock.cfg")
with open(infile, 'r') as f:
with open(infile, "r") as f:
# This looks scary, but it is the way how mock itself loads the
# config file ...
config_opts = {}
code = compile(f.read(), infile, 'exec')
code = compile(f.read(), infile, "exec")
# pylint: disable=exec-used
exec(code)

self.groups = config_opts["chroot_setup_cmd"].split(" ")[1:]
self.yum_conf = config_opts['yum.conf']
self.enabled_modules = config_opts['module_enable']
self.releasever = config_opts['releasever']
self.yum_conf = config_opts["yum.conf"]
self.enabled_modules = config_opts["module_enable"]
self.releasever = config_opts["releasever"]

def _write_mock_config(self):
"""
@@ -267,8 +267,8 @@ class MockModuleBuilder(GenericBuilder):

with MockModuleBuilder._config_lock:
config = str(MockModuleBuilder.mock_config_template)
config = config.replace("$root", "%s-%s" % (self.tag_name,
str(threading.current_thread().name)))
config = config.replace(
"$root", "%s-%s" % (self.tag_name, str(threading.current_thread().name)))
config = config.replace("$arch", self.arch)
config = config.replace("$group", " ".join(self.groups))
config = config.replace("$yum_conf", self.yum_conf)
@@ -278,13 +278,13 @@ class MockModuleBuilder(GenericBuilder):
# We write the most recent config to "mock.cfg", so thread-related
# configs can be later (re-)generated from it using _load_mock_config.
outfile = os.path.join(self.configdir, "mock.cfg")
with open(outfile, 'w') as f:
with open(outfile, "w") as f:
f.write(config)

# Write the config to thread-related configuration file.
outfile = os.path.join(self.configdir, "mock-%s.cfg" %
str(threading.current_thread().name))
with open(outfile, 'w') as f:
outfile = os.path.join(
self.configdir, "mock-%s.cfg" % str(threading.current_thread().name))
with open(outfile, "w") as f:
f.write(config)

def buildroot_connect(self, groups):
@@ -319,6 +319,7 @@ class MockModuleBuilder(GenericBuilder):
self._write_mock_config()

from module_build_service.scheduler.consumer import fake_repo_done_message

fake_repo_done_message(self.tag_name)

def tag_artifacts(self, artifacts):
@@ -361,11 +362,11 @@ class MockModuleBuilder(GenericBuilder):
repo = koji_session.getRepo(repo_name)
if repo:
baseurl = koji.PathInfo(topdir=koji_config.topurl).repo(repo["id"], repo_name)
baseurl = '{0}/{1}/'.format(baseurl, self.arch)
baseurl = "{0}/{1}/".format(baseurl, self.arch)
else:
repo_dir = os.path.join(self.config.cache_dir, "koji_tags", tag)
create_local_repo_from_koji_tag(self.config, tag, repo_dir,
[self.arch, "noarch"])
create_local_repo_from_koji_tag(
self.config, tag, repo_dir, [self.arch, "noarch"])
baseurl = "file://" + repo_dir
# Check to see if there are any external repos tied to the tag
for ext_repo in koji_session.getTagExternalRepos(repo_name):
@@ -382,13 +383,13 @@ class MockModuleBuilder(GenericBuilder):
# build_id=1 and task_id=1 are OK here, because we are building just
# one RPM at the time.
msg = module_build_service.messaging.KojiBuildChange(
msg_id='a faked internal message',
msg_id="a faked internal message",
build_id=build_id,
task_id=build_id,
build_name=nvr["name"],
build_new_state=state,
build_release=nvr["release"],
build_version=nvr["version"]
build_version=nvr["version"],
)
module_build_service.scheduler.consumer.work_queue_put(msg)

@@ -411,7 +412,7 @@ class MockModuleBuilder(GenericBuilder):
os.remove(log_path)

# Remove other files containing useless information
elif logf.endswith('-srpm-stdout.log'):
elif logf.endswith("-srpm-stdout.log"):
with open(log_path) as f:
data = f.read(4096)
if re.match("Downloading [^\n]*\n\n\nWrote: [^\n]", data):
@@ -421,24 +422,27 @@ class MockModuleBuilder(GenericBuilder):
"""
Builds the artifact from the SRPM.
"""
state = koji.BUILD_STATES['BUILDING']
state = koji.BUILD_STATES["BUILDING"]

# Use the mock config associated with this thread.
mock_config = os.path.join(self.configdir,
"mock-%s.cfg" % str(threading.current_thread().name))
mock_config = os.path.join(
self.configdir, "mock-%s.cfg" % str(threading.current_thread().name))

# Open the logs to which we will forward mock stdout/stderr.
mock_stdout_log = open(os.path.join(self.resultsdir,
artifact_name + "-mock-stdout.log"), "w")
mock_stderr_log = open(os.path.join(self.resultsdir,
artifact_name + "-mock-stderr.log"), "w")
mock_stdout_log = open(
os.path.join(self.resultsdir, artifact_name + "-mock-stdout.log"), "w")
mock_stderr_log = open(
os.path.join(self.resultsdir, artifact_name + "-mock-stderr.log"), "w")

srpm = artifact_name
resultsdir = builder.resultsdir
try:
# Initialize mock.
execute_cmd(["mock", "-v", "-r", mock_config, "--init"],
stdout=mock_stdout_log, stderr=mock_stderr_log)
execute_cmd(
["mock", "-v", "-r", mock_config, "--init"],
stdout=mock_stdout_log,
stderr=mock_stderr_log,
)

# Start the build and store results to resultsdir
builder.build(mock_stdout_log, mock_stderr_log)
@@ -448,23 +452,21 @@ class MockModuleBuilder(GenericBuilder):
# are put in the scheduler's work queue and are handled
# by MBS after the build_srpm() method returns and scope gets
# back to scheduler.main.main() method.
state = koji.BUILD_STATES['COMPLETE']
state = koji.BUILD_STATES["COMPLETE"]
self._send_build_change(state, srpm, build_id)

with open(os.path.join(resultsdir, "status.log"), 'w') as f:
with open(os.path.join(resultsdir, "status.log"), "w") as f:
f.write("complete\n")
except Exception as e:
log.error("Error while building artifact %s: %s" % (artifact_name,
str(e)))
log.error("Error while building artifact %s: %s" % (artifact_name, str(e)))

# Emit messages simulating complete build. These messages
# are put in the scheduler's work queue and are handled
# by MBS after the build_srpm() method returns and scope gets
# back to scheduler.main.main() method.
state = koji.BUILD_STATES['FAILED']
self._send_build_change(state, srpm,
build_id)
with open(os.path.join(resultsdir, "status.log"), 'w') as f:
state = koji.BUILD_STATES["FAILED"]
self._send_build_change(state, srpm, build_id)
with open(os.path.join(resultsdir, "status.log"), "w") as f:
f.write("failed\n")

mock_stdout_log.close()
@@ -493,7 +495,7 @@ class MockModuleBuilder(GenericBuilder):
# already in repository ready to be used. This is not a case for Mock
# backend in the time we return here.
reason = "Building %s in Mock" % (artifact_name)
return build_id, koji.BUILD_STATES['BUILDING'], reason, None
return build_id, koji.BUILD_STATES["BUILDING"], reason, None

def build(self, artifact_name, source):
log.info("Starting building artifact %s: %s" % (artifact_name, source))
@@ -502,8 +504,8 @@ class MockModuleBuilder(GenericBuilder):
# generate the thread-specific mock config by writing it to fs again.
self._load_mock_config()
self._write_mock_config()
mock_config = os.path.join(self.configdir, "mock-%s.cfg"
% str(threading.current_thread().name))
mock_config = os.path.join(
self.configdir, "mock-%s.cfg" % str(threading.current_thread().name))

# Get the build-id in thread-safe manner.
build_id = None
@@ -513,15 +515,14 @@ class MockModuleBuilder(GenericBuilder):

# Clear resultsdir associated with this thread or in case it does not
# exist, create it.
resultsdir = os.path.join(self.resultsdir,
str(threading.current_thread().name))
resultsdir = os.path.join(self.resultsdir, str(threading.current_thread().name))
if os.path.exists(resultsdir):
for name in os.listdir(resultsdir):
os.remove(os.path.join(resultsdir, name))
else:
os.makedirs(resultsdir)

if source.endswith('.src.rpm'):
if source.endswith(".src.rpm"):
builder = SRPMBuilder(mock_config, resultsdir, source)
else:
# Otherwise, assume we're building from some scm repo
@@ -536,7 +537,7 @@ class MockModuleBuilder(GenericBuilder):
def cancel_build(self, task_id):
pass

def list_tasks_for_components(self, component_builds=None, state='active'):
def list_tasks_for_components(self, component_builds=None, state="active"):
pass

def repo_from_tag(cls, config, tag_name, arch):
@@ -557,8 +558,7 @@ class MockModuleBuilder(GenericBuilder):
"""
with models.make_session(conf) as db_session:
build = models.ModuleBuild.get_build_from_nsvc(
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(),
mmd.get_context())
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context())
if build.koji_tag.startswith("repofile://"):
# Modules from local repository have already the RPMs filled in mmd.
return list(mmd.get_rpm_artifacts().get())
@@ -573,9 +573,7 @@ class BaseBuilder(object):
def __init__(self, config, resultsdir):
self.config = config
self.resultsdir = resultsdir
self.cmd = ["mock", "-v", "-r", config,
"--no-clean",
"--resultdir=%s" % resultsdir]
self.cmd = ["mock", "-v", "-r", config, "--no-clean", "--resultdir=%s" % resultsdir]

def build(self, stdout, stderr):
execute_cmd(self.cmd, stdout=stdout, stderr=stderr)
@@ -602,24 +600,20 @@ class SCMBuilder(BaseBuilder):
# See https://bugzilla.redhat.com/show_bug.cgi?id=1459437 for
# more info. Once mock-scm supports this feature, we can remove
# this code.
distgit_get_branch = \
"sh -c {}'; git -C {} checkout {}'".format(pipes.quote(distgit_get),
artifact_name,
branch)
distgit_get_branch = "sh -c {}'; git -C {} checkout {}'".format(
pipes.quote(distgit_get), artifact_name, branch)

f.writelines([
"config_opts['scm'] = True\n",
"config_opts['scm_opts']['method'] = 'distgit'\n",
"config_opts['scm_opts']['package'] = '{}'\n".format(
artifact_name),
"config_opts['scm_opts']['distgit_get'] = {!r}\n".format(
distgit_get_branch),
"config_opts['scm_opts']['package'] = '{}'\n".format(artifact_name),
"config_opts['scm_opts']['distgit_get'] = {!r}\n".format(distgit_get_branch),
])

# Set distgit_src_get only if it's defined.
if distgit_cmds[1]:
f.write("config_opts['scm_opts']['distgit_src_get'] = '{}'\n".format(
distgit_cmds[1]))
f.write(
"config_opts['scm_opts']['distgit_src_get'] = '{}'\n".format(distgit_cmds[1]))

# The local git repositories cloned by `fedpkg clone` typically do not have
# the tarballs with sources committed in a git repo. They normally live in lookaside
@@ -633,7 +627,7 @@ class SCMBuilder(BaseBuilder):

def _make_executable(self, path):
mode = os.stat(path).st_mode
mode |= (mode & 0o444) >> 2 # copy R bits to X
mode |= (mode & 0o444) >> 2  # copy R bits to X
os.chmod(path, mode)

def _get_distgit_commands(self, source):
@@ -658,6 +652,6 @@ class SCMBuilder(BaseBuilder):
# let's return 0.0 so the type is consistent
return self.koji_session.getAverageBuildDuration(component.package) or 0.0
except Exception:
log.debug('The Koji call to getAverageBuildDuration failed. Is Koji properly '
'configured?')
log.debug(
"The Koji call to getAverageBuildDuration failed. Is Koji properly configured?")
return 0.0

@@ -2,9 +2,7 @@ import pkg_resources

from module_build_service.builder.base import GenericBuilder

__all__ = [
GenericBuilder
]
__all__ = [GenericBuilder]

for entrypoint in pkg_resources.iter_entry_points('mbs.builder_backends'):
for entrypoint in pkg_resources.iter_entry_points("mbs.builder_backends"):
GenericBuilder.register_backend_class(entrypoint.load())
@@ -91,9 +91,10 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
# We are skipping the caching based on the first two arguments of
# default_buildroot_groups, because they are "self" and db.session
# instance which are different each call we call that method.
default_buildroot_groups_cache = dogpile.cache.make_region(
function_key_generator=create_dogpile_key_generator_func(2)).configure(
'dogpile.cache.memory')
default_buildroot_groups_cache = (
dogpile.cache.make_region(function_key_generator=create_dogpile_key_generator_func(2))
.configure("dogpile.cache.memory")
)

@classmethod
def register_backend_class(cls, backend_class):
@@ -113,13 +114,14 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
# check if the backend is within allowed backends for the used resolver
resolver = module_build_service.resolver.system_resolver
if not resolver.is_builder_compatible(backend):
raise ValueError("Builder backend '{}' is not compatible with "
"resolver backend '{}'. Check your configuration."
.format(backend, resolver.backend))
raise ValueError(
"Builder backend '{}' is not compatible with resolver backend '{}'. Check your "
"configuration.".format(backend, resolver.backend)
)

if backend in GenericBuilder.backends:
return GenericBuilder.backends[backend](owner=owner, module=module,
config=config, **extra)
return GenericBuilder.backends[backend](
owner=owner, module=module, config=config, **extra)
else:
raise ValueError("Builder backend='%s' not recognized" % backend)

@@ -137,8 +139,13 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
"""
components = [c.package for c in module.component_builds]
builder = GenericBuilder.create(
module.owner, module, config.system, config, tag_name=module.koji_tag,
components=components)
module.owner,
module,
config.system,
config,
tag_name=module.koji_tag,
components=components,
)
if buildroot_connect is True:
groups = GenericBuilder.default_buildroot_groups(session, module)
builder.buildroot_connect(groups)
@@ -156,8 +163,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
the tag with particular name and architecture.
"""
if backend in GenericBuilder.backends:
return GenericBuilder.backends[backend].repo_from_tag(
config, tag_name, arch)
return GenericBuilder.backends[backend].repo_from_tag(config, tag_name, arch)
else:
raise ValueError("Builder backend='%s' not recognized" % backend)

@@ -310,23 +316,18 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):

# Resolve default buildroot groups using the MBS, but only for
# non-local modules.
groups = resolver.resolve_profiles(
mmd, ('buildroot', 'srpm-buildroot'))

groups = {
'build': groups['buildroot'],
'srpm-build': groups['srpm-buildroot'],
}
groups = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
groups = {"build": groups["buildroot"], "srpm-build": groups["srpm-buildroot"]}
except ValueError:
reason = "Failed to gather buildroot groups from SCM."
log.exception(reason)
module.transition(conf, state="failed", state_reason=reason, failure_type='user')
module.transition(conf, state="failed", state_reason=reason, failure_type="user")
session.commit()
raise
return groups

@abstractmethod
def list_tasks_for_components(self, component_builds=None, state='active'):
def list_tasks_for_components(self, component_builds=None, state="active"):
"""
:param component_builds: list of component builds which we want to check
:param state: limit the check only for tasks in the given state
@@ -416,13 +417,15 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
continue

if average_time_to_build < 0:
log.warning("Negative average build duration for component %s: %s",
component, str(average_time_to_build))
log.warning(
"Negative average build duration for component %s: %s",
component, str(average_time_to_build),
)
weights[component] = weight
continue

# Increase the task weight by 0.75 for every hour of build duration.
adj = (average_time_to_build / ((60 * 60) / 0.75))
adj = average_time_to_build / ((60 * 60) / 0.75)
# cap the adjustment at +4.5
weight += min(4.5, adj)
@@ -58,10 +58,8 @@ def get_koji_config(mbs_config):
# Placed here to avoid py2/py3 conflicts...
import koji

koji_config = munch.Munch(koji.read_config(
profile_name=mbs_config.koji_profile,
user_config=mbs_config.koji_config,
))
koji_config = munch.Munch(
koji.read_config(profile_name=mbs_config.koji_profile, user_config=mbs_config.koji_config))
# Timeout after 10 minutes. The default is 12 hours.
koji_config["timeout"] = 60 * 10
return koji_config
@@ -93,7 +91,7 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
log.exception("Failed to list rpms in tag %r" % tag)

# Reformat builds so they are dict with build_id as a key.
builds = {build['build_id']: build for build in builds}
builds = {build["build_id"]: build for build in builds}

# Prepare pathinfo we will use to generate the URL.
pathinfo = koji.PathInfo(topdir=session.opts["topurl"])
@@ -104,26 +102,25 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
# Prepare the list of URLs to download
download_args = []
for rpm in rpms:
build_info = builds[rpm['build_id']]
build_info = builds[rpm["build_id"]]

# We do not download debuginfo packages or packages built for archs
# we are not interested in.
if koji.is_debuginfo(rpm['name']) or not rpm['arch'] in archs:
if koji.is_debuginfo(rpm["name"]) or not rpm["arch"] in archs:
continue

fname = pathinfo.rpm(rpm)
relpath = os.path.basename(fname)
local_fn = os.path.join(repo_dir, relpath)
# Download only when the RPM is not downloaded or the size does not match.
if not os.path.exists(local_fn) or os.path.getsize(local_fn) != rpm['size']:
if not os.path.exists(local_fn) or os.path.getsize(local_fn) != rpm["size"]:
if os.path.exists(local_fn):
os.remove(local_fn)
repo_changed = True
url = pathinfo.build(build_info) + '/' + fname
url = pathinfo.build(build_info) + "/" + fname
download_args.append((url, local_fn))

log.info(
"Downloading %d packages from Koji tag %s to %s" % (len(download_args), tag, repo_dir))
log.info("Downloading %d packages from Koji tag %s to %s" % (len(download_args), tag, repo_dir))

# Create the output directory
try:
@@ -162,4 +159,4 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
shutil.rmtree(repodata_path)

log.info("Creating local repository in %s" % repo_dir)
execute_cmd(['/usr/bin/createrepo_c', repo_dir])
execute_cmd(["/usr/bin/createrepo_c", repo_dir])
File diff suppressed because it is too large
@@ -56,9 +56,6 @@ class StreamAmbigous(ValueError):
|
||||
|
||||
|
||||
def json_error(status, error, message):
|
||||
response = jsonify(
|
||||
{'status': status,
|
||||
'error': error,
|
||||
'message': message})
|
||||
response = jsonify({"status": status, "error": error, "message": message})
|
||||
response.status_code = status
|
||||
return response
|
||||
|
||||
@@ -39,9 +39,9 @@ def variant_str(s):
|
||||
""" Converts a string to a GLib.Variant
|
||||
"""
|
||||
if not isinstance(s, str):
|
||||
raise TypeError('Only strings are supported for scalars')
|
||||
raise TypeError("Only strings are supported for scalars")
|
||||
|
||||
return GLib.Variant('s', s)
|
||||
return GLib.Variant("s", s)
|
||||
|
||||
|
||||
def variant_list(l):
|
||||
@@ -50,11 +50,11 @@ def variant_list(l):
|
||||
l_variant = list()
|
||||
for item in l:
|
||||
if item is None:
|
||||
item = ''
|
||||
item = ""
|
||||
if type(item) == str:
|
||||
l_variant.append(variant_str(item))
|
||||
elif type(item) == text_type:
|
||||
l_variant.append(variant_str(item.encode('utf-8')))
|
||||
l_variant.append(variant_str(item.encode("utf-8")))
|
||||
elif type(item) == list:
|
||||
l_variant.append(variant_list(item))
|
||||
elif type(item) == dict:
|
||||
@@ -62,33 +62,33 @@ def variant_list(l):
|
||||
elif type(item) == bool:
|
||||
l_variant.append(variant_bool(item))
|
||||
else:
|
||||
raise TypeError('Cannot convert unknown type')
|
||||
return GLib.Variant('av', l_variant)
|
||||
raise TypeError("Cannot convert unknown type")
|
||||
return GLib.Variant("av", l_variant)
|
||||
|
||||
|
||||
def variant_bool(b):
|
||||
""" Converts a boolean to a GLib.Varant
|
||||
"""
|
||||
    if not isinstance(b, bool):
        raise TypeError('Only booleans are supported')
        raise TypeError("Only booleans are supported")

    return GLib.Variant('b', b)
    return GLib.Variant("b", b)


def dict_values(d):
    """ Converts each dictionary value to a GLib.Variant
    """
    if not isinstance(d, dict):
        raise TypeError('Only dictionaries are supported for mappings')
        raise TypeError("Only dictionaries are supported for mappings")

    d_variant = dict()
    for k, v in d.items():
        if v is None:
            v = ''
            v = ""
        if type(v) == str:
            d_variant[k] = variant_str(v)
        elif type(v) == text_type:
            d_variant[k] = variant_str(v.encode('utf-8'))
            d_variant[k] = variant_str(v.encode("utf-8"))
        elif type(v) == list:
            d_variant[k] = variant_list(v)
        elif type(v) == dict:
@@ -96,7 +96,7 @@ def dict_values(d):
        elif type(v) == bool:
            d_variant[k] = variant_bool(v)
        else:
            raise TypeError('Cannot convert unknown type')
            raise TypeError("Cannot convert unknown type")
    return d_variant


@@ -104,7 +104,7 @@ def variant_dict(d):
    """ Converts a dictionary to a dictionary of GLib.Variant
    """
    if not isinstance(d, dict):
        raise TypeError('Only dictionaries are supported for mappings')
        raise TypeError("Only dictionaries are supported for mappings")

    d_variant = dict_values(d)
    return GLib.Variant('a{sv}', d_variant)
    return GLib.Variant("a{sv}", d_variant)

@@ -58,7 +58,7 @@ level_flags["verbose"] = levels["info"]
level_flags["quiet"] = levels["error"]


log_format = '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s'
log_format = "%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s"


class ModuleBuildFileHandler(logging.FileHandler):
@@ -66,7 +66,8 @@ class ModuleBuildFileHandler(logging.FileHandler):
    FileHandler subclass which handles only messages generated during
    particular module build with `build_id` set in its constructor.
    """
    def __init__(self, build_id, filename, mode='a', encoding=None, delay=0):

    def __init__(self, build_id, filename, mode="a", encoding=None, delay=0):
        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
        self.build_id = build_id

@@ -88,6 +89,7 @@ class ModuleBuildLogs(object):
    """
    Manages ModuleBuildFileHandler logging handlers.
    """

    def __init__(self, build_logs_dir, build_logs_name_format, level=logging.INFO):
        """
        Creates new ModuleBuildLogs instance. Module build logs are stored
@@ -152,7 +154,7 @@ class ModuleBuildLogs(object):

class MBSLogger:
    def __init__(self):
        self._logger = logging.getLogger('MBS')
        self._logger = logging.getLogger("MBS")
        self._level = logging.NOTSET
        self._current_path = os.path.dirname(os.path.realpath(__file__))

@@ -173,33 +175,33 @@ class MBSLogger:
        self.level = level

    def debug(self, *args, **kwargs):
        return self._log_call('debug', args, kwargs)
        return self._log_call("debug", args, kwargs)

    def info(self, *args, **kwargs):
        return self._log_call('info', args, kwargs)
        return self._log_call("info", args, kwargs)

    def warning(self, *args, **kwargs):
        return self._log_call('warning', args, kwargs)
        return self._log_call("warning", args, kwargs)

    def error(self, *args, **kwargs):
        return self._log_call('error', args, kwargs)
        return self._log_call("error", args, kwargs)

    def critical(self, *args, **kwargs):
        return self._log_call('critical', args, kwargs)
        return self._log_call("critical", args, kwargs)

    def exception(self, *args, **kwargs):
        return self._log_call('exception', args, kwargs)
        return self._log_call("exception", args, kwargs)

    def log(self, *args, **kwargs):
        return self._log_call('log', args, kwargs)
        return self._log_call("log", args, kwargs)

    def _log_call(self, level_name, args, kwargs):
        caller_filename = inspect.stack()[2][1]
        caller_filename = os.path.normpath(caller_filename)
        if not caller_filename.startswith(self._current_path):
            log_name = 'MBS'
            log_name = "MBS"
        else:
            log_name = 'MBS' + caller_filename[len(self._current_path):-3].replace('/', '.')
            log_name = "MBS" + caller_filename[len(self._current_path):-3].replace("/", ".")
        return getattr(logging.getLogger(log_name), level_name)(*args, **kwargs)


@@ -231,6 +233,5 @@ def init_logging(conf):
        log = MBSLogger()
        log.level = conf.log_level
    else:
        logging.basicConfig(filename=conf.log_file, level=conf.log_level,
                            format=log_format)
        logging.basicConfig(filename=conf.log_file, level=conf.log_level, format=log_format)
        log = MBSLogger()

@@ -28,14 +28,17 @@ import flask_migrate
import logging
import os
import getpass
import textwrap

from werkzeug.datastructures import FileStorage
from module_build_service import app, conf, db, create_app
from module_build_service import models
from module_build_service.utils import (
    submit_module_build_from_yaml,
    load_local_builds, load_mmd, import_mmd,
    import_builds_from_local_dnf_repos
    load_local_builds,
    load_mmd,
    import_mmd,
    import_builds_from_local_dnf_repos,
)
from module_build_service.errors import StreamAmbigous
import module_build_service.messaging
@@ -43,31 +46,36 @@ import module_build_service.scheduler.consumer


manager = Manager(create_app)
help_args = ('-?', '--help')
help_args = ("-?", "--help")
manager.help_args = help_args
migrate = flask_migrate.Migrate(app, db)
manager.add_command('db', flask_migrate.MigrateCommand)
manager.add_option('-d', '--debug', dest='debug', action='store_true')
manager.add_option('-v', '--verbose', dest='verbose', action='store_true')
manager.add_option('-q', '--quiet', dest='quiet', action='store_true')
manager.add_command("db", flask_migrate.MigrateCommand)
manager.add_option("-d", "--debug", dest="debug", action="store_true")
manager.add_option("-v", "--verbose", dest="verbose", action="store_true")
manager.add_option("-q", "--quiet", dest="quiet", action="store_true")


def console_script_help(f):
    @wraps(f)
    def wrapped(*args, **kwargs):
        import sys

        if any([arg in help_args for arg in sys.argv[1:]]):
            command = os.path.basename(sys.argv[0])
            print("""{0}
            print(textwrap.dedent(
                """\
                {0}

Usage: {0} [{1}]
                Usage: {0} [{1}]

See also:
mbs-manager(1)""".format(command,
                         '|'.join(help_args)))
                See also:
                mbs-manager(1)
                """).strip().format(command, "|".join(help_args))
            )
            sys.exit(2)
        r = f(*args, **kwargs)
        return r

    return wrapped


@@ -76,10 +84,9 @@ See also:
def upgradedb():
    """ Upgrades the database schema to the latest revision
    """
    app.config["SERVER_NAME"] = 'localhost'
    app.config["SERVER_NAME"] = "localhost"
    # TODO: configurable?
    migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                  'migrations')
    migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations")
    with app.app_context():
        flask_migrate.upgrade(directory=migrations_dir)


@@ -101,28 +108,36 @@ def import_module(mmd_file):
    import_mmd(db.session, mmd)


@manager.option('--stream', action='store', dest="stream")
@manager.option('--file', action='store', dest="yaml_file")
@manager.option('--srpm', action='append', default=[], dest="srpms", metavar='SRPM')
@manager.option('--skiptests', action='store_true', dest="skiptests")
@manager.option('--offline', action='store_true', dest="offline")
@manager.option('-l', '--add-local-build', action='append', default=None, dest='local_build_nsvs')
@manager.option('-s', '--set-stream', action='append', default=[], dest='default_streams')
@manager.option('-r', '--platform-repo-file', action='append', default=[],
                dest='platform_repofiles')
@manager.option('-p', '--platform-id', action='store', default=None,
                dest='platform_id')
def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None,
                         stream=None, skiptests=False, default_streams=None,
                         offline=False, platform_repofiles=None, platform_id=None):
@manager.option("--stream", action="store", dest="stream")
@manager.option("--file", action="store", dest="yaml_file")
@manager.option("--srpm", action="append", default=[], dest="srpms", metavar="SRPM")
@manager.option("--skiptests", action="store_true", dest="skiptests")
@manager.option("--offline", action="store_true", dest="offline")
@manager.option("-l", "--add-local-build", action="append", default=None, dest="local_build_nsvs")
@manager.option("-s", "--set-stream", action="append", default=[], dest="default_streams")
@manager.option(
    "-r", "--platform-repo-file", action="append", default=[], dest="platform_repofiles"
)
@manager.option("-p", "--platform-id", action="store", default=None, dest="platform_id")
def build_module_locally(
    local_build_nsvs=None,
    yaml_file=None,
    srpms=None,
    stream=None,
    skiptests=False,
    default_streams=None,
    offline=False,
    platform_repofiles=None,
    platform_id=None,
):
    """ Performs local module build using Mock
    """
    if 'SERVER_NAME' not in app.config or not app.config['SERVER_NAME']:
        app.config["SERVER_NAME"] = 'localhost'
    if "SERVER_NAME" not in app.config or not app.config["SERVER_NAME"]:
        app.config["SERVER_NAME"] = "localhost"

    if app.config['RESOLVER'] == 'db':
        raise ValueError("Please set RESOLVER to 'mbs' in your "
                         "configuration for local builds.")
    if app.config["RESOLVER"] == "db":
        raise ValueError(
            "Please set RESOLVER to 'mbs' in your configuration for local builds.")

    with app.app_context():
        conf.set_item("system", "mock")
@@ -130,10 +145,10 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None,

        # Use our own local SQLite3 database.
        confdir = os.path.abspath(os.getcwd())
        dbdir = os.path.abspath(os.path.join(confdir, '..')) if confdir.endswith('conf') \
            else confdir
        dbpath = '/{0}'.format(os.path.join(dbdir, '.mbs_local_build.db'))
        dburi = 'sqlite://' + dbpath
        dbdir = \
            os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir
        dbpath = "/{0}".format(os.path.join(dbdir, ".mbs_local_build.db"))
        dburi = "sqlite://" + dbpath
        app.config["SQLALCHEMY_DATABASE_URI"] = dburi
        conf.set_item("sqlalchemy_database_uri", dburi)
        if os.path.exists(dbpath):
@@ -164,11 +179,11 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None,
        handle.filename = filename
        try:
            modules_list = submit_module_build_from_yaml(
                username, handle, params, stream=str(stream), skiptests=skiptests)
                username, handle, params, stream=str(stream), skiptests=skiptests
            )
        except StreamAmbigous as e:
            logging.error(str(e))
            logging.error(
                "Use '-s module_name:module_stream' to choose the stream")
            logging.error("Use '-s module_name:module_stream' to choose the stream")
            return

        stop = module_build_service.scheduler.make_simple_stop_condition(db.session)
@@ -176,57 +191,60 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, srpms=None,
        # Run the consumer until stop_condition returns True
        module_build_service.scheduler.main([], stop)

        if any(module.state == models.BUILD_STATES['failed'] for module in modules_list):
            raise RuntimeError('Module build failed')
        if any(module.state == models.BUILD_STATES["failed"] for module in modules_list):
            raise RuntimeError("Module build failed")


@manager.option('identifier', metavar='NAME:STREAM[:VERSION[:CONTEXT]]',
                help='Identifier for selecting module builds to retire')
@manager.option('--confirm', action='store_true', default=False,
                help='Perform retire operation without prompting')
@manager.option(
    "identifier",
    metavar="NAME:STREAM[:VERSION[:CONTEXT]]",
    help="Identifier for selecting module builds to retire",
)
@manager.option(
    "--confirm",
    action="store_true",
    default=False,
    help="Perform retire operation without prompting",
)
def retire(identifier, confirm=False):
    """ Retire module build(s) by placing them into 'garbage' state.
    """
    # Parse identifier and build query
    parts = identifier.split(':')
    parts = identifier.split(":")
    if len(parts) < 2:
        raise ValueError('Identifier must contain at least NAME:STREAM')
        raise ValueError("Identifier must contain at least NAME:STREAM")
    if len(parts) >= 5:
        raise ValueError('Too many parts in identifier')
        raise ValueError("Too many parts in identifier")

    filter_by_kwargs = {
        'state': models.BUILD_STATES['ready'],
        'name': parts[0],
        'stream': parts[1],
    }
    filter_by_kwargs = {"state": models.BUILD_STATES["ready"], "name": parts[0], "stream": parts[1]}

    if len(parts) >= 3:
        filter_by_kwargs['version'] = parts[2]
        filter_by_kwargs["version"] = parts[2]
    if len(parts) >= 4:
        filter_by_kwargs['context'] = parts[3]
        filter_by_kwargs["context"] = parts[3]

    # Find module builds to retire
    module_builds = db.session.query(models.ModuleBuild).filter_by(**filter_by_kwargs).all()

    if not module_builds:
        logging.info('No module builds found.')
        logging.info("No module builds found.")
        return

    logging.info('Found %d module builds:', len(module_builds))
    logging.info("Found %d module builds:", len(module_builds))
    for build in module_builds:
        logging.info('\t%s', ':'.join((build.name, build.stream, build.version, build.context)))
        logging.info("\t%s", ":".join((build.name, build.stream, build.version, build.context)))

    # Prompt for confirmation
    is_confirmed = confirm or prompt_bool('Retire {} module builds?'.format(len(module_builds)))
    is_confirmed = confirm or prompt_bool("Retire {} module builds?".format(len(module_builds)))
    if not is_confirmed:
        logging.info('Module builds were NOT retired.')
        logging.info("Module builds were NOT retired.")
        return

    # Retire module builds
    for build in module_builds:
        build.transition(conf, models.BUILD_STATES['garbage'], 'Module build retired')
        build.transition(conf, models.BUILD_STATES["garbage"], "Module build retired")
    db.session.commit()
    logging.info('Module builds retired.')
    logging.info("Module builds retired.")


@console_script_help
@@ -238,13 +256,9 @@ def run(host=None, port=None, debug=None):
    port = port or conf.port
    debug = debug or conf.debug

    logging.info('Starting Module Build Service frontend')
    logging.info("Starting Module Build Service frontend")

    app.run(
        host=host,
        port=port,
        debug=debug
    )
    app.run(host=host, port=port, debug=debug)


def manager_wrapper():

@@ -66,9 +66,10 @@ class BaseMessage(object):
            "{}={!r}".format(name, getattr(self, name))
            if param.default != param.empty
            else repr(getattr(self, name))
            for name, param in init_sig.parameters.items())
            for name, param in init_sig.parameters.items()
        )

        return "{}({})".format(type(self).__name__, ', '.join(args_strs))
        return "{}({})".format(type(self).__name__, ", ".join(args_strs))

    def __getitem__(self, key):
        """ Used to trick moksha into thinking we are a dict. """
@@ -87,13 +88,11 @@ class BaseMessage(object):


class MessageParser(object):

    def parse(self, msg):
        raise NotImplementedError()


class FedmsgMessageParser(MessageParser):

    def parse(self, msg):
        """
        Takes a fedmsg topic and message and converts it to a message object
@@ -101,83 +100,105 @@ class FedmsgMessageParser(MessageParser):
        :return: an object of BaseMessage descent if the message is a type
        that the app looks for, otherwise None is returned
        """
        if 'body' in msg:
            msg = msg['body']
        topic = msg['topic']
        topic_categories = _messaging_backends['fedmsg']['services']
        categories_re = '|'.join(map(re.escape, topic_categories))
        if "body" in msg:
            msg = msg["body"]
        topic = msg["topic"]
        topic_categories = _messaging_backends["fedmsg"]["services"]
        categories_re = "|".join(map(re.escape, topic_categories))
        regex_pattern = re.compile(
            r'(?P<category>' + categories_re + r')'
            r'(?:(?:\.)(?P<object>build|repo|module|decision))?'
            r'(?:(?:\.)(?P<subobject>state|build))?'
            r'(?:\.)(?P<event>change|done|end|tag|update)$'
            r"(?P<category>" + categories_re + r")"
            r"(?:(?:\.)(?P<object>build|repo|module|decision))?"
            r"(?:(?:\.)(?P<subobject>state|build))?"
            r"(?:\.)(?P<event>change|done|end|tag|update)$"
        )
        regex_results = re.search(regex_pattern, topic)

        if regex_results:
            category = regex_results.group('category')
            object = regex_results.group('object')
            subobject = regex_results.group('subobject')
            event = regex_results.group('event')
            category = regex_results.group("category")
            object = regex_results.group("object")
            subobject = regex_results.group("subobject")
            event = regex_results.group("event")

            msg_id = msg.get('msg_id')
            msg_inner_msg = msg.get('msg')
            msg_id = msg.get("msg_id")
            msg_inner_msg = msg.get("msg")

            # If there isn't a msg dict in msg then this message can be skipped
            if not msg_inner_msg:
                log.debug(('Skipping message without any content with the '
                           'topic "{0}"').format(topic))
                log.debug(
                    "Skipping message without any content with the " 'topic "{0}"'.format(topic))
                return None

            msg_obj = None

            # Ignore all messages from the secondary koji instances.
            if category == 'buildsys':
                instance = msg_inner_msg.get('instance', 'primary')
                if instance != 'primary':
            if category == "buildsys":
                instance = msg_inner_msg.get("instance", "primary")
                if instance != "primary":
                    log.debug("Ignoring message from %r koji hub." % instance)
                    return

            if category == 'buildsys' and object == 'build' and \
                    subobject == 'state' and event == 'change':
                build_id = msg_inner_msg.get('build_id')
                task_id = msg_inner_msg.get('task_id')
                build_new_state = msg_inner_msg.get('new')
                build_name = msg_inner_msg.get('name')
                build_version = msg_inner_msg.get('version')
                build_release = msg_inner_msg.get('release')
            if (
                category == "buildsys"
                and object == "build"
                and subobject == "state"
                and event == "change"
            ):
                build_id = msg_inner_msg.get("build_id")
                task_id = msg_inner_msg.get("task_id")
                build_new_state = msg_inner_msg.get("new")
                build_name = msg_inner_msg.get("name")
                build_version = msg_inner_msg.get("version")
                build_release = msg_inner_msg.get("release")

                msg_obj = KojiBuildChange(
                    msg_id, build_id, task_id, build_new_state, build_name,
                    build_version, build_release)
                    msg_id,
                    build_id,
                    task_id,
                    build_new_state,
                    build_name,
                    build_version,
                    build_release,
                )

            elif category == 'buildsys' and object == 'repo' and \
                    subobject is None and event == 'done':
                repo_tag = msg_inner_msg.get('tag')
            elif (
                category == "buildsys"
                and object == "repo"
                and subobject is None
                and event == "done"
            ):
                repo_tag = msg_inner_msg.get("tag")
                msg_obj = KojiRepoChange(msg_id, repo_tag)

            elif category == 'buildsys' and event == 'tag':
                tag = msg_inner_msg.get('tag')
                name = msg_inner_msg.get('name')
                version = msg_inner_msg.get('version')
                release = msg_inner_msg.get('release')
            elif category == "buildsys" and event == "tag":
                tag = msg_inner_msg.get("tag")
                name = msg_inner_msg.get("name")
                version = msg_inner_msg.get("version")
                release = msg_inner_msg.get("release")
                nvr = None
                if name and version and release:
                    nvr = '-'.join((name, version, release))
                    nvr = "-".join((name, version, release))
                msg_obj = KojiTagChange(msg_id, tag, name, nvr)

            elif category == 'mbs' and object == 'module' and \
                    subobject == 'state' and event == 'change':
                msg_obj = MBSModule(
                    msg_id, msg_inner_msg.get('id'), msg_inner_msg.get('state'))
            elif (
                category == "mbs"
                and object == "module"
                and subobject == "state"
                and event == "change"
            ):
                msg_obj = MBSModule(msg_id, msg_inner_msg.get("id"), msg_inner_msg.get("state"))

            elif (category == 'greenwave' and object == 'decision' and
                    subobject is None and event == 'update'):
            elif (
                category == "greenwave"
                and object == "decision"
                and subobject is None
                and event == "update"
            ):
                msg_obj = GreenwaveDecisionUpdate(
                    msg_id=msg_id,
                    decision_context=msg_inner_msg.get('decision_context'),
                    policies_satisfied=msg_inner_msg.get('policies_satisfied'),
                    subject_identifier=msg_inner_msg.get('subject_identifier'))
                    decision_context=msg_inner_msg.get("decision_context"),
                    policies_satisfied=msg_inner_msg.get("policies_satisfied"),
                    subject_identifier=msg_inner_msg.get("subject_identifier"),
                )

            # If the message matched the regex and is important to the app,
            # it will be returned
@@ -201,9 +222,19 @@ class KojiBuildChange(BaseMessage):
    :param module_build_id: the optional id of the module_build in the database
    :param state_reason: the optional reason as to why the state changed
    """
    def __init__(self, msg_id, build_id, task_id, build_new_state, build_name,
                 build_version, build_release, module_build_id=None,
                 state_reason=None):

    def __init__(
        self,
        msg_id,
        build_id,
        task_id,
        build_new_state,
        build_name,
        build_version,
        build_release,
        module_build_id=None,
        state_reason=None,
    ):
        if task_id is None:
            raise IgnoreMessage("KojiBuildChange with a null task_id is invalid.")
        super(KojiBuildChange, self).__init__(msg_id)
@@ -225,6 +256,7 @@ class KojiTagChange(BaseMessage):
    :param artifact: the name of tagged artifact (e.g. module-build-macros)
    :param nvr: the nvr of the tagged artifact
    """

    def __init__(self, msg_id, tag, artifact, nvr):
        super(KojiTagChange, self).__init__(msg_id)
        self.tag = tag
@@ -238,6 +270,7 @@ class KojiRepoChange(BaseMessage):
    :param msg_id: the id of the msg (e.g. 2016-SomeGUID)
    :param repo_tag: the repo's tag (e.g. SHADOWBUILD-f25-build)
    """

    def __init__(self, msg_id, repo_tag):
        super(KojiRepoChange, self).__init__(msg_id)
        self.repo_tag = repo_tag
@@ -250,6 +283,7 @@ class MBSModule(BaseMessage):
    :param module_build_id: the id of the module build
    :param module_build_state: the state of the module build
    """

    def __init__(self, msg_id, module_build_id, module_build_state):
        super(MBSModule, self).__init__(msg_id)
        self.module_build_id = module_build_id
@@ -259,8 +293,7 @@ class MBSModule(BaseMessage):
class GreenwaveDecisionUpdate(BaseMessage):
    """A class representing a message sent to topic greenwave.decision.update"""

    def __init__(self, msg_id, decision_context, policies_satisfied,
                 subject_identifier):
    def __init__(self, msg_id, decision_context, policies_satisfied, subject_identifier):
        super(GreenwaveDecisionUpdate, self).__init__(msg_id)
        self.decision_context = decision_context
        self.policies_satisfied = policies_satisfied
@@ -277,14 +310,18 @@ def publish(topic, msg, conf, service):
    :return:
    """
    try:
        handler = _messaging_backends[conf.messaging]['publish']
        handler = _messaging_backends[conf.messaging]["publish"]
    except KeyError:
        raise KeyError("No messaging backend found for %r in %r" % (
            conf.messaging, _messaging_backends.keys()))
        raise KeyError(
            "No messaging backend found for %r in %r" % (conf.messaging, _messaging_backends.keys())
        )

    from module_build_service.monitor import (
        messaging_tx_to_send_counter, messaging_tx_sent_ok_counter,
        messaging_tx_failed_counter)
        messaging_tx_to_send_counter,
        messaging_tx_sent_ok_counter,
        messaging_tx_failed_counter,
    )

    messaging_tx_to_send_counter.inc()
    try:
        rv = handler(topic, msg, conf, service)
@@ -298,6 +335,7 @@ def publish(topic, msg, conf, service):
def _fedmsg_publish(topic, msg, conf, service):
    # fedmsg doesn't really need access to conf, however other backends do
    import fedmsg

    return fedmsg.publish(topic, msg=msg, modname=service)


@@ -318,11 +356,12 @@ def _in_memory_publish(topic, msg, conf, service):
    wrapped_msg = FedmsgMessageParser().parse({
        "msg_id": str(_in_memory_msg_id),
        "topic": service + "." + topic,
        "msg": msg,
        "msg": msg
    })

    # Put the message to queue.
    from module_build_service.scheduler.consumer import work_queue_put

    try:
        work_queue_put(wrapped_msg)
    except ValueError as e:
@@ -336,26 +375,25 @@ def _in_memory_publish(topic, msg, conf, service):


_fedmsg_backend = {
    'publish': _fedmsg_publish,
    'services': ['buildsys', 'mbs', 'greenwave'],
    'parser': FedmsgMessageParser(),
    'topic_suffix': '.',
    "publish": _fedmsg_publish,
    "services": ["buildsys", "mbs", "greenwave"],
    "parser": FedmsgMessageParser(),
    "topic_suffix": ".",
}
_in_memory_backend = {
    'publish': _in_memory_publish,
    'services': [],
    'parser': FedmsgMessageParser(),  # re-used. :)
    'topic_suffix': '.',
    "publish": _in_memory_publish,
    "services": [],
    "parser": FedmsgMessageParser(),  # re-used. :)
    "topic_suffix": ".",
}


_messaging_backends = {}
for entrypoint in pkg_resources.iter_entry_points('mbs.messaging_backends'):
for entrypoint in pkg_resources.iter_entry_points("mbs.messaging_backends"):
    _messaging_backends[entrypoint.name] = ep = entrypoint.load()
    required = ['publish', 'services', 'parser', 'topic_suffix']
    required = ["publish", "services", "parser", "topic_suffix"]
    if any([key not in ep for key in required]):
        raise ValueError('messaging backend %r is malformed: %r' % (
            entrypoint.name, ep))
        raise ValueError("messaging backend %r is malformed: %r" % (entrypoint.name, ep))

if not _messaging_backends:
    raise ValueError("No messaging plugins are installed or available.")

@@ -32,7 +32,7 @@ from module_build_service.models import ModuleBuild


class MMDResolverPolicy(enum.Enum):
    All = "all"    # All possible top-level combinations
    All = "all"  # All possible top-level combinations
    First = "first"  # All possible top-level combinations (filtered by N:S, first picked)


@@ -106,7 +106,8 @@ class MMDResolver(object):
        # This method creates such solve.Dep.
        stream_dep = lambda n, s: pool.Dep("module(%s:%s)" % (n, s))
        versioned_stream_dep = lambda n, s, v, op: pool.Dep("module(%s:%s)" % (n, s)).Rel(
            op, pool.Dep(str(v)))
            op, pool.Dep(str(v))
        )

        # There are relations between modules in `deps`. For example:
        # deps = [{'gtk': ['1'], 'foo': ['1']}]" means "gtk:1 and foo:1" are both required.
@@ -144,14 +145,15 @@ class MMDResolver(object):

                # In case x.y.z versioning is not used for this base module, do not
                # use versions solv.Dep.
                if len(str(ModuleBuild.get_stream_version(
                        stream_for_version, right_pad=False))) < 5:
                stream_version_str = str(
                    ModuleBuild.get_stream_version(stream_for_version, right_pad=False))
                if len(stream_version_str) < 5:
                    if stream.startswith("-"):
                        req_neg = rel_or_dep(
                            req_neg, solv.REL_OR, stream_dep(name, stream[1:]))
                            req_neg, solv.REL_OR, stream_dep(name, stream[1:])
                        )
                    else:
                        req_pos = rel_or_dep(
                            req_pos, solv.REL_OR, stream_dep(name, stream))
                        req_pos = rel_or_dep(req_pos, solv.REL_OR, stream_dep(name, stream))
                else:
                    # The main reason why to use `exact_versions` is the case when
                    # adding deps for the input module we want to resolve. This module
@@ -178,19 +180,23 @@ class MMDResolver(object):
                    if not exact_versions:
                        op |= solv.REL_GT
                    version = ModuleBuild.get_stream_version(
                        stream_for_version, right_pad=False)
                        stream_for_version, right_pad=False
                    )
                    if stream.startswith("-"):
                        req_neg = rel_or_dep(
                            req_neg, solv.REL_OR,
                            versioned_stream_dep(name, stream[1:], version, op))
                            req_neg,
                            solv.REL_OR,
                            versioned_stream_dep(name, stream[1:], version, op),
                        )
                    else:
                        req_pos = rel_or_dep(
                            req_pos, solv.REL_OR,
                            versioned_stream_dep(name, stream, version, op))
                            req_pos,
                            solv.REL_OR,
                            versioned_stream_dep(name, stream, version, op),
                        )
                else:
                    if stream.startswith("-"):
                        req_neg = rel_or_dep(
                            req_neg, solv.REL_OR, stream_dep(name, stream[1:]))
                        req_neg = rel_or_dep(req_neg, solv.REL_OR, stream_dep(name, stream[1:]))
                    else:
                        req_pos = rel_or_dep(req_pos, solv.REL_OR, stream_dep(name, stream))

@@ -291,9 +297,10 @@ class MMDResolver(object):
        # Helper method to return the dependencies of `mmd` in the {name: [streams], ... form}.
        # The `fn` is either "get_requires" or "get_buildrequires" str depending on whether
        # the return deps should be runtime requires or buildrequires.
        normdeps = lambda mmd, fn: [{name: streams.get()
                                     for name, streams in getattr(dep, fn)().items()}
                                    for dep in mmd.get_dependencies()]
        normdeps = lambda mmd, fn: [
            {name: streams.get() for name, streams in getattr(dep, fn)().items()}
            for dep in mmd.get_dependencies()
        ]

        base_module_stream_overrides = self._get_base_module_stream_overrides(mmd)

@@ -319,20 +326,21 @@ class MMDResolver(object):
            # This is used for example to find the buildrequired module when
            # no particular stream is used - for example when buildrequiring
            # "gtk: []"
            solvable.add_deparray(solv.SOLVABLE_PROVIDES,
                                  pool.Dep("module(%s)" % n))
            solvable.add_deparray(solv.SOLVABLE_PROVIDES, pool.Dep("module(%s)" % n))
            # Add "Provides: module(name:stream) = version", so we can find buildrequired
            # modules when "gtk:[1]" is used and also choose the latest version.
            solvable.add_deparray(solv.SOLVABLE_PROVIDES,
                                  pool.Dep("module(%s:%s)" % (n, s)).Rel(
                                      solv.REL_EQ, pool.Dep(str(v))))
            solvable.add_deparray(
                solv.SOLVABLE_PROVIDES,
                pool.Dep("module(%s:%s)" % (n, s)).Rel(solv.REL_EQ, pool.Dep(str(v))),
            )

            self._add_base_module_provides(solvable, mmd)

            # Fill in the "Requires" of this module, so we can track its dependencies
            # on other modules.
            requires = self._deps2reqs(normdeps(mmd, "get_requires"),
                                       base_module_stream_overrides, False)
            requires = self._deps2reqs(
                normdeps(mmd, "get_requires"), base_module_stream_overrides, False
            )
            log.debug("Adding module %s with requires: %r", solvable.name, requires)
            solvable.add_deparray(solv.SOLVABLE_REQUIRES, requires)

@@ -491,8 +499,10 @@ class MMDResolver(object):
            deps[0] = deps[0][1:]
            deps[-1] = deps[-1][:-1]
            # Generate the new deps using the parserpmrichdep.
            deps = [self.pool.parserpmrichdep(dep) if dep.startswith("(") else self.pool.Dep(dep)
                    for dep in deps]
            deps = [
                self.pool.parserpmrichdep(dep) if dep.startswith("(") else self.pool.Dep(dep)
                for dep in deps
            ]

        # 2) For each dep (name:stream), get the set of all solvables in particular NSVCs,
        #    which provides that name:stream. Then use itertools.product() to actually
@@ -519,8 +529,10 @@ class MMDResolver(object):
            # we are currently trying, otherwise it would just choose some random ones.
            # We do that by FAVORING those modules - this is done in libsolv by another
            # job prepending to our main job to resolve the deps of input module.
            jobs = [self.pool.Job(solv.Job.SOLVER_FAVOR | solv.Job.SOLVER_SOLVABLE, s.id)
                    for s in opt] + [job]
            jobs = [
                self.pool.Job(solv.Job.SOLVER_FAVOR | solv.Job.SOLVER_SOLVABLE, s.id)
                for s in opt
            ] + [job]

            # Log the job.
            log.debug("Jobs:")
@@ -533,10 +545,11 @@ class MMDResolver(object):
                if problem_str:
                    err_msg = problem_str
                else:
                    err_msg = ', '.join(str(p) for p in problems)
                    err_msg = ", ".join(str(p) for p in problems)
                raise RuntimeError(
                    'Problems were found during module dependency resolution: {}'
                    .format(err_msg))
                    "Problems were found during module dependency resolution: {}".format(
                        err_msg)
                )
            # Find out what was actually resolved by libsolv to be installed as a result
            # of our jobs - those are the modules we are looking for.
            newsolvables = solver.transaction().newsolvables()
@@ -603,9 +616,11 @@ class MMDResolver(object):
                transactions[ns] = [trans[sorted_trans[0][0]]]

        # Convert the solvables in alternatives to nsvc and return them as set of frozensets.
        return set(frozenset(s2nsvc(s) for s in transactions[0])
                   for src_alternatives in alternatives.values()
                   for transactions in src_alternatives.values())
        return set(
            frozenset(s2nsvc(s) for s in transactions[0])
            for src_alternatives in alternatives.values()
            for transactions in src_alternatives.values()
        )

    @staticmethod
    def _detect_transitive_stream_collision(problems):
@@ -636,9 +651,9 @@ class MMDResolver(object):
            pair.sort()  # only for pretty print
            yield pair

        formatted_conflicts_pairs = ', '.join(
            '{} and {}'.format(*item) for item in find_conflicts_pairs()
        formatted_conflicts_pairs = ", ".join(
            "{} and {}".format(*item) for item in find_conflicts_pairs()
        )
        if formatted_conflicts_pairs:
            return 'The module has conflicting buildrequires of: {}'.format(
            return "The module has conflicting buildrequires of: {}".format(
                formatted_conflicts_pairs)

@@ -43,7 +43,7 @@ import module_build_service.messaging
from module_build_service.glib import from_variant_dict
from module_build_service import db, log, get_url_for, app, conf

DEFAULT_MODULE_CONTEXT = '00000000'
DEFAULT_MODULE_CONTEXT = "00000000"


# Just like koji.BUILD_STATES, except our own codes for modules.
@@ -58,32 +58,26 @@ BUILD_STATES = {
    # fetch them. If this is all good, then we set the build to the 'wait'
    # state. If anything goes wrong, we jump immediately to the 'failed' state.
    "init": 0,

    # Here, the scheduler picks up tasks in wait and switches to build
    # immediately. Eventually, we'll add throttling logic here so we don't
    # submit too many builds for the build system to handle
    "wait": 1,

    # The scheduler works on builds in this state. We prepare the buildroot,
    # submit builds for all the components, and wait for the results to come
    # back.
    "build": 2,

    # Once all components have succeeded, we set the top-level module build
    # to 'done'.
    "done": 3,

    # If any of the component builds fail, then we set the top-level module
    # build to 'failed' also.
    "failed": 4,

    # This is a state to be set when a module is ready to be part of a
    # larger compose. perhaps it is set by an external service that knows
    # about the Grand Plan.
    "ready": 5,

    # If the module has failed and was garbage collected by MBS
    "garbage": 6
    "garbage": 6,
}

INVERSE_BUILD_STATES = {v: k for k, v in BUILD_STATES.items()}
@@ -115,13 +109,12 @@ def _setup_event_listeners(session):
    """
    Starts listening for events related to database session.
    """
    if not sqlalchemy.event.contains(
            session, 'before_commit', session_before_commit_handlers):
        sqlalchemy.event.listen(session, 'before_commit',
                                session_before_commit_handlers)
    if not sqlalchemy.event.contains(session, "before_commit", session_before_commit_handlers):
        sqlalchemy.event.listen(session, "before_commit", session_before_commit_handlers)

    # initialize DB event listeners from the monitor module
    from module_build_service.monitor import db_hook_event_listeners

    db_hook_event_listeners(session.bind.engine)


@@ -134,16 +127,15 @@ def make_session(conf):
    # Do not use scoped_session in case we are using in-memory database,
    # because we want to use the same session across all threads to be able
    # to use the same in-memory database in tests.
    if conf.sqlalchemy_database_uri == 'sqlite://':
    if conf.sqlalchemy_database_uri == "sqlite://":
        _setup_event_listeners(db.session)
        yield db.session
        db.session.commit()
        return

    # Needs to be set to create app_context.
    if (not has_app_context() and
            ('SERVER_NAME' not in app.config or not app.config['SERVER_NAME'])):
        app.config['SERVER_NAME'] = 'localhost'
    if not has_app_context() and ("SERVER_NAME" not in app.config or not app.config["SERVER_NAME"]):
        app.config["SERVER_NAME"] = "localhost"

    # If there is no app_context, we have to create one before creating
    # the session. If we would create app_context after the session (this
@@ -152,9 +144,7 @@ def make_session(conf):
    with app.app_context() if not has_app_context() else _dummy_context_mgr():
        # TODO - we could use ZopeTransactionExtension() here some day for
        # improved safety on the backend.
        engine = sqlalchemy.engine_from_config({
            'sqlalchemy.url': conf.sqlalchemy_database_uri,
        })
        engine = sqlalchemy.engine_from_config({"sqlalchemy.url": conf.sqlalchemy_database_uri})
        session = scoped_session(sessionmaker(bind=engine))()
        _setup_event_listeners(session)
        try:
@@ -174,20 +164,20 @@ class MBSBase(db.Model):


module_builds_to_module_buildrequires = db.Table(
    'module_builds_to_module_buildrequires',
    db.Column('module_id', db.Integer, db.ForeignKey('module_builds.id'), nullable=False),
    db.Column('module_buildrequire_id', db.Integer, db.ForeignKey('module_builds.id'),
              nullable=False),
    db.UniqueConstraint('module_id', 'module_buildrequire_id', name='unique_buildrequires')
    "module_builds_to_module_buildrequires",
    db.Column("module_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False),
    db.Column(
        "module_buildrequire_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False),
    db.UniqueConstraint("module_id", "module_buildrequire_id", name="unique_buildrequires"),
)


module_builds_to_virtual_streams = db.Table(
    'module_builds_to_virtual_streams',
    db.Column('module_build_id', db.Integer, db.ForeignKey('module_builds.id'), nullable=False),
    db.Column('virtual_stream_id', db.Integer, db.ForeignKey('virtual_streams.id'), nullable=False),
    "module_builds_to_virtual_streams",
    db.Column("module_build_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False),
    db.Column("virtual_stream_id", db.Integer, db.ForeignKey("virtual_streams.id"), nullable=False),
    db.UniqueConstraint(
        'module_build_id', 'virtual_stream_id', name='unique_module_to_virtual_stream')
        "module_build_id", "virtual_stream_id", name="unique_module_to_virtual_stream"),
)


@@ -218,10 +208,7 @@ class ModuleBuild(MBSBase):
    new_repo_task_id = db.Column(db.Integer)
    rebuild_strategy = db.Column(db.String, nullable=False)
    virtual_streams = db.relationship(
        'VirtualStream',
        secondary=module_builds_to_virtual_streams,
        back_populates='module_builds',
    )
        "VirtualStream", secondary=module_builds_to_virtual_streams, back_populates="module_builds")

    # A monotonically increasing integer that represents which batch or
    # iteration this module is currently on for successive rebuilds of its
@@ -231,18 +218,19 @@ class ModuleBuild(MBSBase):
    # This is only used for base modules for ordering purposes (f27.0.1 => 270001)
    stream_version = db.Column(db.Float)
    buildrequires = db.relationship(
        'ModuleBuild',
        "ModuleBuild",
        secondary=module_builds_to_module_buildrequires,
        primaryjoin=module_builds_to_module_buildrequires.c.module_id == id,
        secondaryjoin=module_builds_to_module_buildrequires.c.module_buildrequire_id == id,
        backref='buildrequire_for'
        backref="buildrequire_for",
    )

    rebuild_strategies = {
        'all': 'All components will be rebuilt',
        'changed-and-after': ('All components that have changed and those in subsequent batches '
                              'will be rebuilt'),
        'only-changed': 'All changed components will be rebuilt'
        "all": "All components will be rebuilt",
        "changed-and-after": (
            "All components that have changed and those in subsequent batches will be rebuilt"
        ),
        "only-changed": "All changed components will be rebuilt",
    }

    def current_batch(self, state=None):
@@ -282,8 +270,7 @@ class ModuleBuild(MBSBase):
            ]
        else:
            return [
                component for component in self.component_builds
                if component.batch <= self.batch
                component for component in self.component_builds if component.batch <= self.batch
            ]

    @staticmethod
@@ -305,30 +292,40 @@ class ModuleBuild(MBSBase):
        streams for given module `name`.
        """
        # Prepare the subquery to find out all unique name:stream records.
        subq = session.query(
            func.max(ModuleBuild.id).label("maxid"),
            func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger))
        ).group_by(ModuleBuild.stream).filter_by(
            name=name, state=BUILD_STATES["ready"]).subquery('t2')
        subq = (
            session.query(
                func.max(ModuleBuild.id).label("maxid"),
                func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)),
            )
            .group_by(ModuleBuild.stream)
            .filter_by(name=name, state=BUILD_STATES["ready"])
            .subquery("t2")
        )

        # Use the subquery to actually return all the columns for its results.
        query = session.query(ModuleBuild).join(
            subq, and_(ModuleBuild.id == subq.c.maxid))
        query = session.query(ModuleBuild).join(subq, and_(ModuleBuild.id == subq.c.maxid))
        return query.all()

    @staticmethod
    def _get_last_builds_in_stream_query(session, name, stream, **kwargs):
        # Prepare the subquery to find out all unique name:stream records.
        subq = session.query(
            func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion")
        ).filter_by(name=name, state=BUILD_STATES["ready"], stream=stream, **kwargs).subquery('t2')
        subq = (
            session.query(
                func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion")
            )
            .filter_by(name=name, state=BUILD_STATES["ready"], stream=stream, **kwargs)
            .subquery("t2")
        )

        # Use the subquery to actually return all the columns for its results.
        query = session.query(ModuleBuild).join(
            subq, and_(
            subq,
            and_(
                ModuleBuild.name == name,
                ModuleBuild.stream == stream,
                sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion))
                sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion,
            ),
        )
        return query

    @staticmethod
@@ -370,8 +367,11 @@ class ModuleBuild(MBSBase):
        Returns build defined by NSVC. Optional kwargs are passed to SQLAlchemy
        filter_by method.
        """
        return session.query(ModuleBuild).filter_by(
            name=name, stream=stream, version=str(version), context=context, **kwargs).first()
        return (
            session.query(ModuleBuild)
            .filter_by(name=name, stream=stream, version=str(version), context=context, **kwargs)
            .first()
        )

    @staticmethod
    def get_scratch_builds_from_nsvc(session, name, stream, version, context, **kwargs):
@@ -379,9 +379,12 @@ class ModuleBuild(MBSBase):
        Returns all scratch builds defined by NSVC. This is done by using the supplied `context`
        as a match prefix. Optional kwargs are passed to SQLAlchemy filter_by method.
        """
        return session.query(ModuleBuild).filter_by(
            name=name, stream=stream, version=str(version), scratch=True, **kwargs)\
            .filter(ModuleBuild.context.like(context + '%')).all()
        return (
            session.query(ModuleBuild)
            .filter_by(name=name, stream=stream, version=str(version), scratch=True, **kwargs)
            .filter(ModuleBuild.context.like(context + "%"))
            .all()
        )

    @staticmethod
    def _add_stream_version_lte_filter(session, query, stream_version):
@@ -398,9 +401,8 @@ class ModuleBuild(MBSBase):
        # Compute the minimal stream_version. For example, for `stream_version` 281234,
        # the minimal `stream_version` is 280000.
        min_stream_version = (stream_version // 10000) * 10000
        return query\
            .filter(ModuleBuild.stream_version <= stream_version)\
            .filter(ModuleBuild.stream_version >= min_stream_version)
        return query.filter(ModuleBuild.stream_version <= stream_version).filter(
            ModuleBuild.stream_version >= min_stream_version)

    @staticmethod
    def _add_virtual_streams_filter(session, query, virtual_streams):
@@ -416,11 +418,11 @@ class ModuleBuild(MBSBase):
        if not virtual_streams:
            return query

        return query.join(
            VirtualStream, ModuleBuild.virtual_streams
        ).filter(
            VirtualStream.name.in_(virtual_streams)
        ).distinct(ModuleBuild.id)
        return (
            query.join(VirtualStream, ModuleBuild.virtual_streams)
            .filter(VirtualStream.name.in_(virtual_streams))
            .distinct(ModuleBuild.id)
        )

    @staticmethod
    def get_last_builds_in_stream_version_lte(session, name, stream_version, virtual_streams=None):
@@ -437,10 +439,12 @@ class ModuleBuild(MBSBase):
        :param list virtual_streams: A list of the virtual streams to filter on. The filtering uses
            "or" logic. When falsy, no filtering occurs.
        """
        query = session.query(ModuleBuild)\
            .filter(ModuleBuild.name == name)\
            .filter(ModuleBuild.state == BUILD_STATES["ready"])\
        query = (
            session.query(ModuleBuild)
            .filter(ModuleBuild.name == name)
            .filter(ModuleBuild.state == BUILD_STATES["ready"])
            .order_by(ModuleBuild.version.desc())
        )

        query = ModuleBuild._add_stream_version_lte_filter(session, query, stream_version)
        query = ModuleBuild._add_virtual_streams_filter(session, query, virtual_streams)
@@ -485,19 +489,20 @@ class ModuleBuild(MBSBase):

    def mmd(self):
        from module_build_service.utils import load_mmd

        try:
            return load_mmd(self.modulemd)
        except Exception:
            log.exception('An error occurred while trying to parse the modulemd')
            log.exception("An error occurred while trying to parse the modulemd")
            raise ValueError("Invalid modulemd")

    @property
    def previous_non_failed_state(self):
        for trace in reversed(self.module_builds_trace):
            if trace.state != BUILD_STATES['failed']:
            if trace.state != BUILD_STATES["failed"]:
                return trace.state

    @validates('state')
    @validates("state")
    def validate_state(self, key, field):
        if field in BUILD_STATES.values():
            return field
@@ -505,22 +510,22 @@ class ModuleBuild(MBSBase):
            return BUILD_STATES[field]
        raise ValueError("%s: %s, not in %r" % (key, field, BUILD_STATES))

    @validates('rebuild_strategy')
    @validates("rebuild_strategy")
    def validate_rebuild_stategy(self, key, rebuild_strategy):
        if rebuild_strategy not in self.rebuild_strategies.keys():
            choices = ', '.join(self.rebuild_strategies.keys())
            raise ValueError('The rebuild_strategy of "{0}" is invalid. Choose from: {1}'
                             .format(rebuild_strategy, choices))
            choices = ", ".join(self.rebuild_strategies.keys())
            raise ValueError(
                'The rebuild_strategy of "{0}" is invalid. Choose from: {1}'.format(
                    rebuild_strategy, choices)
            )
        return rebuild_strategy

    @classmethod
    def from_module_event(cls, session, event):
        if type(event) == module_build_service.messaging.MBSModule:
            return session.query(cls).filter(
                cls.id == event.module_build_id).first()
            return session.query(cls).filter(cls.id == event.module_build_id).first()
        else:
            raise ValueError("%r is not a module message."
                             % type(event).__name__)
            raise ValueError("%r is not a module message." % type(event).__name__)

    @staticmethod
    def contexts_from_mmd(mmd_str):
@@ -538,28 +543,31 @@ class ModuleBuild(MBSBase):
        context hashes.
        """
        from module_build_service.utils import load_mmd

        try:
            mmd = load_mmd(mmd_str)
        except Exception:
            raise ValueError("Invalid modulemd")
        mbs_xmd = mmd.get_xmd().get('mbs', {})
        mbs_xmd = mmd.get_xmd().get("mbs", {})
        rv = []

        # Get the buildrequires from the XMD section, because it contains
        # all the buildrequires as we resolved them using dependency resolver.
        # We have to use keys because GLib.Variant doesn't support `in` directly.
        if "buildrequires" not in mbs_xmd.keys():
            raise ValueError('The module\'s modulemd hasn\'t been formatted by MBS')
            raise ValueError("The module's modulemd hasn't been formatted by MBS")
        mmd_formatted_buildrequires = {
            dep: info['ref'] for dep, info in mbs_xmd["buildrequires"].items()}
            dep: info["ref"] for dep, info in mbs_xmd["buildrequires"].items()
        }
        property_json = json.dumps(OrderedDict(sorted(mmd_formatted_buildrequires.items())))
        rv.append(hashlib.sha1(property_json.encode('utf-8')).hexdigest())
        rv.append(hashlib.sha1(property_json.encode("utf-8")).hexdigest())

        # Get the streams of buildrequires and hash it.
        mmd_formatted_buildrequires = {
            dep: info['stream'] for dep, info in mbs_xmd["buildrequires"].items()}
            dep: info["stream"] for dep, info in mbs_xmd["buildrequires"].items()
        }
        property_json = json.dumps(OrderedDict(sorted(mmd_formatted_buildrequires.items())))
        build_context = hashlib.sha1(property_json.encode('utf-8')).hexdigest()
        build_context = hashlib.sha1(property_json.encode("utf-8")).hexdigest()
        rv.append(build_context)

        # Get the requires from the real "dependencies" section in MMD.
@@ -571,29 +579,45 @@ class ModuleBuild(MBSBase):
            mmd_requires[name] = mmd_requires[name].union(streams.get())

        # Sort the streams for each module name and also sort the module names.
        mmd_requires = {
            dep: sorted(list(streams)) for dep, streams in mmd_requires.items()}
        mmd_requires = {dep: sorted(list(streams)) for dep, streams in mmd_requires.items()}
        property_json = json.dumps(OrderedDict(sorted(mmd_requires.items())))
        runtime_context = hashlib.sha1(property_json.encode('utf-8')).hexdigest()
        runtime_context = hashlib.sha1(property_json.encode("utf-8")).hexdigest()
        rv.append(runtime_context)

        combined_hashes = '{0}:{1}'.format(build_context, runtime_context)
        context = hashlib.sha1(combined_hashes.encode('utf-8')).hexdigest()[:8]
        combined_hashes = "{0}:{1}".format(build_context, runtime_context)
        context = hashlib.sha1(combined_hashes.encode("utf-8")).hexdigest()[:8]
        rv.append(context)

return tuple(rv)
|
||||
|
||||
@property
def siblings(self):
query = self.query.filter_by(
name=self.name, stream=self.stream, version=self.version, scratch=self.scratch).options(
load_only('id')).filter(ModuleBuild.id != self.id)
query = (
self.query.filter_by(
name=self.name, stream=self.stream, version=self.version, scratch=self.scratch)
.options(load_only("id"))
.filter(ModuleBuild.id != self.id)
)
return [build.id for build in query.all()]

@classmethod
def create(cls, session, conf, name, stream, version, modulemd, scmurl, username,
context=None, rebuild_strategy=None, scratch=False, srpms=None,
publish_msg=True, **kwargs):
def create(
cls,
session,
conf,
name,
stream,
version,
modulemd,
scmurl,
username,
context=None,
rebuild_strategy=None,
scratch=False,
srpms=None,
publish_msg=True,
**kwargs
):
now = datetime.utcnow()
module = cls(
name=name,
@@ -624,14 +648,14 @@ class ModuleBuild(MBSBase):
session.commit()
if publish_msg:
module_build_service.messaging.publish(
service='mbs',
topic='module.state.change',
service="mbs",
topic="module.state.change",
msg=module.json(show_tasks=False), # Note the state is "init" here...
conf=conf,
)
return module

def transition(self, conf, state, state_reason=None, failure_type='unspec'):
def transition(self, conf, state, state_reason=None, failure_type="unspec"):
"""Record that a build has transitioned state.

The history of state transitions are recorded in model
@@ -653,9 +677,9 @@ class ModuleBuild(MBSBase):

from module_build_service.monitor import builder_success_counter, builder_failed_counter

if INVERSE_BUILD_STATES[self.state] in ['done', 'failed']:
if INVERSE_BUILD_STATES[self.state] in ["done", "failed"]:
self.time_completed = now
if INVERSE_BUILD_STATES[self.state] == 'done':
if INVERSE_BUILD_STATES[self.state] == "done":
builder_success_counter.inc()
else:
builder_failed_counter.labels(reason=failure_type).inc()
@@ -664,16 +688,14 @@ class ModuleBuild(MBSBase):
self.state_reason = state_reason

# record module's state change
mbt = ModuleBuildTrace(state_time=now,
state=self.state,
state_reason=state_reason)
mbt = ModuleBuildTrace(state_time=now, state=self.state, state_reason=state_reason)
self.module_builds_trace.append(mbt)

log.info("%r, state %r->%r" % (self, old_state, self.state))
if old_state != self.state:
module_build_service.messaging.publish(
service='mbs',
topic='module.state.change',
service="mbs",
topic="module.state.change",
msg=self.json(show_tasks=False),
conf=conf,
)
@@ -697,14 +719,13 @@ class ModuleBuild(MBSBase):
filters["name"] = name
if stream:
filters["stream"] = stream
local_modules = session.query(ModuleBuild).filter_by(
**filters).all()
local_modules = session.query(ModuleBuild).filter_by(**filters).all()
if not local_modules:
return []

local_modules = [m for m in local_modules
if m.koji_tag and
m.koji_tag.startswith(conf.mock_resultsdir)]
local_modules = [
m for m in local_modules if m.koji_tag and m.koji_tag.startswith(conf.mock_resultsdir)
]
return local_modules

@classmethod
@@ -718,13 +739,15 @@ class ModuleBuild(MBSBase):

There should be at most one.
"""
if event.repo_tag.endswith('-build'):
if event.repo_tag.endswith("-build"):
tag = event.repo_tag[:-6]
else:
tag = event.repo_tag
query = session.query(cls)\
.filter(cls.koji_tag == tag)\
query = (
session.query(cls)
.filter(cls.koji_tag == tag)
.filter(cls.state == BUILD_STATES["build"])
)

count = query.count()
if count > 1:
@@ -734,10 +757,12 @@ class ModuleBuild(MBSBase):

@classmethod
def from_tag_change_event(cls, session, event):
tag = event.tag[:-6] if event.tag.endswith('-build') else event.tag
query = session.query(cls)\
.filter(cls.koji_tag == tag)\
tag = event.tag[:-6] if event.tag.endswith("-build") else event.tag
query = (
session.query(cls)
.filter(cls.koji_tag == tag)
.filter(cls.state == BUILD_STATES["build"])
)

count = query.count()
if count > 1:
@@ -747,43 +772,43 @@ class ModuleBuild(MBSBase):

def short_json(self, show_stream_version=False):
rv = {
'id': self.id,
'state': self.state,
'state_name': INVERSE_BUILD_STATES[self.state],
'stream': self.stream,
'version': self.version,
'name': self.name,
'context': self.context,
"id": self.id,
"state": self.state,
"state_name": INVERSE_BUILD_STATES[self.state],
"stream": self.stream,
"version": self.version,
"name": self.name,
"context": self.context,
}
if show_stream_version:
rv['stream_version'] = self.stream_version
rv["stream_version"] = self.stream_version
return rv

def json(self, show_tasks=True):
mmd = self.mmd()
xmd = from_variant_dict(mmd.get_xmd())
try:
buildrequires = xmd['mbs']['buildrequires']
buildrequires = xmd["mbs"]["buildrequires"]
except KeyError:
buildrequires = {}
rv = self.short_json()
rv.update({
'component_builds': [build.id for build in self.component_builds],
'koji_tag': self.koji_tag,
'owner': self.owner,
'rebuild_strategy': self.rebuild_strategy,
'scmurl': self.scmurl,
'scratch': self.scratch,
'srpms': json.loads(self.srpms or '[]'),
'siblings': self.siblings,
'state_reason': self.state_reason,
'time_completed': _utc_datetime_to_iso(self.time_completed),
'time_modified': _utc_datetime_to_iso(self.time_modified),
'time_submitted': _utc_datetime_to_iso(self.time_submitted),
'buildrequires': buildrequires,
"component_builds": [build.id for build in self.component_builds],
"koji_tag": self.koji_tag,
"owner": self.owner,
"rebuild_strategy": self.rebuild_strategy,
"scmurl": self.scmurl,
"scratch": self.scratch,
"srpms": json.loads(self.srpms or "[]"),
"siblings": self.siblings,
"state_reason": self.state_reason,
"time_completed": _utc_datetime_to_iso(self.time_completed),
"time_modified": _utc_datetime_to_iso(self.time_modified),
"time_submitted": _utc_datetime_to_iso(self.time_submitted),
"buildrequires": buildrequires,
})
if show_tasks:
rv['tasks'] = self.tasks()
rv["tasks"] = self.tasks()
return rv

def extended_json(self, show_state_url=False, api_version=1):
@@ -797,25 +822,26 @@ class ModuleBuild(MBSBase):
rv = self.json(show_tasks=True)
state_url = None
if show_state_url:
state_url = get_url_for('module_build', api_version=api_version, id=self.id)
state_url = get_url_for("module_build", api_version=api_version, id=self.id)

rv.update({
'base_module_buildrequires': [br.short_json(True) for br in self.buildrequires],
'build_context': self.build_context,
'modulemd': self.modulemd,
'ref_build_context': self.ref_build_context,
'runtime_context': self.runtime_context,
'state_trace': [
"base_module_buildrequires": [br.short_json(True) for br in self.buildrequires],
"build_context": self.build_context,
"modulemd": self.modulemd,
"ref_build_context": self.ref_build_context,
"runtime_context": self.runtime_context,
"state_trace": [
{
'time': _utc_datetime_to_iso(record.state_time),
'state': record.state,
'state_name': INVERSE_BUILD_STATES[record.state],
'reason': record.state_reason
} for record in self.state_trace(self.id)
"time": _utc_datetime_to_iso(record.state_time),
"state": record.state,
"state_name": INVERSE_BUILD_STATES[record.state],
"reason": record.state_reason,
}
for record in self.state_trace(self.id)
],
'state_url': state_url,
'stream_version': self.stream_version,
'virtual_streams': [virtual_stream.name for virtual_stream in self.virtual_streams],
"state_url": state_url,
"stream_version": self.stream_version,
"virtual_streams": [virtual_stream.name for virtual_stream in self.virtual_streams],
})

return rv
@@ -825,11 +851,12 @@ class ModuleBuild(MBSBase):
:return: dictionary containing the tasks associated with the build
"""
tasks = dict()
if self.id and self.state != 'init':
for build in ComponentBuild.query\
.filter_by(module_id=self.id)\
.options(lazyload('module_build'))\
.all():
if self.id and self.state != "init":
for build in (
ComponentBuild.query.filter_by(module_id=self.id)
.options(lazyload("module_build"))
.all()
):
tasks[build.format] = tasks.get(build.format, {})
tasks[build.format][build.package] = dict(
task_id=build.task_id,
@@ -843,8 +870,11 @@ class ModuleBuild(MBSBase):
return tasks

def state_trace(self, module_id):
return ModuleBuildTrace.query.filter_by(
module_id=module_id).order_by(ModuleBuildTrace.state_time).all()
return (
ModuleBuildTrace.query.filter_by(module_id=module_id)
.order_by(ModuleBuildTrace.state_time)
.all()
)

@staticmethod
def get_stream_version(stream, right_pad=True):
@@ -861,7 +891,7 @@ class ModuleBuild(MBSBase):
:rtype: float or None if the stream doesn't have a valid version
"""
# The platform version (e.g. prefix1.2.0 => 010200)
version = ''
version = ""
for char in stream:
# See if the current character is an integer, signifying the version has started
if char.isdigit():
@@ -870,19 +900,19 @@ class ModuleBuild(MBSBase):
elif version:
# If the character is a period and the version is set, then
# the loop is still processing the version part of the stream
if char == '.':
version += '.'
if char == ".":
version += "."
# If the version is set and the character is not a period or
# digit, then the remainder of the stream is a suffix like "-beta"
else:
break

# Remove the periods and pad the numbers if necessary
version = ''.join([section.zfill(2) for section in version.rstrip('.').split('.')])
version = "".join([section.zfill(2) for section in version.rstrip(".").split(".")])

if version:
if right_pad:
version += (6 - len(version)) * '0'
version += (6 - len(version)) * "0"

result = float(version)

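Following the padding rules above, a stream like "f28.1" maps to 280100.0: digits are collected, each dot-separated section is zero-filled to two characters, and the whole value is right-padded to six digits. A hedged standalone re-implementation for illustration (not the exact method body):

def stream_version(stream, right_pad=True):
    # Collect the numeric part of the stream, e.g. "f28.1" -> "28.1".
    version = ""
    for char in stream:
        if char.isdigit():
            version += char
        elif version and char == ".":
            version += "."
        elif version:
            break
    if not version:
        return None  # the stream has no numeric part
    # Zero-fill each section and right-pad the whole value to six digits.
    version = "".join(section.zfill(2) for section in version.rstrip(".").split("."))
    if right_pad:
        version += (6 - len(version)) * "0"
    return float(version)

assert stream_version("f28.1") == 280100.0
assert stream_version("prefix1.2.0") == 10200.0
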
@@ -908,68 +938,79 @@ class ModuleBuild(MBSBase):
for bm in conf.base_module_names:
# xmd is a GLib Variant and doesn't support .get() syntax
try:
bm_dict = xmd['mbs']['buildrequires'].get(bm)
bm_dict = xmd["mbs"]["buildrequires"].get(bm)
except KeyError:
raise RuntimeError(
'The module\'s mmd is missing information in the xmd section')
raise RuntimeError("The module's mmd is missing information in the xmd section")

if not bm_dict:
continue
base_module = self.get_build_from_nsvc(
db_session, bm, bm_dict['stream'], bm_dict['version'], bm_dict['context'])
db_session, bm, bm_dict["stream"], bm_dict["version"], bm_dict["context"]
)
if not base_module:
log.error('Module #{} buildrequires "{}" but it wasn\'t found in the database'
.format(self.id, repr(bm_dict)))
log.error(
'Module #{} buildrequires "{}" but it wasn\'t found in the database'.format(
self.id, repr(bm_dict))
)
continue
rv.append(base_module)

return rv

def __repr__(self):
return (("<ModuleBuild %s, id=%d, stream=%s, version=%s, scratch=%r,"
" state %r, batch %r, state_reason %r>")
% (self.name, self.id, self.stream, self.version, self.scratch,
INVERSE_BUILD_STATES[self.state], self.batch, self.state_reason))
return (
"<ModuleBuild %s, id=%d, stream=%s, version=%s, scratch=%r,"
" state %r, batch %r, state_reason %r>"
) % (
self.name,
self.id,
self.stream,
self.version,
self.scratch,
INVERSE_BUILD_STATES[self.state],
self.batch,
self.state_reason,
)


class VirtualStream(MBSBase):
__tablename__ = 'virtual_streams'
__tablename__ = "virtual_streams"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable=False, unique=True)
module_builds = db.relationship(
'ModuleBuild',
secondary=module_builds_to_virtual_streams,
back_populates='virtual_streams',
"ModuleBuild", secondary=module_builds_to_virtual_streams, back_populates="virtual_streams"
)

def __repr__(self):
return '<VirtualStream id={} name={}>'.format(self.id, self.name)
return "<VirtualStream id={} name={}>".format(self.id, self.name)


class ModuleBuildTrace(MBSBase):
__tablename__ = "module_builds_trace"
id = db.Column(db.Integer, primary_key=True)
module_id = db.Column(db.Integer, db.ForeignKey('module_builds.id'), nullable=False)
module_id = db.Column(db.Integer, db.ForeignKey("module_builds.id"), nullable=False)
state_time = db.Column(db.DateTime, nullable=False)
state = db.Column(db.Integer, nullable=True)
state_reason = db.Column(db.String, nullable=True)

module_build = db.relationship('ModuleBuild', backref='module_builds_trace', lazy=False)
module_build = db.relationship("ModuleBuild", backref="module_builds_trace", lazy=False)

def json(self):
retval = {
'id': self.id,
'module_id': self.module_id,
'state_time': _utc_datetime_to_iso(self.state_time),
'state': self.state,
'state_reason': self.state_reason,
"id": self.id,
"module_id": self.module_id,
"state_time": _utc_datetime_to_iso(self.state_time),
"state": self.state,
"state_reason": self.state_reason,
}

return retval

def __repr__(self):
return ("<ModuleBuildTrace %s, module_id: %s, state_time: %r, state: %s, state_reason: %s>"
% (self.id, self.module_id, self.state_time, self.state, self.state_reason))
return (
"<ModuleBuildTrace %s, module_id: %s, state_time: %r, state: %s, state_reason: %s>"
% (self.id, self.module_id, self.state_time, self.state, self.state_reason)
)


class ComponentBuild(MBSBase):
@@ -1002,10 +1043,9 @@ class ComponentBuild(MBSBase):
# component is not currently part of a batch.
batch = db.Column(db.Integer, default=0)

module_id = db.Column(db.Integer, db.ForeignKey('module_builds.id'), nullable=False)
module_build = db.relationship('ModuleBuild', backref='component_builds', lazy=False)
reused_component_id = db.Column(
db.Integer, db.ForeignKey('component_builds.id'))
module_id = db.Column(db.Integer, db.ForeignKey("module_builds.id"), nullable=False)
module_build = db.relationship("ModuleBuild", backref="component_builds", lazy=False)
reused_component_id = db.Column(db.Integer, db.ForeignKey("component_builds.id"))

# Weight defines the complexity of the component build as calculated by the builder's
# get_build_weights function
@@ -1015,45 +1055,49 @@ class ComponentBuild(MBSBase):
def from_component_event(cls, session, event):
if isinstance(event, module_build_service.messaging.KojiBuildChange):
if event.module_build_id:
return session.query(cls).filter_by(
task_id=event.task_id, module_id=event.module_build_id)\
return (
session.query(cls)
.filter_by(task_id=event.task_id, module_id=event.module_build_id)
.one()
)
else:
return session.query(cls).filter(
cls.task_id == event.task_id).first()
return session.query(cls).filter(cls.task_id == event.task_id).first()
else:
raise ValueError("%r is not a koji message." % event['topic'])
raise ValueError("%r is not a koji message." % event["topic"])

@classmethod
def from_component_name(cls, session, component_name, module_id):
return session.query(cls).filter_by(
package=component_name, module_id=module_id).first()
return session.query(cls).filter_by(package=component_name, module_id=module_id).first()

@classmethod
def from_component_nvr(cls, session, nvr, module_id):
return session.query(cls).filter_by(nvr=nvr, module_id=module_id).first()

def state_trace(self, component_id):
return ComponentBuildTrace.query.filter_by(
component_id=component_id).order_by(ComponentBuildTrace.state_time).all()
return (
ComponentBuildTrace.query.filter_by(component_id=component_id)
.order_by(ComponentBuildTrace.state_time)
.all()
)

def json(self):
retval = {
'id': self.id,
'package': self.package,
'format': self.format,
'task_id': self.task_id,
'state': self.state,
'state_reason': self.state_reason,
'module_build': self.module_id,
'nvr': self.nvr
"id": self.id,
"package": self.package,
"format": self.format,
"task_id": self.task_id,
"state": self.state,
"state_reason": self.state_reason,
"module_build": self.module_id,
"nvr": self.nvr,
}

try:
# Koji is py2 only, so this fails if the main web process is
# running on py3.
import koji
retval['state_name'] = koji.BUILD_STATES.get(self.state)

retval["state_name"] = koji.BUILD_STATES.get(self.state)
except ImportError:
pass

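The guarded import above is the usual optional-dependency pattern: koji's state names are used when the library is importable and silently skipped otherwise. A minimal sketch of the same idea in isolation:

def koji_state_name(state):
    # koji is py2-only in this deployment, so treat it as optional.
    try:
        import koji
    except ImportError:
        return None
    return koji.BUILD_STATES.get(state)
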
@@ -1070,72 +1114,91 @@ class ComponentBuild(MBSBase):
json = self.json()
state_url = None
if show_state_url:
state_url = get_url_for('component_build', api_version=api_version, id=self.id)
state_url = get_url_for("component_build", api_version=api_version, id=self.id)
json.update({
'batch': self.batch,
'state_trace': [{'time': _utc_datetime_to_iso(record.state_time),
'state': record.state,
'state_name': INVERSE_BUILD_STATES[record.state],
'reason': record.state_reason}
for record
in self.state_trace(self.id)],
'state_url': state_url
"batch": self.batch,
"state_trace": [
{
"time": _utc_datetime_to_iso(record.state_time),
"state": record.state,
"state_name": INVERSE_BUILD_STATES[record.state],
"reason": record.state_reason,
}
for record in self.state_trace(self.id)
],
"state_url": state_url,
})

return json

def __repr__(self):
return "<ComponentBuild %s, %r, state: %r, task_id: %r, batch: %r, state_reason: %s>" % (
self.package, self.module_id, self.state, self.task_id, self.batch, self.state_reason)
self.package,
self.module_id,
self.state,
self.task_id,
self.batch,
self.state_reason,
)


class ComponentBuildTrace(MBSBase):
__tablename__ = "component_builds_trace"
id = db.Column(db.Integer, primary_key=True)
component_id = db.Column(db.Integer, db.ForeignKey('component_builds.id'), nullable=False)
component_id = db.Column(db.Integer, db.ForeignKey("component_builds.id"), nullable=False)
state_time = db.Column(db.DateTime, nullable=False)
state = db.Column(db.Integer, nullable=True)
state_reason = db.Column(db.String, nullable=True)
task_id = db.Column(db.Integer, nullable=True)

component_build = db.relationship('ComponentBuild', backref='component_builds_trace',
lazy=False)
component_build = db.relationship(
"ComponentBuild", backref="component_builds_trace", lazy=False
)

def json(self):
retval = {
'id': self.id,
'component_id': self.component_id,
'state_time': _utc_datetime_to_iso(self.state_time),
'state': self.state,
'state_reason': self.state_reason,
'task_id': self.task_id,
"id": self.id,
"component_id": self.component_id,
"state_time": _utc_datetime_to_iso(self.state_time),
"state": self.state,
"state_reason": self.state_reason,
"task_id": self.task_id,
}

return retval

def __repr__(self):
return ("<ComponentBuildTrace %s, component_id: %s, state_time: %r, state: %s,"
" state_reason: %s, task_id: %s>") % (self.id, self.component_id, self.state_time,
self.state, self.state_reason, self.task_id)
return (
"<ComponentBuildTrace %s, component_id: %s, state_time: %r, state: %s,"
" state_reason: %s, task_id: %s>"
) % (
self.id,
self.component_id,
self.state_time,
self.state,
self.state_reason,
self.task_id,
)


def session_before_commit_handlers(session):
# new and updated items
for item in (set(session.new) | set(session.dirty)):
for item in set(session.new) | set(session.dirty):
# handlers for component builds
if isinstance(item, ComponentBuild):
cbt = ComponentBuildTrace(
state_time=datetime.utcnow(),
state=item.state,
state_reason=item.state_reason,
task_id=item.task_id)
task_id=item.task_id,
)
# To fully support append, the hook must be tied to the session
item.component_builds_trace.append(cbt)


@sqlalchemy.event.listens_for(ModuleBuild, 'before_insert')
@sqlalchemy.event.listens_for(ModuleBuild, 'before_update')
@sqlalchemy.event.listens_for(ModuleBuild, "before_insert")
@sqlalchemy.event.listens_for(ModuleBuild, "before_update")
def new_and_update_module_handler(mapper, session, target):
# Only modify time_modified if it wasn't explicitly set
if not db.inspect(target).get_history('time_modified', True).has_changes():
if not db.inspect(target).get_history("time_modified", True).has_changes():
target.time_modified = datetime.utcnow()

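For the ComponentBuildTrace hook above to fire, it has to be registered on the session with SQLAlchemy's event API; a hedged sketch of the registration, assuming db.session is the application's Flask-SQLAlchemy scoped session:

import sqlalchemy.event

# Assumption: db.session is the scoped session the models above are bound to.
sqlalchemy.event.listen(db.session, "before_commit", session_before_commit_handlers)
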
@@ -27,77 +27,76 @@ import tempfile

from flask import Blueprint, Response
from prometheus_client import ( # noqa: F401
ProcessCollector, CollectorRegistry, Counter, multiprocess, Histogram, generate_latest,
start_http_server, CONTENT_TYPE_LATEST)
ProcessCollector,
CollectorRegistry,
Counter,
multiprocess,
Histogram,
generate_latest,
start_http_server,
CONTENT_TYPE_LATEST,
)
from sqlalchemy import event

# Service-specific imports
from module_build_service.utils import cors_header, validate_api_version


if not os.environ.get('prometheus_multiproc_dir'):
os.environ.setdefault('prometheus_multiproc_dir', tempfile.mkdtemp())
if not os.environ.get("prometheus_multiproc_dir"):
os.environ.setdefault("prometheus_multiproc_dir", tempfile.mkdtemp())
registry = CollectorRegistry()
ProcessCollector(registry=registry)
multiprocess.MultiProcessCollector(registry)
if os.getenv('MONITOR_STANDALONE_METRICS_SERVER_ENABLE', 'false') == 'true':
port = os.getenv('MONITOR_STANDALONE_METRICS_SERVER_PORT', '10040')
if os.getenv("MONITOR_STANDALONE_METRICS_SERVER_ENABLE", "false") == "true":
port = os.getenv("MONITOR_STANDALONE_METRICS_SERVER_PORT", "10040")
start_http_server(int(port), registry=registry)


# Generic metrics
messaging_rx_counter = Counter(
'messaging_rx',
'Total number of messages received',
registry=registry)
"messaging_rx", "Total number of messages received", registry=registry
)
messaging_rx_processed_ok_counter = Counter(
'messaging_rx_processed_ok',
'Number of received messages, which were processed successfully',
registry=registry)
"messaging_rx_processed_ok",
"Number of received messages, which were processed successfully",
registry=registry,
)
messaging_rx_failed_counter = Counter(
'messaging_rx_failed',
'Number of received messages, which failed during processing',
registry=registry)
"messaging_rx_failed",
"Number of received messages, which failed during processing",
registry=registry,
)

messaging_tx_to_send_counter = Counter(
'messaging_tx_to_send',
'Total number of messages to send',
registry=registry)
"messaging_tx_to_send", "Total number of messages to send", registry=registry
)
messaging_tx_sent_ok_counter = Counter(
'messaging_tx_sent_ok',
'Number of messages, which were sent successfully',
registry=registry)
"messaging_tx_sent_ok", "Number of messages, which were sent successfully", registry=registry
)
messaging_tx_failed_counter = Counter(
'messaging_tx_failed',
'Number of messages, for which the sender failed',
registry=registry)
"messaging_tx_failed", "Number of messages, for which the sender failed", registry=registry
)

builder_success_counter = Counter(
'builds_success',
'Number of successful builds',
registry=registry)
"builds_success", "Number of successful builds", registry=registry
)
builder_failed_counter = Counter(
'builds_failed_total',
'Number of failed builds',
labelnames=['reason'], # reason could be: 'user', 'infra', 'unspec'
registry=registry)
"builds_failed_total",
"Number of failed builds",
labelnames=["reason"], # reason could be: 'user', 'infra', 'unspec'
registry=registry,
)

db_dbapi_error_counter = Counter(
'db_dbapi_error',
'Number of DBAPI errors',
registry=registry)
db_dbapi_error_counter = Counter("db_dbapi_error", "Number of DBAPI errors", registry=registry)
db_engine_connect_counter = Counter(
'db_engine_connect',
'Number of \'engine_connect\' events',
registry=registry)
"db_engine_connect", "Number of 'engine_connect' events", registry=registry
)
db_handle_error_counter = Counter(
'db_handle_error',
'Number of exceptions during connection',
registry=registry)
"db_handle_error", "Number of exceptions during connection", registry=registry
)
db_transaction_rollback_counter = Counter(
'db_transaction_rollback',
'Number of transactions, which were rolled back',
registry=registry)
"db_transaction_rollback", "Number of transactions, which were rolled back", registry=registry
)

# Service-specific metrics
# XXX: TODO
@@ -110,31 +109,29 @@ def db_hook_event_listeners(target=None):
if not target:
target = db.engine

@event.listens_for(target, 'dbapi_error', named=True)
@event.listens_for(target, "dbapi_error", named=True)
def receive_dbapi_error(**kw):
db_dbapi_error_counter.inc()

@event.listens_for(target, 'engine_connect')
@event.listens_for(target, "engine_connect")
def receive_engine_connect(conn, branch):
db_engine_connect_counter.inc()

@event.listens_for(target, 'handle_error')
@event.listens_for(target, "handle_error")
def receive_handle_error(exception_context):
db_handle_error_counter.inc()

@event.listens_for(target, 'rollback')
@event.listens_for(target, "rollback")
def receive_rollback(conn):
db_transaction_rollback_counter.inc()


monitor_api = Blueprint(
'monitor', __name__,
url_prefix='/module-build-service/<int:api_version>/monitor')
"monitor", __name__, url_prefix="/module-build-service/<int:api_version>/monitor")


@cors_header()
@validate_api_version()
@monitor_api.route('/metrics')
@monitor_api.route("/metrics")
def metrics(api_version):
return Response(generate_latest(registry),
content_type=CONTENT_TYPE_LATEST)
return Response(generate_latest(registry), content_type=CONTENT_TYPE_LATEST)

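Taken together, the pattern above is: create one registry, register counters on it, increment them at call sites, and serialize everything on demand. A minimal self-contained sketch with invented metric names:

from prometheus_client import CollectorRegistry, Counter, generate_latest

registry = CollectorRegistry()
requests_counter = Counter("requests_total", "Total requests handled", registry=registry)

requests_counter.inc()  # incremented at a call site
print(generate_latest(registry).decode("utf-8"))  # what the /metrics endpoint returns
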
@@ -31,29 +31,30 @@ Source: http://flask.pocoo.org/snippets/35/ by Peter Hansen


class ReverseProxy(object):
'''Wrap the application in this middleware and configure the
"""Wrap the application in this middleware and configure the
front-end server to add these headers, to let you quietly bind
this to a URL other than / and to an HTTP scheme that is
different than what is used locally.

:param app: the WSGI application
'''
"""

def __init__(self, app):
self.app = app

def __call__(self, environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
script_name = environ.get("HTTP_X_SCRIPT_NAME", "")
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
environ["SCRIPT_NAME"] = script_name
path_info = environ["PATH_INFO"]
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
environ["PATH_INFO"] = path_info[len(script_name):]

server = environ.get('HTTP_X_FORWARDED_HOST', '')
server = environ.get("HTTP_X_FORWARDED_HOST", "")
if server:
environ['HTTP_HOST'] = server
environ["HTTP_HOST"] = server

scheme = environ.get('HTTP_X_SCHEME', '')
scheme = environ.get("HTTP_X_SCHEME", "")
if scheme:
environ['wsgi.url_scheme'] = scheme
environ["wsgi.url_scheme"] = scheme
return self.app(environ, start_response)

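A hedged usage sketch for the middleware above: it wraps the WSGI callable once at startup, and the front-end proxy is expected to send the X-Script-Name, X-Forwarded-Host, and X-Scheme headers it reads:

from flask import Flask

app = Flask(__name__)
# After this, a request carrying "X-Script-Name: /mbs" is served as if the
# application were mounted under /mbs.
app.wsgi_app = ReverseProxy(app.wsgi_app)
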
@@ -36,13 +36,14 @@ class DBResolver(GenericResolver):
"""
Resolver using the MBS database
"""
backend = 'db'

backend = "db"

def __init__(self, config):
self.config = config

def _get_module(
self, name, stream, version, context, state=models.BUILD_STATES['ready'], strict=False,
self, name, stream, version, context, state=models.BUILD_STATES["ready"], strict=False
):
with models.make_session(self.config) as session:
mb = models.ModuleBuild.get_build_from_nsvc(
@@ -52,7 +53,7 @@ class DBResolver(GenericResolver):

if strict:
raise UnprocessableEntity(
'Cannot find any module builds for %s:%s' % (name, stream))
"Cannot find any module builds for %s:%s" % (name, stream))

def get_module_count(self, **kwargs):
"""
@@ -79,14 +80,22 @@ class DBResolver(GenericResolver):
# Cast the version as an integer so that we get proper ordering
module = query.order_by(
models.ModuleBuild.stream_version.desc(),
sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc()
sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc(),
).first()

if module:
return load_mmd(module.modulemd)

def get_module_modulemds(self, name, stream, version=None, context=None, strict=False,
stream_version_lte=False, virtual_streams=None):
def get_module_modulemds(
self,
name,
stream,
version=None,
context=None,
strict=False,
stream_version_lte=False,
virtual_streams=None,
):
"""
Gets the module modulemds from the resolver.
:param name: a string of the module's name
@@ -108,18 +117,18 @@ class DBResolver(GenericResolver):
mmd = self._get_module(name, stream, version, context, strict=strict)
if mmd is None:
return
return [load_mmd(mmd['modulemd'])]
return [load_mmd(mmd["modulemd"])]

with models.make_session(self.config) as session:
if not version and not context:
if (stream_version_lte and len(str(models.ModuleBuild.get_stream_version(
stream, right_pad=False))) >= 5):
if stream_version_lte and (
len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5
):
stream_version = models.ModuleBuild.get_stream_version(stream)
builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(
session, name, stream_version, virtual_streams)
else:
builds = models.ModuleBuild.get_last_builds_in_stream(
session, name, stream)
builds = models.ModuleBuild.get_last_builds_in_stream(session, name, stream)
else:
raise NotImplementedError(
"This combination of name/stream/version/context is not implemented")
@@ -146,7 +155,7 @@ class DBResolver(GenericResolver):
query = session.query(models.ModuleBuild)
query = query.filter_by(name=name, stream=stream, state=models.BUILD_STATES["ready"])

module_br_alias = aliased(models.ModuleBuild, name='module_br')
module_br_alias = aliased(models.ModuleBuild, name="module_br")
# Shorten this table name for clarity in the query below
mb_to_br = models.module_builds_to_module_buildrequires
# The following joins get added:
@@ -154,14 +163,17 @@ class DBResolver(GenericResolver):
# ON module_builds_to_module_buildrequires.module_id = module_builds.id
# JOIN module_builds AS module_br
# ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id
query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id)\
.join(module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)
query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(
module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)

# Get only modules buildrequiring particular base_module_nsvc
n, s, v, c = base_module_nsvc.split(":")
query = query.filter(
module_br_alias.name == n, module_br_alias.stream == s,
module_br_alias.version == v, module_br_alias.context == c)
module_br_alias.name == n,
module_br_alias.stream == s,
module_br_alias.version == v,
module_br_alias.context == c,
)
query = query.order_by(
sqlalchemy.cast(models.ModuleBuild.version, db.BigInteger).desc())
all_builds = query.all()
@@ -178,8 +190,12 @@ class DBResolver(GenericResolver):
builds.append(build)

mmds = [build.mmd() for build in builds]
nsvcs = [":".join([mmd.get_name(), mmd.get_stream(),
str(mmd.get_version()), mmd.get_context()]) for mmd in mmds]
nsvcs = [
":".join(
[mmd.get_name(), mmd.get_stream(), str(mmd.get_version()), mmd.get_context()]
)
for mmd in mmds
]
log.debug("Found: %r", nsvcs)
return mmds

@@ -198,12 +214,12 @@ class DBResolver(GenericResolver):
for key in keys:
results[key] = set()
with models.make_session(self.config) as session:
for module_name, module_info in mmd.get_xmd()['mbs']['buildrequires'].items():
for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():
local_modules = models.ModuleBuild.local_modules(
session, module_name, module_info['stream'])
session, module_name, module_info["stream"])
if local_modules:
local_module = local_modules[0]
log.info('Using local module {0!r} to resolve profiles.'.format(local_module))
log.info("Using local module {0!r} to resolve profiles.".format(local_module))
dep_mmd = local_module.mmd()
for key in keys:
if key in dep_mmd.get_profiles().keys():
@@ -211,12 +227,22 @@ class DBResolver(GenericResolver):
continue

build = models.ModuleBuild.get_build_from_nsvc(
session, module_name, module_info['stream'], module_info['version'],
module_info['context'], state=models.BUILD_STATES['ready'])
session,
module_name,
module_info["stream"],
module_info["version"],
module_info["context"],
state=models.BUILD_STATES["ready"],
)
if not build:
raise UnprocessableEntity('The module {}:{}:{}:{} was not found'.format(
module_name, module_info['stream'], module_info['version'],
module_info['context']))
raise UnprocessableEntity(
"The module {}:{}:{}:{} was not found".format(
module_name,
module_info["stream"],
module_info["version"],
module_info["context"],
)
)
dep_mmd = build.mmd()

# Take note of what rpms are in this dep's profile
@@ -227,8 +253,9 @@ class DBResolver(GenericResolver):
# Return the union of all rpms in all profiles of the given keys
return results

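For clarity, resolve_profiles returns one set of RPM names per requested profile key, unioned across every buildrequired module. A hypothetical result for keys ("buildroot", "srpm-buildroot"), with invented package names:

results = {
    "buildroot": {"bash", "coreutils", "gcc"},
    "srpm-buildroot": {"bash", "rpm-build"},
}
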
def get_module_build_dependencies(self, name=None, stream=None, version=None, context=None,
mmd=None, strict=False):
def get_module_build_dependencies(
self, name=None, stream=None, version=None, context=None, mmd=None, strict=False
):
"""
Returns a dictionary of koji_tag:[mmd, ...] of all the dependencies of input module.

@@ -247,44 +274,57 @@ class DBResolver(GenericResolver):
:return: a dictionary
"""
if mmd:
log.debug('get_module_build_dependencies(mmd={0!r} strict={1!r})'.format(mmd, strict))
log.debug("get_module_build_dependencies(mmd={0!r} strict={1!r})".format(mmd, strict))
elif any(x is None for x in [name, stream, version, context]):
raise RuntimeError('The name, stream, version, and/or context weren\'t specified')
raise RuntimeError("The name, stream, version, and/or context weren't specified")
else:
version = str(version)
log.debug('get_module_build_dependencies({0}, strict={1!r})'.format(
', '.join([name, stream, str(version), context]), strict))
log.debug(
"get_module_build_dependencies({0}, strict={1!r})".format(
", ".join([name, stream, str(version), context]), strict)
)

module_tags = {}
with models.make_session(self.config) as session:
if mmd:
queried_mmd = mmd
nsvc = ':'.join([
mmd.get_name(), mmd.get_stream(), str(mmd.get_version()),
mmd.get_context() or models.DEFAULT_MODULE_CONTEXT])
nsvc = ":".join([
mmd.get_name(),
mmd.get_stream(),
str(mmd.get_version()),
mmd.get_context() or models.DEFAULT_MODULE_CONTEXT,
])
else:
build = models.ModuleBuild.get_build_from_nsvc(
session, name, stream, version, context)
if not build:
raise UnprocessableEntity('The module {} was not found'.format(
':'.join([name, stream, version, context])))
raise UnprocessableEntity(
"The module {} was not found".format(
":".join([name, stream, version, context]))
)
queried_mmd = build.mmd()
nsvc = ':'.join([name, stream, version, context])
nsvc = ":".join([name, stream, version, context])

xmd_mbs = queried_mmd.get_xmd().get('mbs')
if not xmd_mbs or 'buildrequires' not in xmd_mbs.keys():
xmd_mbs = queried_mmd.get_xmd().get("mbs")
if not xmd_mbs or "buildrequires" not in xmd_mbs.keys():
raise RuntimeError(
'The module {} did not contain its modulemd or did not have '
'its xmd attribute filled out in MBS'.format(nsvc))
"The module {} did not contain its modulemd or did not have "
"its xmd attribute filled out in MBS".format(nsvc)
)

buildrequires = xmd_mbs['buildrequires']
buildrequires = xmd_mbs["buildrequires"]
for br_name, details in buildrequires.items():
build = models.ModuleBuild.get_build_from_nsvc(
session, br_name, details['stream'], details['version'], details['context'],
state=models.BUILD_STATES['ready'])
session,
br_name,
details["stream"],
details["version"],
details["context"],
state=models.BUILD_STATES["ready"],
)
if not build:
raise RuntimeError(
'Buildrequired module %s %r does not exist in MBS db' % (br_name, details))
"Buildrequired module %s %r does not exist in MBS db" % (br_name, details))

# If the buildrequire is a meta-data only module with no Koji tag set, then just
# skip it
@@ -325,11 +365,11 @@ class DBResolver(GenericResolver):
if local_modules:
local_build = local_modules[0]
new_requires[module_name] = {
'ref': None,
'stream': local_build.stream,
'version': local_build.version,
'context': local_build.context,
'koji_tag': local_build.koji_tag,
"ref": None,
"stream": local_build.stream,
"version": local_build.version,
"context": local_build.context,
"koji_tag": local_build.koji_tag,
}
continue

@@ -341,29 +381,31 @@ class DBResolver(GenericResolver):
session, module_name, module_stream, module_version, module_context)

if not build:
raise UnprocessableEntity('The module {} was not found'.format(nsvc))
raise UnprocessableEntity("The module {} was not found".format(nsvc))

commit_hash = None
mmd = build.mmd()
mbs_xmd = mmd.get_xmd().get('mbs')
if mbs_xmd and 'commit' in mbs_xmd.keys():
commit_hash = mbs_xmd['commit']
mbs_xmd = mmd.get_xmd().get("mbs")
if mbs_xmd and "commit" in mbs_xmd.keys():
commit_hash = mbs_xmd["commit"]
else:
raise RuntimeError(
'The module "{0}" didn\'t contain a commit hash in its xmd'
.format(module_name))
'The module "{0}" didn\'t contain a commit hash in its xmd'.format(
module_name)
)

if "mse" not in mbs_xmd.keys() or not mbs_xmd["mse"]:
raise RuntimeError(
'The module "{}" is not built using Module Stream Expansion. '
'Please rebuild this module first'.format(nsvc))
"Please rebuild this module first".format(nsvc)
)

new_requires[module_name] = {
'ref': commit_hash,
'stream': module_stream,
'version': build.version,
'context': build.context,
'koji_tag': build.koji_tag,
"ref": commit_hash,
"stream": module_stream,
"version": build.version,
"context": build.context,
"koji_tag": build.koji_tag,
}

return new_requires

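The mapping assembled above keys each resolved dependency by module name; a hypothetical entry with invented values, showing the expected shape:

new_requires = {
    "platform": {
        "ref": "0123456789abcdef0123456789abcdef01234567",  # invented commit hash
        "stream": "f29",
        "version": "20180905012133",
        "context": "00000000",
        "koji_tag": "module-f29-build",
    },
}
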
@@ -31,7 +31,8 @@ class LocalResolver(DBResolver):
It is subclass of DBResolver with small changes to DBResolver logic to fit
the offline local module builds. See particular methods for more information.
"""
backend = 'local'

backend = "local"

def get_buildrequired_modulemds(self, name, stream, base_module_nsvc):
"""

@@ -64,7 +64,7 @@ class MBSResolver(GenericResolver):
"stream": stream,
"state": state,
"verbose": True,
"order_desc_by": "version"
"order_desc_by": "version",
}
if version is not None:
query["version"] = str(version)
@@ -72,8 +72,9 @@ class MBSResolver(GenericResolver):
query["context"] = context
return query

def _get_modules(self, name, stream, version=None, context=None, state="ready", strict=False,
**kwargs):
def _get_modules(
self, name, stream, version=None, context=None, state="ready", strict=False, **kwargs
):
"""Query and return modules from MBS with specific info

:param str name: module's name.
@@ -133,11 +134,7 @@ class MBSResolver(GenericResolver):
:return: the number of modules that match the provided filter
:rtype: int
"""
query = {
"page": 1,
"per_page": 1,
"short": True,
}
query = {"page": 1, "per_page": 1, "short": True}
query.update(kwargs)
res = self.session.get(self.mbs_prod_url, params=query)
if not res.ok:
@@ -171,8 +168,16 @@ class MBSResolver(GenericResolver):
if data["items"]:
return load_mmd(data["items"][0]["modulemd"])

def get_module_modulemds(self, name, stream, version=None, context=None, strict=False,
stream_version_lte=False, virtual_streams=None):
def get_module_modulemds(
self,
name,
stream,
version=None,
context=None,
strict=False,
stream_version_lte=False,
virtual_streams=None,
):
"""
Gets the module modulemds from the resolver.
:param name: a string of the module's name
@@ -197,8 +202,9 @@ class MBSResolver(GenericResolver):
return [m.mmd() for m in local_modules]

extra_args = {}
if (stream_version_lte and len(str(models.ModuleBuild.get_stream_version(
stream, right_pad=False))) >= 5):
if stream_version_lte and (
len(str(models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5
):
stream_version = models.ModuleBuild.get_stream_version(stream)
extra_args["stream_version_lte"] = stream_version

@@ -212,7 +218,7 @@ class MBSResolver(GenericResolver):
mmds = []
for module in modules:
if module:
yaml = module['modulemd']
yaml = module["modulemd"]

if not yaml:
if strict:
@@ -236,9 +242,8 @@ class MBSResolver(GenericResolver):
:rtype: list
:return: List of modulemd metadata.
"""
modules = self._get_modules(name, stream, strict=False,
base_module_br=base_module_nsvc)
return [load_mmd(module['modulemd']) for module in modules]
modules = self._get_modules(name, stream, strict=False, base_module_br=base_module_nsvc)
return [load_mmd(module["modulemd"]) for module in modules]

def resolve_profiles(self, mmd, keys):
"""
@@ -258,13 +263,12 @@ class MBSResolver(GenericResolver):
results = {}
for key in keys:
results[key] = set()
for module_name, module_info in mmd.get_xmd()['mbs']['buildrequires'].items():
for module_name, module_info in mmd.get_xmd()["mbs"]["buildrequires"].items():
local_modules = models.ModuleBuild.local_modules(
db.session, module_name, module_info['stream'])
db.session, module_name, module_info["stream"])
if local_modules:
local_module = local_modules[0]
log.info("Using local module %r to resolve profiles.",
local_module)
log.info("Using local module %r to resolve profiles.", local_module)
dep_mmd = local_module.mmd()
for key in keys:
if key in dep_mmd.get_profiles().keys():
@@ -273,11 +277,15 @@ class MBSResolver(GenericResolver):

# Find the dep in the built modules in MBS
modules = self._get_modules(
module_name, module_info['stream'], module_info['version'],
module_info['context'], strict=True)
module_name,
module_info["stream"],
module_info["version"],
module_info["context"],
strict=True,
)

for module in modules:
yaml = module['modulemd']
yaml = module["modulemd"]
dep_mmd = load_mmd(yaml)
# Take note of what rpms are in this dep's profile.
for key in keys:
@@ -287,8 +295,9 @@ class MBSResolver(GenericResolver):
# Return the union of all rpms in all profiles of the given keys.
return results

def get_module_build_dependencies(self, name=None, stream=None, version=None, context=None,
mmd=None, strict=False):
def get_module_build_dependencies(
self, name=None, stream=None, version=None, context=None, mmd=None, strict=False
):
"""
Returns a dictionary of koji_tag:[mmd, ...] of all the dependencies of input module.

@@ -311,11 +320,13 @@ class MBSResolver(GenericResolver):
if mmd:
log.debug("get_module_build_dependencies(mmd=%r strict=%r)" % (mmd, strict))
elif any(x is None for x in [name, stream, version, context]):
raise RuntimeError('The name, stream, version, and/or context weren\'t specified')
raise RuntimeError("The name, stream, version, and/or context weren't specified")
else:
version = str(version)
log.debug("get_module_build_dependencies(%s, strict=%r)"
% (', '.join([name, stream, str(version), context]), strict))
log.debug(
"get_module_build_dependencies(%s, strict=%r)"
% (", ".join([name, stream, str(version), context]), strict)
)

# This is the set we're going to build up and return.
module_tags = {}
@@ -323,22 +334,24 @@ class MBSResolver(GenericResolver):
if mmd:
queried_mmd = mmd
else:
queried_module = self._get_module(
name, stream, version, context, strict=strict)
yaml = queried_module['modulemd']
queried_module = self._get_module(name, stream, version, context, strict=strict)
yaml = queried_module["modulemd"]
queried_mmd = load_mmd(yaml)

if (not queried_mmd or not queried_mmd.get_xmd().get('mbs') or
'buildrequires' not in queried_mmd.get_xmd()['mbs'].keys()):
if (
not queried_mmd
or not queried_mmd.get_xmd().get("mbs")
or "buildrequires" not in queried_mmd.get_xmd()["mbs"].keys()
):
raise RuntimeError(
'The module "{0!r}" did not contain its modulemd or did not have '
'its xmd attribute filled out in MBS'.format(queried_mmd))
"its xmd attribute filled out in MBS".format(queried_mmd)
)

buildrequires = queried_mmd.get_xmd()['mbs']['buildrequires']
buildrequires = queried_mmd.get_xmd()["mbs"]["buildrequires"]
# Queue up the next tier of deps that we should look at..
for name, details in buildrequires.items():
local_modules = models.ModuleBuild.local_modules(
db.session, name, details['stream'])
local_modules = models.ModuleBuild.local_modules(db.session, name, details["stream"])
if local_modules:
for m in local_modules:
# If the buildrequire is a meta-data only module with no Koji tag set, then just
@@ -351,8 +364,7 @@ class MBSResolver(GenericResolver):
if "context" not in details:
details["context"] = models.DEFAULT_MODULE_CONTEXT
modules = self._get_modules(
name, details['stream'], details['version'],
details['context'], strict=True)
name, details["stream"], details["version"], details["context"], strict=True)
for m in modules:
if m["koji_tag"] in module_tags:
continue
@@ -390,21 +402,20 @@ class MBSResolver(GenericResolver):
"Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)
# Try to find out module dependency in the local module builds
# added by utils.load_local_builds(...).
local_modules = models.ModuleBuild.local_modules(
db.session, module_name, module_stream)
local_modules = models.ModuleBuild.local_modules(db.session, module_name, module_stream)
if local_modules:
local_build = local_modules[0]
new_requires[module_name] = {
# The commit ID isn't currently saved in modules.yaml
'ref': None,
'stream': local_build.stream,
'version': local_build.version,
'context': local_build.context,
'koji_tag': local_build.koji_tag,
"ref": None,
"stream": local_build.stream,
"version": local_build.version,
"context": local_build.context,
"koji_tag": local_build.koji_tag,
# No need to set filtered_rpms for local builds, because MBS
# filters the RPMs automatically when the module build is
# done.
'filtered_rpms': []
"filtered_rpms": [],
}
continue

@@ -412,12 +423,12 @@ class MBSResolver(GenericResolver):
version = None
filtered_rpms = []
module = self._get_module(
module_name, module_stream, module_version,
module_context, strict=True)
if module.get('modulemd'):
mmd = load_mmd(module['modulemd'])
if mmd.get_xmd().get('mbs') and 'commit' in mmd.get_xmd()['mbs'].keys():
commit_hash = mmd.get_xmd()['mbs']['commit']
module_name, module_stream, module_version, module_context, strict=True
)
if module.get("modulemd"):
mmd = load_mmd(module["modulemd"])
if mmd.get_xmd().get("mbs") and "commit" in mmd.get_xmd()["mbs"].keys():
commit_hash = mmd.get_xmd()["mbs"]["commit"]

# Find out the particular NVR of filtered packages
if "rpms" in module and mmd.get_rpm_filter().get():
@@ -433,22 +444,23 @@ class MBSResolver(GenericResolver):
continue
filtered_rpms.append(nvr)

if module.get('version'):
version = module['version']
if module.get("version"):
version = module["version"]

if version and commit_hash:
new_requires[module_name] = {
'ref': commit_hash,
'stream': module_stream,
'version': str(version),
'context': module["context"],
'koji_tag': module['koji_tag'],
'filtered_rpms': filtered_rpms,
"ref": commit_hash,
"stream": module_stream,
"version": str(version),
"context": module["context"],
"koji_tag": module["koji_tag"],
"filtered_rpms": filtered_rpms,
}
else:
raise RuntimeError(
'The module "{0}" didn\'t contain either a commit hash or a'
' version in MBS'.format(module_name))
" version in MBS".format(module_name)
)
# If the module is a base module, then import it in the database so that entries in
# the module_builds_to_module_buildrequires table can be created later on
if module_name in conf.base_module_names:
@@ -457,10 +469,10 @@ class MBSResolver(GenericResolver):
return new_requires

def get_modulemd_by_koji_tag(self, tag):
resp = self.session.get(self.mbs_prod_url, params={'koji_tag': tag, 'verbose': True})
resp = self.session.get(self.mbs_prod_url, params={"koji_tag": tag, "verbose": True})
data = resp.json()
if data['items']:
modulemd = data['items'][0]['modulemd']
if data["items"]:
modulemd = data["items"][0]["modulemd"]
return load_mmd(modulemd)
else:
return None

@@ -28,7 +28,7 @@ from module_build_service.resolver.base import GenericResolver

# NOTE: if you are adding a new resolver to MBS please note that you also have to add
# a new resolver to your setup.py and update you egg-info
for entrypoint in pkg_resources.iter_entry_points('mbs.resolver_backends'):
for entrypoint in pkg_resources.iter_entry_points("mbs.resolver_backends"):
GenericResolver.register_backend_class(entrypoint.load())

if not GenericResolver.backends:

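New resolver backends are discovered through the mbs.resolver_backends entry-point group iterated above; a hedged sketch of the setup.py stanza a hypothetical third-party backend would ship:

from setuptools import setup

setup(
    name="mbs-resolver-example",  # invented package name
    entry_points={
        "mbs.resolver_backends": [
            "example = mbs_resolver_example:ExampleResolver",
        ],
    },
)
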
@@ -78,13 +78,13 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
|
||||
@classmethod
|
||||
def supported_builders(cls):
|
||||
if cls is GenericResolver:
|
||||
return {k: v['builders'] for k, v in cls._resolvers.items()}
|
||||
return {k: v["builders"] for k, v in cls._resolvers.items()}
|
||||
else:
|
||||
try:
|
||||
return cls._resolvers[cls.backend]['builders']
|
||||
return cls._resolvers[cls.backend]["builders"]
|
||||
except KeyError:
|
||||
raise RuntimeError("No configuration of builder backends found "
|
||||
"for resolver {}".format(cls))
|
||||
raise RuntimeError(
|
||||
"No configuration of builder backends found for resolver {}".format(cls))
|
||||
|
||||
@classmethod
|
||||
def is_builder_compatible(cls, builder):
|
||||
@@ -100,9 +100,12 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
|
||||
|
||||
@staticmethod
|
||||
def extract_modulemd(yaml, strict=False):
|
||||
log.warning('GenericResolver.extract_modulemd is deprecated. Please call '
|
||||
'module_build_service.utils.load_mmd in new code.')
|
||||
log.warning(
|
||||
"GenericResolver.extract_modulemd is deprecated. Please call "
|
||||
"module_build_service.utils.load_mmd in new code."
|
||||
)
|
||||
from module_build_service.utils import load_mmd
|
||||
|
||||
return load_mmd(yaml)
|
||||
|
||||
@abstractmethod
|
||||
@@ -114,8 +117,16 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def get_module_modulemds(self, name, stream, version=None, context=None, strict=False,
|
||||
stream_version_lte=None, virtual_streams=None):
|
||||
def get_module_modulemds(
|
||||
self,
|
||||
name,
|
||||
stream,
|
||||
version=None,
|
||||
context=None,
|
||||
strict=False,
|
||||
stream_version_lte=None,
|
||||
virtual_streams=None,
|
||||
):
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
@@ -127,8 +138,9 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def get_module_build_dependencies(self, name=None, stream=None, version=None, mmd=None,
|
||||
context=None, strict=False):
|
||||
def get_module_build_dependencies(
|
||||
self, name=None, stream=None, version=None, mmd=None, context=None, strict=False
|
||||
):
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
|
||||
@@ -7,6 +7,7 @@ import module_build_service.models
import module_build_service.scheduler.consumer

import logging

log = logging.getLogger(__name__)


@@ -17,15 +18,15 @@ def main(initial_messages, stop_condition):
"""

config = fedmsg.config.load_config()
config['mbsconsumer'] = True
config['mbsconsumer.stop_condition'] = stop_condition
config['mbsconsumer.initial_messages'] = initial_messages
config["mbsconsumer"] = True
config["mbsconsumer.stop_condition"] = stop_condition
config["mbsconsumer.initial_messages"] = initial_messages

# Moksha requires that we subscribe to *something*, so tell it /dev/null
# since we'll just be doing in-memory queue-based messaging for this single
# build.
config['zmq_enabled'] = True
config['zmq_subscribe_endpoints'] = 'ipc:///dev/null'
config["zmq_enabled"] = True
config["zmq_subscribe_endpoints"] = "ipc:///dev/null"

consumers = [module_build_service.scheduler.consumer.MBSConsumer]

@@ -56,9 +57,11 @@ def make_simple_stop_condition(session):
# XXX - We ignore the message here and instead just query the DB.

# Grab the latest module build.
module = session.query(module_build_service.models.ModuleBuild)\
.order_by(module_build_service.models.ModuleBuild.id.desc())\
module = (
session.query(module_build_service.models.ModuleBuild)
.order_by(module_build_service.models.ModuleBuild.id.desc())
.first()
)
done = (
module_build_service.models.BUILD_STATES["failed"],
module_build_service.models.BUILD_STATES["ready"],

@@ -58,7 +58,8 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
""" This is triggered by running fedmsg-hub. This class is responsible for
ingesting and processing messages from the message bus.
"""
config_key = 'mbsconsumer'

config_key = "mbsconsumer"

# It is set to the id of currently handled module build. It is used to
# group all the log messages associated with single module build to
@@ -70,15 +71,15 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):

backends = module_build_service.messaging._messaging_backends
prefixes = conf.messaging_topic_prefix # This is a list.
services = backends[conf.messaging]['services']
suffix = backends[conf.messaging]['topic_suffix']
services = backends[conf.messaging]["services"]
suffix = backends[conf.messaging]["topic_suffix"]
self.topic = [
'{}.{}{}'.format(prefix.rstrip('.'), category, suffix)
"{}.{}{}".format(prefix.rstrip("."), category, suffix)
for prefix, category in itertools.product(prefixes, services)
]
if not self.topic:
self.topic = '*'
log.debug('Setting topics: {}'.format(', '.join(self.topic)))
self.topic = "*"
log.debug("Setting topics: {}".format(", ".join(self.topic)))

# The call to `super` takes action based on the setting of topics above
super(MBSConsumer, self).__init__(hub)
@@ -86,13 +87,13 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
# Our call to `super` above should have initialized an `incoming` queue
# for us.. but in certain test situations, it does not. So here,
# establish a fake `incoming` queue.
if not hasattr(self, 'incoming'):
if not hasattr(self, "incoming"):
self.incoming = queue.Queue()

# These two values are typically provided either by the unit tests or
# by the local build command. They are empty in the production environ
self.stop_condition = hub.config.get('mbsconsumer.stop_condition')
initial_messages = hub.config.get('mbsconsumer.initial_messages', [])
self.stop_condition = hub.config.get("mbsconsumer.stop_condition")
initial_messages = hub.config.get("mbsconsumer.initial_messages", [])
for msg in initial_messages:
self.incoming.put(msg)

@@ -108,26 +109,23 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
self.on_build_change = {
koji.BUILD_STATES["BUILDING"]: NO_OP,
koji.BUILD_STATES[
"COMPLETE"]: module_build_service.scheduler.handlers.components.complete,
"COMPLETE"
]: module_build_service.scheduler.handlers.components.complete,
koji.BUILD_STATES["FAILED"]: module_build_service.scheduler.handlers.components.failed,
koji.BUILD_STATES[
"FAILED"]: module_build_service.scheduler.handlers.components.failed,
koji.BUILD_STATES[
"CANCELED"]: module_build_service.scheduler.handlers.components.canceled,
"CANCELED"
]: module_build_service.scheduler.handlers.components.canceled,
koji.BUILD_STATES["DELETED"]: NO_OP,
}
self.on_module_change = {
models.BUILD_STATES[
"init"]: module_build_service.scheduler.handlers.modules.init,
models.BUILD_STATES[
"wait"]: module_build_service.scheduler.handlers.modules.wait,
models.BUILD_STATES["init"]: module_build_service.scheduler.handlers.modules.init,
models.BUILD_STATES["wait"]: module_build_service.scheduler.handlers.modules.wait,
models.BUILD_STATES["build"]: NO_OP,
models.BUILD_STATES[
"failed"]: module_build_service.scheduler.handlers.modules.failed,
models.BUILD_STATES[
"done"]: module_build_service.scheduler.handlers.modules.done,
models.BUILD_STATES["failed"]: module_build_service.scheduler.handlers.modules.failed,
models.BUILD_STATES["done"]: module_build_service.scheduler.handlers.modules.done,
# XXX: DIRECT TRANSITION TO READY
models.BUILD_STATES["ready"]: NO_OP,
models.BUILD_STATES["garbage"]: NO_OP
models.BUILD_STATES["garbage"]: NO_OP,
}
# Only one kind of repo change event, though...
self.on_repo_change = module_build_service.scheduler.handlers.repos.done
@@ -138,11 +136,12 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
def shutdown(self):
log.info("Scheduling shutdown.")
from moksha.hub.reactor import reactor

reactor.callFromThread(self.hub.stop)
reactor.callFromThread(reactor.stop)

def validate(self, message):
if conf.messaging == 'fedmsg':
if conf.messaging == "fedmsg":
# If this is a faked internal message, don't bother.
if isinstance(message, module_build_service.messaging.BaseMessage):
log.info("Skipping crypto validation for %r" % message)
@@ -171,7 +170,7 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
monitor.messaging_rx_processed_ok_counter.inc()
except sqlalchemy.exc.OperationalError as error:
monitor.messaging_rx_failed_counter.inc()
if 'could not translate host name' in str(error):
if "could not translate host name" in str(error):
log.exception(
"SQLAlchemy can't resolve DNS records. Scheduling fedmsg-hub to shutdown.")
self.shutdown()
@@ -179,21 +178,20 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
raise
except Exception:
monitor.messaging_rx_failed_counter.inc()
log.exception('Failed while handling {0!r}'.format(msg))
log.exception("Failed while handling {0!r}".format(msg))

if self.stop_condition and self.stop_condition(message):
self.shutdown()

def get_abstracted_msg(self, message):
parser = module_build_service.messaging._messaging_backends[conf.messaging].get('parser')
parser = module_build_service.messaging._messaging_backends[conf.messaging].get("parser")
if parser:
try:
return parser.parse(message)
except module_build_service.messaging.IgnoreMessage:
pass
else:
raise ValueError('{0} backend does not define a message parser'
.format(conf.messaging))
raise ValueError("{0} backend does not define a message parser".format(conf.messaging))

def sanity_check(self):
""" On startup, make sure our implementation is sane. """
@@ -205,17 +203,16 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
if koji.BUILD_STATES[state] not in self.on_build_change:
raise KeyError("Koji build states %r not handled." % state)

all_fns = (list(self.on_build_change.items()) +
list(self.on_module_change.items()))
all_fns = list(self.on_build_change.items()) + list(self.on_module_change.items())
for key, callback in all_fns:
expected = ['config', 'session', 'msg']
expected = ["config", "session", "msg"]
if six.PY2:
argspec = inspect.getargspec(callback)[0]
else:
argspec = inspect.getfullargspec(callback)[0]
if argspec != expected:
raise ValueError("Callback %r, state %r has argspec %r!=%r" % (
callback, key, argspec, expected))
raise ValueError(
"Callback %r, state %r has argspec %r!=%r" % (callback, key, argspec, expected))

def process_message(self, session, msg):
# set module build to None and let's populate it later
@@ -258,13 +255,17 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
try:
further_work = handler(conf, session, msg) or []
except Exception as e:
msg = 'Could not process message handler. See the traceback.'
msg = "Could not process message handler. See the traceback."
log.exception(msg)
session.rollback()
if build:
session.refresh(build)
build.transition(conf, state=models.BUILD_STATES['failed'],
state_reason=str(e), failure_type='infra')
build.transition(
conf,
state=models.BUILD_STATES["failed"],
state_reason=str(e),
failure_type="infra",
)
session.commit()

log.debug("Done with %s" % idx)
@@ -303,7 +304,5 @@ def work_queue_put(msg):

def fake_repo_done_message(tag_name):
msg = module_build_service.messaging.KojiRepoChange(
msg_id='a faked internal message',
repo_tag=tag_name + "-build",
)
msg_id="a faked internal message", repo_tag=tag_name + "-build")
work_queue_put(msg)

@@ -40,8 +40,7 @@ def _finalize(config, session, msg, state):
# First, find our ModuleBuild associated with this component, if any.
component_build = models.ComponentBuild.from_component_event(session, msg)
try:
nvr = "{}-{}-{}".format(msg.build_name, msg.build_version,
msg.build_release)
nvr = "{}-{}-{}".format(msg.build_name, msg.build_version, msg.build_release)
except KeyError:
nvr = None

@@ -53,7 +52,7 @@ def _finalize(config, session, msg, state):

if msg.state_reason:
state_reason = msg.state_reason
elif state != koji.BUILD_STATES['COMPLETE']:
elif state != koji.BUILD_STATES["COMPLETE"]:
state_reason = "Failed to build artifact %s in Koji" % (msg.build_name)
else:
state_reason = ""
@@ -67,10 +66,13 @@ def _finalize(config, session, msg, state):
parent = component_build.module_build

# If the macro build failed, then the module is doomed.
if (component_build.package == 'module-build-macros' and
state != koji.BUILD_STATES['COMPLETE']):
parent.transition(config, state=models.BUILD_STATES['failed'],
state_reason=state_reason, failure_type='user')
if component_build.package == "module-build-macros" and state != koji.BUILD_STATES["COMPLETE"]:
parent.transition(
config,
state=models.BUILD_STATES["failed"],
state_reason=state_reason,
failure_type="user",
)
session.commit()
return

@@ -80,31 +82,36 @@ def _finalize(config, session, msg, state):
# we can tag all successfully built components in the batch.
unbuilt_components_in_batch = [
c for c in parent.current_batch()
if c.state == koji.BUILD_STATES['BUILDING'] or not c.state
if c.state == koji.BUILD_STATES["BUILDING"] or not c.state
]
if not unbuilt_components_in_batch:
failed_components_in_batch = [
c for c in parent.current_batch()
if (c.state in [koji.BUILD_STATES['FAILED'],
koji.BUILD_STATES['CANCELED']])
if (c.state in [koji.BUILD_STATES["FAILED"], koji.BUILD_STATES["CANCELED"]])
]

built_components_in_batch = [
c for c in parent.current_batch()
if c.state == koji.BUILD_STATES['COMPLETE']
if c.state == koji.BUILD_STATES["COMPLETE"]
]

builder = module_build_service.builder.GenericBuilder.create_from_module(
session, parent, config)
session, parent, config
)

if failed_components_in_batch:
log.info("Batch done, but not tagging because of failed component builds. Will "
"transition the module to \"failed\"")
state_reason = 'Component(s) {} failed to build.'.format(
', '.join(c.package for c in failed_components_in_batch))
parent.transition(config,
state=models.BUILD_STATES['failed'],
state_reason=state_reason, failure_type='user')
log.info(
"Batch done, but not tagging because of failed component builds. Will "
'transition the module to "failed"'
)
state_reason = "Component(s) {} failed to build.".format(
", ".join(c.package for c in failed_components_in_batch))
parent.transition(
config,
state=models.BUILD_STATES["failed"],
state_reason=state_reason,
failure_type="user",
)
session.commit()
return []
elif not built_components_in_batch:
@@ -112,14 +119,17 @@ def _finalize(config, session, msg, state):
# The repository won't be regenerated in this case and therefore we generate fake repo
# change message here.
log.info("Batch done. No component to tag")
further_work += [messaging.KojiRepoChange(
'components::_finalize: fake msg',
builder.module_build_tag['name'])]
further_work += [
messaging.KojiRepoChange(
"components::_finalize: fake msg", builder.module_build_tag["name"])
]
else:
built_component_nvrs_in_batch = [c.nvr for c in built_components_in_batch]
# tag && add to srpm-build group if neccessary
log.info("Batch done. Tagging %i component(s) in the build tag." % len(
built_component_nvrs_in_batch))
log.info(
"Batch done. Tagging %i component(s) in the build tag."
% len(built_component_nvrs_in_batch)
)
log.debug("%r" % built_component_nvrs_in_batch)
# TODO: install=component_build.build_time_only works here because module-build-macros
# is alone in its batch and the only component with build_time_only set. All other
@@ -129,16 +139,17 @@ def _finalize(config, session, msg, state):
built_component_nvrs_in_batch, install=component_build.build_time_only)

# Do not tag packages which only belong to the build tag to the dest tag
component_nvrs_to_tag_in_dest = [c.nvr for c in built_components_in_batch
if c.build_time_only is False]
log.info("Tagging %i component(s) in the dest tag." % len(
component_nvrs_to_tag_in_dest))
component_nvrs_to_tag_in_dest = [
c.nvr for c in built_components_in_batch
if c.build_time_only is False
]
log.info(
"Tagging %i component(s) in the dest tag." % len(component_nvrs_to_tag_in_dest))
if component_nvrs_to_tag_in_dest:
builder.tag_artifacts(component_nvrs_to_tag_in_dest)

session.commit()
elif (any([c.state != koji.BUILD_STATES['BUILDING']
for c in unbuilt_components_in_batch])):
elif any([c.state != koji.BUILD_STATES["BUILDING"] for c in unbuilt_components_in_batch]):
# We are not in the middle of the batch building and
# we have some unbuilt components in this batch. We might hit the
# concurrent builds threshold in previous call of continue_batch_build
@@ -153,12 +164,12 @@ def _finalize(config, session, msg, state):


def complete(config, session, msg):
return _finalize(config, session, msg, state=koji.BUILD_STATES['COMPLETE'])
return _finalize(config, session, msg, state=koji.BUILD_STATES["COMPLETE"])


def failed(config, session, msg):
return _finalize(config, session, msg, state=koji.BUILD_STATES['FAILED'])
return _finalize(config, session, msg, state=koji.BUILD_STATES["FAILED"])


def canceled(config, session, msg):
return _finalize(config, session, msg, state=koji.BUILD_STATES['CANCELED'])
return _finalize(config, session, msg, state=koji.BUILD_STATES["CANCELED"])

@@ -42,8 +42,7 @@ def get_corresponding_module_build(nvr):
return None

try:
module_build_id = build_info['extra']['typeinfo']['module'][
'module_build_service_id']
module_build_id = build_info["extra"]["typeinfo"]["module"]["module_build_service_id"]
except KeyError:
# If any of the keys is not present, the NVR is not the one for
# handling Greenwave event.
@@ -64,37 +63,50 @@ def decision_update(config, session, msg):
:type msg: :class:`GreenwaveDecisionUpdate`
"""
if not config.greenwave_decision_context:
log.debug('Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT '
'configured', msg.msg_id)
log.debug(
"Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT "
"configured",
msg.msg_id,
)
return

if msg.decision_context != config.greenwave_decision_context:
log.debug('Skip Greenwave message %s as MBS only handles messages with the '
'decision context "%s"',
msg.msg_id, config.greenwave_decision_context)
log.debug(
"Skip Greenwave message %s as MBS only handles messages with the "
'decision context "%s"',
msg.msg_id,
config.greenwave_decision_context,
)
return

module_build_nvr = msg.subject_identifier

if not msg.policies_satisfied:
log.debug('Skip to handle module build %s because it has not satisfied'
' Greenwave policies.',
module_build_nvr)
log.debug(
"Skip to handle module build %s because it has not satisfied Greenwave policies.",
module_build_nvr,
)
return

build = get_corresponding_module_build(module_build_nvr)

if build is None:
log.debug('No corresponding module build of subject_identifier %s is '
'found.', module_build_nvr)
log.debug(
"No corresponding module build of subject_identifier %s is found.", module_build_nvr)
return

if build.state == BUILD_STATES['done']:
if build.state == BUILD_STATES["done"]:
build.transition(
conf, BUILD_STATES['ready'],
state_reason='Module build {} has satisfied Greenwave policies.'
.format(module_build_nvr))
conf,
BUILD_STATES["ready"],
state_reason="Module build {} has satisfied Greenwave policies.".format(
module_build_nvr
),
)
else:
log.warning('Module build %s is not in done state but Greenwave tells '
'it passes tests in decision context %s',
module_build_nvr, msg.decision_context)
log.warning(
"Module build %s is not in done state but Greenwave tells "
"it passes tests in decision context %s",
module_build_nvr,
msg.decision_context,
)

@@ -33,7 +33,8 @@ from module_build_service.utils import (
record_component_builds,
get_rpm_release,
generate_koji_tag,
record_filtered_rpms)
record_filtered_rpms,
)
from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError
from module_build_service.utils.ursine import handle_stream_collision_modules

@@ -64,17 +65,17 @@ def failed(config, session, msg):
build = models.ModuleBuild.from_module_event(session, msg)

module_info = build.json()
if module_info['state'] != msg.module_build_state:
if module_info["state"] != msg.module_build_state:
log.warning(
"Note that retrieved module state %r doesn't match message module"
" state %r", module_info['state'], msg.module_build_state)
"Note that retrieved module state %r doesn't match message module state %r",
module_info["state"], msg.module_build_state,
)
# This is ok.. it's a race condition we can ignore.
pass

unbuilt_components = [
c for c in build.component_builds
if (c.state != koji.BUILD_STATES['COMPLETE'] and
c.state != koji.BUILD_STATES["FAILED"])
if (c.state != koji.BUILD_STATES["COMPLETE"] and c.state != koji.BUILD_STATES["FAILED"])
]

if build.koji_tag:
@@ -87,7 +88,7 @@ def failed(config, session, msg):
for component in unbuilt_components:
if component.task_id:
builder.cancel_build(component.task_id)
component.state = koji.BUILD_STATES['FAILED']
component.state = koji.BUILD_STATES["FAILED"]
component.state_reason = build.state_reason
session.add(component)

@@ -98,13 +99,13 @@ def failed(config, session, msg):
if not build.state_reason:
reason = "Missing koji tag. Assuming previously failed module lookup."
log.error(reason)
build.transition(config, state="failed", state_reason=reason, failure_type='infra')
build.transition(config, state="failed", state_reason=reason, failure_type="infra")
session.commit()
return

# Don't transition it again if it's already been transitioned
if build.state != models.BUILD_STATES["failed"]:
build.transition(config, state="failed", failure_type='user')
build.transition(config, state="failed", failure_type="user")

session.commit()

@@ -122,10 +123,11 @@ def done(config, session, msg):
"""
build = models.ModuleBuild.from_module_event(session, msg)
module_info = build.json()
if module_info['state'] != msg.module_build_state:
if module_info["state"] != msg.module_build_state:
log.warning(
"Note that retrieved module state %r doesn't match message module"
" state %r", module_info['state'], msg.module_build_state)
"Note that retrieved module state %r doesn't match message module state %r",
module_info["state"], msg.module_build_state,
)
# This is ok.. it's a race condition we can ignore.
pass

@@ -148,13 +150,13 @@ def init(config, session, msg):
break
time.sleep(1)

error_msg = ''
failure_reason = 'unspec'
error_msg = ""
failure_reason = "unspec"
try:
mmd = build.mmd()
record_component_builds(mmd, build, session=session)
# The ursine.handle_stream_collision_modules is Koji specific.
if conf.system in ['koji', 'test']:
if conf.system in ["koji", "test"]:
handle_stream_collision_modules(mmd)
mmd = record_filtered_rpms(mmd)
build.modulemd = to_text_type(mmd.dumps())
@@ -163,15 +165,15 @@ def init(config, session, msg):
except (UnprocessableEntity, Forbidden, ValidationError, RuntimeError) as e:
log.exception(str(e))
error_msg = str(e)
failure_reason = 'user'
failure_reason = "user"
except (xmlrpclib.ProtocolError, koji.GenericError) as e:
log.exception(str(e))
error_msg = 'Koji communication error: "{0}"'.format(str(e))
failure_reason = 'infra'
failure_reason = "infra"
except Exception as e:
log.exception(str(e))
error_msg = "An unknown error occurred while validating the modulemd"
failure_reason = 'user'
failure_reason = "user"
else:
session.add(build)
session.commit()
@@ -179,8 +181,12 @@ def init(config, session, msg):
if error_msg:
# Rollback changes underway
session.rollback()
build.transition(conf, models.BUILD_STATES["failed"], state_reason=error_msg,
failure_type=failure_reason)
build.transition(
conf,
models.BUILD_STATES["failed"],
state_reason=error_msg,
failure_type=failure_reason,
)


def generate_module_build_koji_tag(build):
@@ -191,17 +197,23 @@ def generate_module_build_koji_tag(build):
:return: generated koji tag.
:rtype: str
"""
log.info('Getting tag for %s:%s:%s', build.name, build.stream, build.version)
if conf.system in ['koji', 'test']:
return generate_koji_tag(build.name, build.stream, build.version, build.context,
scratch=build.scratch, scratch_id=build.id)
log.info("Getting tag for %s:%s:%s", build.name, build.stream, build.version)
if conf.system in ["koji", "test"]:
return generate_koji_tag(
build.name,
build.stream,
build.version,
build.context,
scratch=build.scratch,
scratch_id=build.id,
)
else:
return '-'.join(['module', build.name, build.stream, build.version])
return "-".join(["module", build.name, build.stream, build.version])


@module_build_service.utils.retry(
interval=10, timeout=120,
wait_on=(ValueError, RuntimeError, ConnectionError))
interval=10, timeout=120, wait_on=(ValueError, RuntimeError, ConnectionError)
)
def get_module_build_dependencies(build):
"""Used by wait handler to get module's build dependencies

@@ -212,10 +224,10 @@ def get_module_build_dependencies(build):
:rtype: dict[str, Modulemd.Module]
"""
resolver = module_build_service.resolver.system_resolver
if conf.system in ['koji', 'test']:
if conf.system in ["koji", "test"]:
# For Koji backend, query for the module we are going to
# build to get the koji_tag and deps from it.
log.info('Getting tag for %s:%s:%s', build.name, build.stream, build.version)
log.info("Getting tag for %s:%s:%s", build.name, build.stream, build.version)
return resolver.get_module_build_dependencies(
build.name, build.stream, build.version, build.context, strict=True)
else:
@@ -235,7 +247,7 @@ def get_content_generator_build_koji_tag(module_deps):
:return: the koji tag.
:rtype: str
"""
if conf.system in ['koji', 'test']:
if conf.system in ["koji", "test"]:
# Find out the name of Koji tag to which the module's Content
# Generator build should be tagged once the build finishes.
module_names_streams = {
@@ -246,9 +258,11 @@ def get_content_generator_build_koji_tag(module_deps):
return conf.koji_cg_build_tag_template.format(
module_names_streams[base_module_name])

log.debug('No configured base module is a buildrequire. Hence use'
' default content generator build koji tag %s',
conf.koji_cg_default_build_tag)
log.debug(
"No configured base module is a buildrequire. Hence use"
" default content generator build koji tag %s",
conf.koji_cg_default_build_tag,
)
return conf.koji_cg_default_build_tag
else:
return conf.koji_cg_default_build_tag
@@ -270,11 +284,10 @@ def wait(config, session, msg):
@module_build_service.utils.retry(interval=10, timeout=120, wait_on=RuntimeError)
def _get_build_containing_xmd_for_mbs():
build = models.ModuleBuild.from_module_event(session, msg)
if 'mbs' in build.mmd().get_xmd():
if "mbs" in build.mmd().get_xmd():
return build
session.expire(build)
raise RuntimeError("{!r} doesn't contain xmd information for MBS."
.format(build))
raise RuntimeError("{!r} doesn't contain xmd information for MBS.".format(build))

build = _get_build_containing_xmd_for_mbs()
build_logs.start(build)
@@ -283,8 +296,10 @@ def wait(config, session, msg):
log.info("%r", build.modulemd)

if build.state != msg.module_build_state:
log.warning("Note that retrieved module state %r doesn't match message"
" module state %r", build.state, msg.module_build_state)
log.warning(
"Note that retrieved module state %r doesn't match message module state %r",
build.state, msg.module_build_state,
)
# This is ok.. it's a race condition we can ignore.
pass

@@ -293,7 +308,7 @@ def wait(config, session, msg):
except ValueError:
reason = "Failed to get module info from MBS. Max retries reached."
log.exception(reason)
build.transition(config, state="failed", state_reason=reason, failure_type='infra')
build.transition(config, state="failed", state_reason=reason, failure_type="infra")
session.commit()
raise

@@ -307,23 +322,24 @@ def wait(config, session, msg):
build.koji_tag = tag

if build.scratch:
log.debug('Assigning Content Generator build koji tag is skipped for'
' scratch module build.')
log.debug(
"Assigning Content Generator build koji tag is skipped for scratch module build.")
elif conf.koji_cg_tag_build:
cg_build_koji_tag = get_content_generator_build_koji_tag(build_deps)
log.debug("Assigning Content Generator build koji tag=%s to module build",
cg_build_koji_tag)
log.debug(
"Assigning Content Generator build koji tag=%s to module build", cg_build_koji_tag)
build.cg_build_koji_tag = cg_build_koji_tag
else:
log.debug('It is disabled to tag module build during importing into'
' Koji by Content Generator.')
log.debug('Skip to assign Content Generator build koji tag to module build.')
log.debug(
"It is disabled to tag module build during importing into Koji by Content Generator.")
log.debug("Skip to assign Content Generator build koji tag to module build.")

builder = module_build_service.builder.GenericBuilder.create_from_module(
session, build, config)
builder = module_build_service.builder.GenericBuilder.create_from_module(session, build, config)

log.debug("Adding dependencies %s into buildroot for module %s:%s:%s",
build_deps.keys(), build.name, build.stream, build.version)
log.debug(
"Adding dependencies %s into buildroot for module %s:%s:%s",
build_deps.keys(), build.name, build.stream, build.version,
)
builder.buildroot_add_repos(build_deps)

if not build.component_builds:
@@ -333,14 +349,15 @@ def wait(config, session, msg):
session.commit()
# Return a KojiRepoChange message so that the build can be transitioned to done
# in the repos handler
return [module_build_service.messaging.KojiRepoChange(
'handlers.modules.wait: fake msg', builder.module_build_tag['name'])]
return [
module_build_service.messaging.KojiRepoChange(
"handlers.modules.wait: fake msg", builder.module_build_tag["name"])
]

# If all components in module build will be reused, we don't have to build
# module-build-macros, because there won't be any build done.
if attempt_to_reuse_all_components(builder, session, build):
log.info("All components have been reused for module %r, "
"skipping build" % build)
log.info("All components have been reused for module %r, skipping build" % build)
build.transition(config, state="build")
session.add(build)
session.commit()
@@ -352,12 +369,9 @@ def wait(config, session, msg):

artifact_name = "module-build-macros"

component_build = models.ComponentBuild.from_component_name(
session, artifact_name, build.id)
component_build = models.ComponentBuild.from_component_name(session, artifact_name, build.id)
further_work = []
srpm = builder.get_disttag_srpm(
disttag=".%s" % get_rpm_release(build),
module_build=build)
srpm = builder.get_disttag_srpm(disttag=".%s" % get_rpm_release(build), module_build=build)
if not component_build:
component_build = models.ComponentBuild(
module_id=build.id,
@@ -365,7 +379,7 @@ def wait(config, session, msg):
format="rpms",
scmurl=srpm,
batch=1,
build_time_only=True
build_time_only=True,
)
session.add(component_build)
# Commit and refresh so that the SQLAlchemy relationships are available
@@ -373,7 +387,7 @@ def wait(config, session, msg):
session.refresh(component_build)
msgs = builder.recover_orphaned_artifact(component_build)
if msgs:
log.info('Found an existing module-build-macros build')
log.info("Found an existing module-build-macros build")
further_work += msgs
# There was no existing artifact found, so lets submit the build instead
else:
@@ -382,13 +396,13 @@ def wait(config, session, msg):
component_build.state = state
component_build.reason = reason
component_build.nvr = nvr
elif component_build.state != koji.BUILD_STATES['COMPLETE']:
elif component_build.state != koji.BUILD_STATES["COMPLETE"]:
# It's possible that the build succeeded in the builder but some other step failed which
# caused module-build-macros to be marked as failed in MBS, so check to see if it exists
# first
msgs = builder.recover_orphaned_artifact(component_build)
if msgs:
log.info('Found an existing module-build-macros build')
log.info("Found an existing module-build-macros build")
further_work += msgs
else:
task_id, state, reason, nvr = builder.build(artifact_name=artifact_name, source=srpm)
@@ -405,11 +419,12 @@ def wait(config, session, msg):
# We always have to regenerate the repository.
if config.system == "koji":
log.info("Regenerating the repository")
task_id = builder.koji_session.newRepo(
builder.module_build_tag['name'])
task_id = builder.koji_session.newRepo(builder.module_build_tag["name"])
build.new_repo_task_id = task_id
session.commit()
else:
further_work.append(module_build_service.messaging.KojiRepoChange(
'fake msg', builder.module_build_tag['name']))
further_work.append(
module_build_service.messaging.KojiRepoChange(
"fake msg", builder.module_build_tag["name"])
)
return further_work

@@ -38,10 +38,10 @@ def done(config, session, msg):

# First, find our ModuleBuild associated with this repo, if any.
tag = msg.repo_tag
if config.system in ('koji', 'test') and not tag.endswith('-build'):
if config.system in ("koji", "test") and not tag.endswith("-build"):
log.debug("Tag %r does not end with '-build' suffix, ignoring" % tag)
return
tag = tag[:-6] if tag.endswith('-build') else tag
tag = tag[:-6] if tag.endswith("-build") else tag
module_build = models.ModuleBuild.from_repo_done_event(session, msg)
if not module_build:
log.debug("No module build found associated with koji tag %r" % tag)
@@ -50,17 +50,17 @@ def done(config, session, msg):
# It is possible that we have already failed.. but our repo is just being
# routinely regenerated. Just ignore that. If module_build_service says the module is
# dead, then the module is dead.
if module_build.state == models.BUILD_STATES['failed']:
if module_build.state == models.BUILD_STATES["failed"]:
log.info("Ignoring repo regen for already failed %r" % module_build)
return

# Get the list of untagged components in current/previous batches which
# have been built successfully
if config.system in ('koji', 'test') and module_build.component_builds:
if config.system in ("koji", "test") and module_build.component_builds:
untagged_components = [
c for c in module_build.up_to_current_batch()
if (not c.tagged or (not c.tagged_in_final and not c.build_time_only)) and
c.state == koji.BUILD_STATES['COMPLETE']
if (not c.tagged or (not c.tagged_in_final and not c.build_time_only))
and c.state == koji.BUILD_STATES["COMPLETE"]
]
if untagged_components:
log.info("Ignoring repo regen, because not all components are tagged.")
@@ -76,20 +76,19 @@ def done(config, session, msg):
current_batch = module_build.current_batch()

# If any in the current batch are still running.. just wait.
running = [c.state == koji.BUILD_STATES['BUILDING'] for c in current_batch]
running = [c.state == koji.BUILD_STATES["BUILDING"] for c in current_batch]
if any(running):
log.info(
"%r has %r of %r components still "
"building in this batch (%r total)" % (
module_build, len(running), len(current_batch),
len(module_build.component_builds)))
"building in this batch (%r total)"
% (module_build, len(running), len(current_batch), len(module_build.component_builds))
)
return

# Assemble the list of all successful components in the batch.
good = [c for c in current_batch if c.state == koji.BUILD_STATES['COMPLETE']]
good = [c for c in current_batch if c.state == koji.BUILD_STATES["COMPLETE"]]

failed_states = (koji.BUILD_STATES['FAILED'],
koji.BUILD_STATES['CANCELED'])
failed_states = (koji.BUILD_STATES["FAILED"], koji.BUILD_STATES["CANCELED"])

# If *none* of the components completed for this batch, then obviously the
# module fails. However! We shouldn't reach this scenario. There is
@@ -97,10 +96,10 @@ def done(config, session, msg):
# first before we ever get here. This is here as a race condition safety
# valve.
if module_build.component_builds and not good:
state_reason = 'Component(s) {} failed to build.'.format(
', '.join(c.package for c in current_batch if c.state in failed_states))
module_build.transition(config, models.BUILD_STATES['failed'], state_reason,
failure_type='infra')
state_reason = "Component(s) {} failed to build.".format(
", ".join(c.package for c in current_batch if c.state in failed_states))
module_build.transition(
config, models.BUILD_STATES["failed"], state_reason, failure_type="infra")
session.commit()
log.warning("Odd! All components in batch failed for %r." % module_build)
return
@@ -109,8 +108,13 @@ def done(config, session, msg):
session, module_build)

builder = module_build_service.builder.GenericBuilder.create(
module_build.owner, module_build, config.system, config,
tag_name=tag, components=[c.package for c in module_build.component_builds])
module_build.owner,
module_build,
config.system,
config,
tag_name=tag,
components=[c.package for c in module_build.component_builds],
)
builder.buildroot_connect(groups)

# If we have reached here then we know the following things:
@@ -122,8 +126,8 @@ def done(config, session, msg):
# So now we can either start a new batch if there are still some to build
# or, if everything is built successfully, then we can bless the module as
# complete.
has_unbuilt_components = any(c.state in [None, koji.BUILD_STATES['BUILDING']]
for c in module_build.component_builds)
has_unbuilt_components = any(
c.state in [None, koji.BUILD_STATES["BUILDING"]] for c in module_build.component_builds)
has_failed_components = any(c.state in failed_states for c in module_build.component_builds)

further_work = []
@@ -137,25 +141,27 @@ def done(config, session, msg):

# Try to start next batch build, because there are still unbuilt
# components in a module.
further_work += start_next_batch_build(
config, module_build, session, builder)
further_work += start_next_batch_build(config, module_build, session, builder)

else:
if has_failed_components:
state_reason = 'Component(s) {} failed to build.'.format(
', '.join(c.package for c in module_build.component_builds
if c.state in failed_states)
state_reason = "Component(s) {} failed to build.".format(
", ".join(
c.package for c in module_build.component_builds if c.state in failed_states
)
)
module_build.transition(config,
state=models.BUILD_STATES['failed'],
state_reason=state_reason,
failure_type='user')
module_build.transition(
config,
state=models.BUILD_STATES["failed"],
state_reason=state_reason,
failure_type="user",
)
else:
# Tell the external buildsystem to wrap up (CG import, createrepo, etc.)
module_build.time_completed = datetime.utcnow()
builder.finalize(succeeded=True)

module_build.transition(config, state=models.BUILD_STATES['done'])
module_build.transition(config, state=models.BUILD_STATES["done"])
session.commit()

return further_work

@@ -44,17 +44,15 @@ def tagged(config, session, msg):
return

# Find tagged component.
component = models.ComponentBuild.from_component_nvr(
session, msg.nvr, module_build.id)
component = models.ComponentBuild.from_component_nvr(session, msg.nvr, module_build.id)
if not component:
log.error("No component %s in module %r", msg.nvr, module_build)
return

log.info("Saw relevant component tag of %r from %r." % (component.nvr,
msg.msg_id))
log.info("Saw relevant component tag of %r from %r." % (component.nvr, msg.msg_id))

# Mark the component as tagged
if tag.endswith('-build'):
if tag.endswith("-build"):
component.tagged = True
else:
component.tagged_in_final = True
@@ -62,19 +60,21 @@ def tagged(config, session, msg):

unbuilt_components_in_batch = [
c for c in module_build.current_batch()
if c.state == koji.BUILD_STATES['BUILDING'] or not c.state
if c.state == koji.BUILD_STATES["BUILDING"] or not c.state
]
if unbuilt_components_in_batch:
log.info("Not regenerating repo for tag %s, there are still "
"building components in a batch", tag)
log.info(
"Not regenerating repo for tag %s, there are still building components in a batch",
tag,
)
return []

# Get the list of untagged components in current/previous batches which
# have been built successfully.
untagged_components = [
c for c in module_build.up_to_current_batch()
if (not c.tagged or (not c.tagged_in_final and not c.build_time_only)) and
c.state == koji.BUILD_STATES['COMPLETE']
if (not c.tagged or (not c.tagged_in_final and not c.build_time_only))
and c.state == koji.BUILD_STATES["COMPLETE"]
]

further_work = []
@@ -86,10 +86,10 @@ def tagged(config, session, msg):

unbuilt_components = [
c for c in module_build.component_builds
if c.state == koji.BUILD_STATES['BUILDING'] or not c.state
if c.state == koji.BUILD_STATES["BUILDING"] or not c.state
]
if unbuilt_components:
repo_tag = builder.module_build_tag['name']
repo_tag = builder.module_build_tag["name"]
log.info("All components in batch tagged, regenerating repo for tag %s", repo_tag)
task_id = builder.koji_session.newRepo(repo_tag)
module_build.new_repo_task_id = task_id
@@ -97,11 +97,12 @@ def tagged(config, session, msg):
# In case this is the last batch, we do not need to regenerate the
# buildroot, because we will not build anything else in it. It
# would be useless to wait for a repository we will not use anyway.
log.info("All components in module tagged and built, skipping the "
"last repo regeneration")
further_work += [messaging.KojiRepoChange(
'components::_finalize: fake msg',
builder.module_build_tag['name'])]
log.info(
"All components in module tagged and built, skipping the last repo regeneration")
further_work += [
messaging.KojiRepoChange(
"components::_finalize: fake msg", builder.module_build_tag["name"])
]
session.commit()

return further_work

@@ -54,11 +54,10 @@ class MBSProducer(PollingProducer):
self.cleanup_stale_failed_builds(conf, session)
self.sync_koji_build_tags(conf, session)
except Exception:
msg = 'Error in poller execution:'
msg = "Error in poller execution:"
log.exception(msg)

log.info('Poller will now sleep for "{}" seconds'
.format(conf.polling_interval))
log.info('Poller will now sleep for "{}" seconds'.format(conf.polling_interval))

def fail_lost_builds(self, session):
# This function is supposed to be handling only the part which can't be
@@ -66,15 +65,17 @@ class MBSProducer(PollingProducer):
# fit `n` slim. We do want rest to be processed elsewhere
# TODO re-use

if conf.system == 'koji':
if conf.system == "koji":
# We don't do this on behalf of users
koji_session = KojiModuleBuilder.get_session(conf, login=False)
log.info('Querying tasks for statuses:')
res = models.ComponentBuild.query.filter_by(
state=koji.BUILD_STATES['BUILDING']).options(
lazyload('module_build')).all()
log.info("Querying tasks for statuses:")
res = (
models.ComponentBuild.query.filter_by(state=koji.BUILD_STATES["BUILDING"])
.options(lazyload("module_build"))
.all()
)

log.info('Checking status for {0} tasks'.format(len(res)))
log.info("Checking status for {0} tasks".format(len(res)))
for component_build in res:
log.debug(component_build.json())
# Don't check tasks which haven't been triggered yet
@@ -85,10 +86,11 @@ class MBSProducer(PollingProducer):
# they may have BUILDING state temporarily before we tag them
# to new module tag. Checking them would be waste of resources.
if component_build.reused_component_id:
log.debug('Skipping check for task "{0}", '
'the component has been reused ("{1}").'.format(
component_build.task_id,
component_build.reused_component_id))
log.debug(
'Skipping check for task "{0}", '
'the component has been reused ("{1}").'.format(
component_build.task_id, component_build.reused_component_id)
)
continue

task_id = component_build.task_id
@@ -98,42 +100,41 @@ class MBSProducer(PollingProducer):

state_mapping = {
# Cancelled and failed builds should be marked as failed.
koji.TASK_STATES['CANCELED']: koji.BUILD_STATES['FAILED'],
koji.TASK_STATES['FAILED']: koji.BUILD_STATES['FAILED'],
koji.TASK_STATES["CANCELED"]: koji.BUILD_STATES["FAILED"],
koji.TASK_STATES["FAILED"]: koji.BUILD_STATES["FAILED"],
# Completed tasks should be marked as complete.
koji.TASK_STATES['CLOSED']: koji.BUILD_STATES['COMPLETE'],
koji.TASK_STATES["CLOSED"]: koji.BUILD_STATES["COMPLETE"],
}

# If it is a closed/completed task, then we can extract the NVR
build_version, build_release = None, None # defaults
if task_info['state'] == koji.TASK_STATES['CLOSED']:
if task_info["state"] == koji.TASK_STATES["CLOSED"]:
builds = koji_session.listBuilds(taskID=task_id)
if not builds:
log.warning("Task ID %r is closed, but we found no "
"builds in koji." % task_id)
log.warning(
"Task ID %r is closed, but we found no builds in koji." % task_id)
elif len(builds) > 1:
log.warning("Task ID %r is closed, but more than one "
"build is present!" % task_id)
log.warning(
"Task ID %r is closed, but more than one build is present!" % task_id)
else:
build_version = builds[0]['version']
build_release = builds[0]['release']
build_version = builds[0]["version"]
build_release = builds[0]["release"]

log.info(' task {0!r} is in state {1!r}'.format(
task_id, task_info['state']))
if task_info['state'] in state_mapping:
log.info(" task {0!r} is in state {1!r}".format(task_id, task_info["state"]))
if task_info["state"] in state_mapping:
# Fake a fedmsg message on our internal queue
msg = module_build_service.messaging.KojiBuildChange(
msg_id='producer::fail_lost_builds fake msg',
msg_id="producer::fail_lost_builds fake msg",
build_id=component_build.task_id,
task_id=component_build.task_id,
build_name=component_build.package,
build_new_state=state_mapping[task_info['state']],
build_new_state=state_mapping[task_info["state"]],
build_release=build_release,
build_version=build_version,
)
module_build_service.scheduler.consumer.work_queue_put(msg)

elif conf.system == 'mock':
elif conf.system == "mock":
pass

def cleanup_stale_failed_builds(self, conf, session):
@@ -141,60 +142,71 @@ class MBSProducer(PollingProducer):
:param conf: the MBS configuration object
:param session: a SQLAlchemy database session
"""
if conf.system == 'koji':
stale_date = datetime.utcnow() - timedelta(
days=conf.cleanup_failed_builds_time)
stale_module_builds = session.query(models.ModuleBuild).filter(
models.ModuleBuild.state == models.BUILD_STATES['failed'],
models.ModuleBuild.time_modified <= stale_date).all()
if conf.system == "koji":
stale_date = datetime.utcnow() - timedelta(days=conf.cleanup_failed_builds_time)
stale_module_builds = (
session.query(models.ModuleBuild)
.filter(
models.ModuleBuild.state == models.BUILD_STATES["failed"],
models.ModuleBuild.time_modified <= stale_date,
)
.all()
)
if stale_module_builds:
log.info('{0} stale failed module build(s) will be cleaned up'.format(
len(stale_module_builds)))
log.info(
"{0} stale failed module build(s) will be cleaned up".format(
len(stale_module_builds))
)
for module in stale_module_builds:
log.info('{0!r} is stale and is being cleaned up'.format(module))
log.info("{0!r} is stale and is being cleaned up".format(module))
# Find completed artifacts in the stale build
artifacts = [c for c in module.component_builds
if c.state == koji.BUILD_STATES['COMPLETE']]
artifacts = [
c for c in module.component_builds
if c.state == koji.BUILD_STATES["COMPLETE"]
]
# If there are no completed artifacts, then there is nothing to tag
if artifacts:
# Set buildroot_connect=False so it doesn't recreate the Koji target and etc.
builder = GenericBuilder.create_from_module(
session, module, conf, buildroot_connect=False)
session, module, conf, buildroot_connect=False
)
builder.untag_artifacts([c.nvr for c in artifacts])
# Mark the artifacts as untagged in the database
for c in artifacts:
c.tagged = False
c.tagged_in_final = False
session.add(c)
state_reason = ('The module was garbage collected since it has failed over {0}'
' day(s) ago'.format(conf.cleanup_failed_builds_time))
state_reason = (
"The module was garbage collected since it has failed over {0}"
" day(s) ago".format(conf.cleanup_failed_builds_time)
)
module.transition(
conf, models.BUILD_STATES['garbage'], state_reason=state_reason,
failure_type='user')
conf,
models.BUILD_STATES["garbage"],
state_reason=state_reason,
failure_type="user",
)
session.add(module)
session.commit()

def log_summary(self, session):
log.info('Current status:')
log.info("Current status:")
consumer = module_build_service.scheduler.consumer.get_global_consumer()
backlog = consumer.incoming.qsize()
log.info(' * internal queue backlog is {0}'.format(backlog))
log.info(" * internal queue backlog is {0}".format(backlog))
states = sorted(models.BUILD_STATES.items(), key=operator.itemgetter(1))
for name, code in states:
query = models.ModuleBuild.query.filter_by(state=code)
count = query.count()
if count:
log.info(' * {0} module builds in the {1} state'.format(
count, name))
if name == 'build':
log.info(" * {0} module builds in the {1} state".format(count, name))
if name == "build":
for module_build in query.all():
log.info(' * {0!r}'.format(module_build))
log.info(" * {0!r}".format(module_build))
# First batch is number '1'.
for i in range(1, module_build.batch + 1):
n = len([c for c in module_build.component_builds
if c.batch == i])
log.info(' * {0} components in batch {1}'
.format(n, i))
n = len([c for c in module_build.component_builds if c.batch == i])
log.info(" * {0} components in batch {1}".format(n, i))

def _nudge_module_builds_in_state(self, session, state_name, older_than_minutes):
"""
@@ -202,9 +214,9 @@ class MBSProducer(PollingProducer):
than `older_than_minutes` and adds fake MBSModule message to the
work queue.
"""
log.info('Looking for module builds stuck in the %s state', state_name)
log.info("Looking for module builds stuck in the %s state", state_name)
builds = models.ModuleBuild.by_state(session, state_name)
log.info(' %r module builds in the %s state...', len(builds), state_name)
log.info(" %r module builds in the %s state...", len(builds), state_name)
now = datetime.utcnow()
time_modified_threshold = timedelta(minutes=older_than_minutes)
for build in builds:
@@ -220,32 +232,38 @@ class MBSProducer(PollingProducer):
# Fake a message to kickstart the build anew in the consumer
state = module_build_service.models.BUILD_STATES[state_name]
msg = module_build_service.messaging.MBSModule(
'nudge_module_builds_fake_message', build.id, state)
"nudge_module_builds_fake_message", build.id, state)
log.info(" Scheduling faked event %r" % msg)
module_build_service.scheduler.consumer.work_queue_put(msg)

def process_waiting_module_builds(self, session):
for state in ['init', 'wait']:
for state in ["init", "wait"]:
self._nudge_module_builds_in_state(session, state, 10)

def process_open_component_builds(self, session):
log.warning('process_open_component_builds is not yet implemented...')
log.warning("process_open_component_builds is not yet implemented...")

def process_paused_module_builds(self, config, session):
log.info('Looking for paused module builds in the build state')
if module_build_service.utils.at_concurrent_component_threshold(
config, session):
log.debug('Will not attempt to start paused module builds due to '
'the concurrent build threshold being met')
log.info("Looking for paused module builds in the build state")
if module_build_service.utils.at_concurrent_component_threshold(config, session):
log.debug(
"Will not attempt to start paused module builds due to "
"the concurrent build threshold being met"
)
return

ten_minutes = timedelta(minutes=10)
# Check for module builds that are in the build state but don't have any active component
# builds. Exclude module builds in batch 0. This is likely a build of a module without
# components.
module_builds = session.query(models.ModuleBuild).filter(
models.ModuleBuild.state == models.BUILD_STATES['build'],
models.ModuleBuild.batch > 0).all()
module_builds = (
session.query(models.ModuleBuild)
.filter(
models.ModuleBuild.state == models.BUILD_STATES["build"],
models.ModuleBuild.batch > 0,
)
.all()
)
for module_build in module_builds:
now = datetime.utcnow()
# Only give builds a nudge if stuck for more than ten minutes
@@ -255,12 +273,13 @@ class MBSProducer(PollingProducer):
# then no possible event will start off new component builds.
# But do not try to start new builds when we are waiting for the
# repo-regen.
if (not module_build.current_batch(koji.BUILD_STATES['BUILDING']) and
not module_build.new_repo_task_id):
log.info(' Processing the paused module build %r', module_build)
if (
not module_build.current_batch(koji.BUILD_STATES["BUILDING"])
and not module_build.new_repo_task_id
):
log.info(" Processing the paused module build %r", module_build)
# Initialize the builder...
builder = GenericBuilder.create_from_module(
session, module_build, config)
builder = GenericBuilder.create_from_module(session, module_build, config)

further_work = module_build_service.utils.start_next_batch_build(
config, module_build, session, builder)
@@ -269,8 +288,7 @@ class MBSProducer(PollingProducer):
module_build_service.scheduler.consumer.work_queue_put(event)

# Check if we have met the threshold.
if module_build_service.utils.at_concurrent_component_threshold(
config, session):
if module_build_service.utils.at_concurrent_component_threshold(config, session):
break

def trigger_new_repo_when_stalled(self, config, session):
@@ -279,22 +297,24 @@ class MBSProducer(PollingProducer):
doing anything and our module build stucks. In case the module build
gets stuck on that, we trigger newRepo again to rebuild it.
"""
if config.system != 'koji':
if config.system != "koji":
return

koji_session = module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder\
.get_session(config)
koji_session = module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session(
config)

for module_build in session.query(models.ModuleBuild) \
|
||||
.filter_by(state=models.BUILD_STATES['build']).all():
|
||||
for module_build in (
|
||||
session.query(models.ModuleBuild).filter_by(state=models.BUILD_STATES["build"]).all()
|
||||
):
|
||||
if not module_build.new_repo_task_id:
|
||||
continue
|
||||
|
||||
task_info = koji_session.getTaskInfo(module_build.new_repo_task_id)
|
||||
if (task_info["state"] in [koji.TASK_STATES['CANCELED'],
|
||||
koji.TASK_STATES['FAILED']]):
|
||||
log.info("newRepo task %s for %r failed, starting another one",
|
||||
str(module_build.new_repo_task_id), module_build)
|
||||
if task_info["state"] in [koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"]]:
|
||||
log.info(
|
||||
"newRepo task %s for %r failed, starting another one",
|
||||
str(module_build.new_repo_task_id), module_build,
|
||||
)
|
||||
taginfo = koji_session.getTag(module_build.koji_tag + "-build")
|
||||
module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])
|
||||
else:
|
||||
@@ -307,39 +327,42 @@ class MBSProducer(PollingProducer):
|
||||
Deletes targets older than `config.koji_target_delete_time` seconds
|
||||
from Koji to cleanup after the module builds.
|
||||
"""
|
||||
if config.system != 'koji':
|
||||
if config.system != "koji":
|
||||
return
|
||||
|
||||
log.info('Looking for module builds which Koji target can be removed')
|
||||
log.info("Looking for module builds which Koji target can be removed")
|
||||
|
||||
now = datetime.utcnow()
|
||||
|
||||
koji_session = KojiModuleBuilder.get_session(config)
|
||||
for target in koji_session.getBuildTargets():
|
||||
koji_tag = target["dest_tag_name"]
|
||||
module = session.query(models.ModuleBuild).filter_by(
|
||||
koji_tag=koji_tag).first()
|
||||
if not module or module.name in conf.base_module_names or module.state in [
|
||||
module = session.query(models.ModuleBuild).filter_by(koji_tag=koji_tag).first()
|
||||
if (
|
||||
not module
|
||||
or module.name in conf.base_module_names
|
||||
or module.state in [
|
||||
models.BUILD_STATES["init"],
|
||||
models.BUILD_STATES["wait"],
|
||||
models.BUILD_STATES["build"]]:
|
||||
models.BUILD_STATES["build"],
|
||||
]
|
||||
):
|
||||
continue
|
||||
|
||||
# Double-check that the target we are going to remove is prefixed
|
||||
# by our prefix, so we won't remove f26 when there is some garbage
|
||||
# in DB or Koji.
|
||||
for allowed_prefix in config.koji_tag_prefixes:
|
||||
if target['name'].startswith(allowed_prefix + "-"):
|
||||
if target["name"].startswith(allowed_prefix + "-"):
|
||||
break
|
||||
else:
|
||||
log.error("Module %r has Koji target with not allowed prefix.",
|
||||
module)
|
||||
log.error("Module %r has Koji target with not allowed prefix.", module)
|
||||
continue
|
||||
|
||||
delta = now - module.time_completed
|
||||
if delta.total_seconds() > config.koji_target_delete_time:
|
||||
log.info("Removing target of module %r", module)
|
||||
koji_session.deleteBuildTarget(target['id'])
|
||||
koji_session.deleteBuildTarget(target["id"])
|
||||
|
||||
def cancel_stuck_module_builds(self, config, session):
|
||||
"""
|
||||
@@ -347,34 +370,45 @@ class MBSProducer(PollingProducer):
|
||||
The states are defined with the "cleanup_stuck_builds_states" config option and the
|
||||
time is defined by the "cleanup_stuck_builds_time" config option.
|
||||
"""
|
||||
log.info(('Looking for module builds stuck in the states "{states}" '
|
||||
'more than {days} days').format(
|
||||
states=' and '.join(config.cleanup_stuck_builds_states),
|
||||
days=config.cleanup_stuck_builds_time
|
||||
))
|
||||
log.info(
|
||||
'Looking for module builds stuck in the states "{states}" more than {days} days'
|
||||
.format(
|
||||
states=" and ".join(config.cleanup_stuck_builds_states),
|
||||
days=config.cleanup_stuck_builds_time,
|
||||
)
|
||||
)
|
||||
|
||||
delta = timedelta(days=config.cleanup_stuck_builds_time)
|
||||
now = datetime.utcnow()
|
||||
threshold = now - delta
|
||||
states = [module_build_service.models.BUILD_STATES[state]
|
||||
for state in config.cleanup_stuck_builds_states]
|
||||
states = [
|
||||
module_build_service.models.BUILD_STATES[state]
|
||||
for state in config.cleanup_stuck_builds_states
|
||||
]
|
||||
|
||||
module_builds = session.query(models.ModuleBuild).filter(
|
||||
models.ModuleBuild.state.in_(states),
|
||||
models.ModuleBuild.time_modified < threshold).all()
|
||||
module_builds = (
|
||||
session.query(models.ModuleBuild)
|
||||
.filter(
|
||||
models.ModuleBuild.state.in_(states), models.ModuleBuild.time_modified < threshold
|
||||
)
|
||||
.all()
|
||||
)
|
||||
|
||||
log.info(' {0!r} module builds are stuck...'.format(len(module_builds)))
|
||||
log.info(" {0!r} module builds are stuck...".format(len(module_builds)))
|
||||
|
||||
for build in module_builds:
|
||||
nsvc = ":".join([build.name, build.stream, build.version, build.context])
|
||||
log.info('Transitioning build "{nsvc}" to "Failed" state.'.format(nsvc=nsvc))
|
||||
|
||||
state_reason = "The module was in {state} for more than {days} days".format(
|
||||
state=build.state,
|
||||
days=config.cleanup_stuck_builds_time
|
||||
state=build.state, days=config.cleanup_stuck_builds_time
|
||||
)
|
||||
build.transition(
|
||||
config,
|
||||
state=models.BUILD_STATES["failed"],
|
||||
state_reason=state_reason,
|
||||
failure_type="user",
|
||||
)
|
||||
build.transition(config, state=models.BUILD_STATES["failed"],
|
||||
state_reason=state_reason, failure_type='user')
|
||||
session.commit()
|
||||
|
||||
def sync_koji_build_tags(self, config, session):
|
||||
@@ -386,15 +420,14 @@ class MBSProducer(PollingProducer):
|
||||
In case the Koji shows the build as tagged/tagged_in_final,
|
||||
fake "tagged" message is added to work queue.
|
||||
"""
|
||||
if conf.system != 'koji':
|
||||
if conf.system != "koji":
|
||||
return
|
||||
|
||||
koji_session = KojiModuleBuilder.get_session(conf, login=False)
|
||||
|
||||
module_builds = models.ModuleBuild.by_state(session, "build")
|
||||
for module_build in module_builds:
|
||||
complete_components = module_build.current_batch(
|
||||
koji.BUILD_STATES['COMPLETE'])
|
||||
complete_components = module_build.current_batch(koji.BUILD_STATES["COMPLETE"])
|
||||
for c in complete_components:
|
||||
# In case the component is tagged in the build tag and
|
||||
# also tagged in the final tag (or it is build_time_only
|
||||
@@ -402,8 +435,11 @@ class MBSProducer(PollingProducer):
|
||||
if c.tagged and (c.tagged_in_final or c.build_time_only):
|
||||
continue
|
||||
|
||||
log.info("%r: Component %r is complete, but not tagged in the "
|
||||
"final and/or build tags.", module_build, c)
|
||||
log.info(
|
||||
"%r: Component %r is complete, but not tagged in the "
|
||||
"final and/or build tags.",
|
||||
module_build, c,
|
||||
)
|
||||
|
||||
# Check in which tags the component is tagged.
|
||||
tag_dicts = koji_session.listTags(c.nvr)
|
||||
@@ -413,8 +449,8 @@ class MBSProducer(PollingProducer):
|
||||
# schedule fake message.
|
||||
if not c.tagged_in_final and module_build.koji_tag in tags:
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'sync_koji_build_tags_fake_message',
|
||||
module_build.koji_tag, c.package, c.nvr)
|
||||
"sync_koji_build_tags_fake_message", module_build.koji_tag, c.package, c.nvr
|
||||
)
|
||||
log.info(" Scheduling faked event %r" % msg)
|
||||
module_build_service.scheduler.consumer.work_queue_put(msg)
|
||||
|
||||
@@ -423,7 +459,6 @@ class MBSProducer(PollingProducer):
|
||||
build_tag = module_build.koji_tag + "-build"
|
||||
if not c.tagged and build_tag in tags:
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'sync_koji_build_tags_fake_message',
|
||||
build_tag, c.package, c.nvr)
|
||||
"sync_koji_build_tags_fake_message", build_tag, c.package, c.nvr)
|
||||
log.info(" Scheduling faked event %r" % msg)
|
||||
module_build_service.scheduler.consumer.work_queue_put(msg)
|
||||
|
||||
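The producer methods reformatted above all share one polling pattern: query for builds that have sat in a state too long, then put a synthetic message on the work queue so the consumer re-handles them. A minimal standalone sketch of that pattern, assuming illustrative build objects with id, state and time_modified attributes (placeholders, not the MBS API itself):

from datetime import datetime, timedelta


def nudge_stale_builds(builds, work_queue, older_than_minutes=10):
    # Re-queue a fake event for every build not modified recently enough;
    # the consumer then re-runs its normal handler for that state.
    threshold = timedelta(minutes=older_than_minutes)
    now = datetime.utcnow()
    for build in builds:
        if now - build.time_modified >= threshold:
            work_queue.append(("nudge_fake_message", build.id, build.state))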
@@ -36,7 +36,11 @@ import datetime

 from module_build_service import log, conf
 from module_build_service.errors import (
-Forbidden, ValidationError, UnprocessableEntity, ProgrammingError)
+Forbidden,
+ValidationError,
+UnprocessableEntity,
+ProgrammingError,
+)
 from module_build_service.utils.general import scm_url_schemes, retry


@@ -57,16 +61,16 @@ class SCM(object):
 """

 if allowed_scm:
-if not (url.startswith(tuple(allowed_scm)) or
-(allow_local and url.startswith("file://"))):
-raise Forbidden(
-'%s is not in the list of allowed SCMs' % url)
+if not (
+url.startswith(tuple(allowed_scm)) or (allow_local and url.startswith("file://"))
+):
+raise Forbidden("%s is not in the list of allowed SCMs" % url)

 # If we are given the option for the git protocol or the http(s) protocol,
 # then just use http(s)
-if re.match(r'(git\+http(?:s)?:\/\/)', url):
+if re.match(r"(git\+http(?:s)?:\/\/)", url):
 url = url[4:]
-url = url.rstrip('/')
+url = url.rstrip("/")

 self.url = url
 self.sourcedir = None
@@ -78,14 +82,14 @@ class SCM(object):
 self.scheme = scmtype
 break
 else:
-raise ValidationError('Invalid SCM URL: %s' % url)
+raise ValidationError("Invalid SCM URL: %s" % url)

 # git is the only one supported SCM provider atm
 if self.scheme == "git":
 match = re.search(r"^(?P<repository>.*/(?P<name>[^?]*))(\?#(?P<commit>.*))?", url)
 self.repository = match.group("repository")
 self.name = match.group("name")
-self.repository_root = self.repository[:-len(self.name)]
+self.repository_root = self.repository[: -len(self.name)]
 if self.name.endswith(".git"):
 self.name = self.name[:-4]
 self.commit = match.group("commit")
@@ -108,9 +112,10 @@ class SCM(object):
 raise ProgrammingError("Do .checkout() first.")

 found = False
-branches = SCM._run(["git", "branch", "-r", "--contains", self.commit],
-chdir=self.sourcedir)[1]
-for branch in branches.decode('utf-8').split("\n"):
+branches = SCM._run(
+["git", "branch", "-r", "--contains", self.commit], chdir=self.sourcedir
+)[1]
+for branch in branches.decode("utf-8").split("\n"):
 branch = branch.strip()
 if branch[len("origin/"):] == self.branch:
 found = True
@@ -137,15 +142,17 @@ class SCM(object):
 if stderr:
 log.warning(stderr)
 if proc.returncode != 0:
-raise UnprocessableEntity("Failed on %r, retcode %r, out %r, err %r" % (
-cmd, proc.returncode, stdout, stderr))
+raise UnprocessableEntity(
+"Failed on %r, retcode %r, out %r, err %r" % (cmd, proc.returncode, stdout, stderr)
+)
 return proc.returncode, stdout, stderr

 @staticmethod
 @retry(
 timeout=conf.scm_net_timeout,
 interval=conf.scm_net_retry_interval,
-wait_on=UnprocessableEntity)
+wait_on=UnprocessableEntity,
+)
 def _run(cmd, chdir=None, log_stdout=False):
 return SCM._run_without_retry(cmd, chdir, log_stdout)

@@ -158,14 +165,14 @@ class SCM(object):
 """
 # TODO: sanity check arguments
 if self.scheme == "git":
-self.sourcedir = '%s/%s' % (scmdir, self.name)
+self.sourcedir = "%s/%s" % (scmdir, self.name)

-module_clone_cmd = ['git', 'clone', '-q']
+module_clone_cmd = ["git", "clone", "-q"]
 if self.commit:
-module_clone_cmd.append('--no-checkout')
-module_checkout_cmd = ['git', 'checkout', '-q', self.commit]
+module_clone_cmd.append("--no-checkout")
+module_checkout_cmd = ["git", "checkout", "-q", self.commit]
 else:
-module_clone_cmd.extend(['--depth', '1'])
+module_clone_cmd.extend(["--depth", "1"])
 module_clone_cmd.extend([self.repository, self.sourcedir])

 # perform checkouts
@@ -174,13 +181,15 @@ class SCM(object):
 try:
 SCM._run(module_checkout_cmd, chdir=self.sourcedir)
 except RuntimeError as e:
-if (e.message.endswith(
-" did not match any file(s) known to git.\\n\"") or
-"fatal: reference is not a tree: " in e.message):
+if (
+e.message.endswith(' did not match any file(s) known to git.\\n"')
+or "fatal: reference is not a tree: " in e.message
+):
 raise UnprocessableEntity(
 "checkout: The requested commit hash was not found "
 "within the repository. Perhaps you forgot to push. "
-"The original message was: %s" % e.message)
+"The original message was: %s" % e.message
+)
 raise

 timestamp = SCM._run(["git", "show", "-s", "--format=%ct"], chdir=self.sourcedir)[1]
@@ -190,7 +199,7 @@ class SCM(object):
 raise RuntimeError("checkout: Unhandled SCM scheme.")
 return self.sourcedir

-def get_latest(self, ref='master'):
+def get_latest(self, ref="master"):
 """ Get the latest commit hash based on the provided git ref

 :param ref: a string of a git ref (either a branch or commit hash)
@@ -198,7 +207,7 @@ class SCM(object):
 :raises: RuntimeError
 """
 if ref is None:
-ref = 'master'
+ref = "master"
 if self.scheme == "git":
 log.debug("Getting/verifying commit hash for %s" % self.repository)
 try:
@@ -208,9 +217,9 @@ class SCM(object):
 # fallbac to `get_full_commit_hash`. We do not want to retry here, because
 # in case module contains only commit hashes, it would block for very long
 # time.
-_, output, _ = SCM._run_without_retry([
-"git", "ls-remote", "--exit-code", self.repository, 'refs/heads/' + ref
-])
+_, output, _ = SCM._run_without_retry(
+["git", "ls-remote", "--exit-code", self.repository, "refs/heads/" + ref]
+)
 except UnprocessableEntity:
 # The call below will either return the commit hash as is (if a full one was
 # provided) or the full commit hash (if a short hash was provided). If ref is not
@@ -220,7 +229,7 @@ class SCM(object):
 # git-ls-remote prints output like this, where the first commit
 # hash is what to return.
 # bf028e573e7c18533d89c7873a411de92d4d913e refs/heads/master
-return output.split()[0].decode('utf-8')
+return output.split()[0].decode("utf-8")
 else:
 raise RuntimeError("get_latest: Unhandled SCM scheme.")

@@ -236,30 +245,28 @@ class SCM(object):
 elif self.commit:
 commit_to_check = self.commit
 else:
-raise RuntimeError('No commit hash was specified for "{0}"'.format(
-self.url))
+raise RuntimeError('No commit hash was specified for "{0}"'.format(self.url))

-if self.scheme == 'git':
-log.debug('Getting the full commit hash for "{0}"'
-.format(self.repository))
+if self.scheme == "git":
+log.debug('Getting the full commit hash for "{0}"'.format(self.repository))
 td = None
 try:
 td = tempfile.mkdtemp()
-SCM._run(['git', 'clone', '-q', self.repository, td, '--bare'])
-output = SCM._run(
-['git', 'rev-parse', commit_to_check], chdir=td)[1]
+SCM._run(["git", "clone", "-q", self.repository, td, "--bare"])
+output = SCM._run(["git", "rev-parse", commit_to_check], chdir=td)[1]
 finally:
 if td and os.path.exists(td):
 shutil.rmtree(td)

 if output:
-return str(output.decode('utf-8').strip('\n'))
+return str(output.decode("utf-8").strip("\n"))

 raise UnprocessableEntity(
-'The full commit hash of "{0}" for "{1}" could not be found'
-.format(commit_hash, self.repository))
+'The full commit hash of "{0}" for "{1}" could not be found'.format(
+commit_hash, self.repository)
+)
 else:
-raise RuntimeError('get_full_commit_hash: Unhandled SCM scheme.')
+raise RuntimeError("get_full_commit_hash: Unhandled SCM scheme.")

 def get_module_yaml(self):
 """
@@ -276,8 +283,10 @@ class SCM(object):
 with open(path_to_yaml):
 return path_to_yaml
 except IOError:
-log.error("get_module_yaml: The SCM repository doesn't contain a modulemd file. "
-"Couldn't access: %s" % path_to_yaml)
+log.error(
+"get_module_yaml: The SCM repository doesn't contain a modulemd file. "
+"Couldn't access: %s" % path_to_yaml
+)
 raise UnprocessableEntity("The SCM repository doesn't contain a modulemd file")

 @staticmethod
@@ -289,11 +298,11 @@ class SCM(object):
 :param commit: a string containing the commit
 :return: boolean
 """
-if scheme == 'git':
-sha1_pattern = re.compile(r'^[0-9a-f]{40}$')
+if scheme == "git":
+sha1_pattern = re.compile(r"^[0-9a-f]{40}$")
 return bool(re.match(sha1_pattern, commit))
 else:
-raise RuntimeError('is_full_commit_hash: Unhandled SCM scheme.')
+raise RuntimeError("is_full_commit_hash: Unhandled SCM scheme.")

 def is_available(self, strict=False):
 """Check whether the scmurl is available for checkout.
@@ -316,9 +325,7 @@ class SCM(object):
 if td is not None:
 shutil.rmtree(td)
 except Exception as e:
-log.warning(
-"Failed to remove temporary directory {!r}: {}".format(
-td, str(e)))
+log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))

 @property
 def url(self):
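get_full_commit_hash() above resolves a possibly short hash by cloning the repository bare into a temporary directory and running git rev-parse there. A rough standalone equivalent using only the standard library (the function name is ours, not MBS's):

import shutil
import subprocess
import tempfile


def full_commit_hash(repository, ref):
    # A bare clone is enough for rev-parse and avoids a working tree.
    td = tempfile.mkdtemp()
    try:
        subprocess.check_call(["git", "clone", "-q", repository, td, "--bare"])
        out = subprocess.check_output(["git", "rev-parse", ref], cwd=td)
        return out.decode("utf-8").strip("\n")
    finally:
        shutil.rmtree(td)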
@@ -52,15 +52,15 @@ def at_concurrent_component_threshold(config, session):

 import koji  # Placed here to avoid py2/py3 conflicts...

-if config.num_concurrent_builds and config.num_concurrent_builds <= \
-session.query(models.ComponentBuild).filter_by(
-state=koji.BUILD_STATES['BUILDING'],
-# Components which are reused should not be counted in, because
-# we do not submit new build for them. They are in BUILDING state
-# just internally in MBS to be handled by
-# scheduler.handlers.components.complete.
-reused_component_id=None).count():
-return True
+# Components which are reused should not be counted in, because
+# we do not submit new build for them. They are in BUILDING state
+# just internally in MBS to be handled by
+# scheduler.handlers.components.complete.
+if config.num_concurrent_builds:
+count = session.query(models.ComponentBuild).filter_by(
+state=koji.BUILD_STATES["BUILDING"], reused_component_id=None).count()
+if config.num_concurrent_builds <= count:
+return True

 return False
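The rewritten at_concurrent_component_threshold() above counts components in the BUILDING state while skipping reused ones, since no real Koji task was submitted for those. The same check in miniature, over plain objects with state and reused_component_id attributes (hypothetical stand-ins for models.ComponentBuild):

def at_threshold(components, num_concurrent_builds):
    # num_concurrent_builds of 0 (or None) disables the limit entirely.
    if not num_concurrent_builds:
        return False
    building = sum(
        1 for c in components
        if c.state == "BUILDING" and c.reused_component_id is None
    )
    return num_concurrent_builds <= building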
@@ -71,21 +71,21 @@ def start_build_component(builder, c):
 by QueueBasedThreadPool in continue_batch_build.
 """
 import koji

 try:
 c.task_id, c.state, c.state_reason, c.nvr = builder.build(
 artifact_name=c.package, source=c.scmurl)
 except Exception as e:
-c.state = koji.BUILD_STATES['FAILED']
+c.state = koji.BUILD_STATES["FAILED"]
 c.state_reason = "Failed to build artifact %s: %s" % (c.package, str(e))
 log.exception(e)
-c.module_build.transition(conf, models.BUILD_STATES['failed'], failure_type='infra')
+c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")
 return

-if not c.task_id and c.state == koji.BUILD_STATES['BUILDING']:
-c.state = koji.BUILD_STATES['FAILED']
-c.state_reason = ("Failed to build artifact %s: "
-"Builder did not return task ID" % (c.package))
-c.module_build.transition(conf, models.BUILD_STATES['failed'], failure_type='infra')
+if not c.task_id and c.state == koji.BUILD_STATES["BUILDING"]:
+c.state = koji.BUILD_STATES["FAILED"]
+c.state_reason = "Failed to build artifact %s: Builder did not return task ID" % (c.package)
+c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")
 return


@@ -104,10 +104,12 @@ def continue_batch_build(config, module, session, builder, components=None):
 # successfully built yet or isn't currently being built.
 unbuilt_components = components or [
 c for c in module.component_builds
-if (c.state != koji.BUILD_STATES['COMPLETE'] and
-c.state != koji.BUILD_STATES['BUILDING'] and
-c.state != koji.BUILD_STATES['FAILED'] and
-c.batch == module.batch)
+if (
+c.state != koji.BUILD_STATES["COMPLETE"]
+and c.state != koji.BUILD_STATES["BUILDING"]
+and c.state != koji.BUILD_STATES["FAILED"]
+and c.batch == module.batch
+)
 ]

 if not unbuilt_components:
@@ -134,17 +136,17 @@ def continue_batch_build(config, module, session, builder, components=None):
 for c in unbuilt_components:
 # If a previous build of the component was found, then the state will be marked as
 # COMPLETE so we should skip this
-if c.state == koji.BUILD_STATES['COMPLETE']:
+if c.state == koji.BUILD_STATES["COMPLETE"]:
 continue
 # Check the concurrent build threshold.
 if at_concurrent_component_threshold(config, session):
-log.info('Concurrent build threshold met')
+log.info("Concurrent build threshold met")
 break

 # We set state to "BUILDING" here because at this point we are committed
 # to build the component and at_concurrent_component_threshold() works by
 # counting the number of components in the "BUILDING" state.
-c.state = koji.BUILD_STATES['BUILDING']
+c.state = koji.BUILD_STATES["BUILDING"]
 components_to_build.append(c)

 # Start build of components in this batch.
@@ -152,8 +154,9 @@ def continue_batch_build(config, module, session, builder, components=None):
 if config.num_concurrent_builds > 0:
 max_workers = config.num_concurrent_builds
 with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
-futures = {executor.submit(start_build_component, builder, c):
-c for c in components_to_build}
+futures = {
+executor.submit(start_build_component, builder, c): c for c in components_to_build
+}
 concurrent.futures.wait(futures)
 # In case there has been an excepion generated directly in the
 # start_build_component, the future.result() will re-raise it in the
@@ -186,16 +189,15 @@ def start_next_batch_build(config, module, session, builder, components=None):
 # later on in the code
 all_reused_in_prev_batch = True
 for c in module.component_builds:
-if c.state in [None, koji.BUILD_STATES['BUILDING']]:
+if c.state in [None, koji.BUILD_STATES["BUILDING"]]:
 has_unbuilt_components = True

 if c.batch == module.batch:
 if not c.state:
 has_unbuilt_components_in_batch = True
-elif c.state == koji.BUILD_STATES['BUILDING']:
+elif c.state == koji.BUILD_STATES["BUILDING"]:
 has_building_components_in_batch = True
-elif (c.state in [koji.BUILD_STATES['FAILED'],
-koji.BUILD_STATES['CANCELED']]):
+elif c.state in [koji.BUILD_STATES["FAILED"], koji.BUILD_STATES["CANCELED"]]:
 has_failed_components = True

 if c.batch == module.batch and not c.reused_component_id:
@@ -203,57 +205,60 @@ def start_next_batch_build(config, module, session, builder, components=None):

 # Do not start new batch if there are no components to build.
 if not has_unbuilt_components:
-log.debug("Not starting new batch, there is no component to build "
-"for module %s" % module)
+log.debug(
+"Not starting new batch, there is no component to build for module %s" % module)
 return []

 # Check that there is something to build in current batch before starting
 # the new one. If there is, continue building current batch.
 if has_unbuilt_components_in_batch:
 log.info("Continuing building batch %d", module.batch)
-return continue_batch_build(
-config, module, session, builder, components)
+return continue_batch_build(config, module, session, builder, components)

 # Check that there are no components in BUILDING state in current batch.
 # If there are, wait until they are built.
 if has_building_components_in_batch:
-log.debug("Not starting new batch, there are still components in "
-"BUILDING state in current batch for module %s", module)
+log.debug(
+"Not starting new batch, there are still components in "
+"BUILDING state in current batch for module %s",
+module,
+)
 return []

 # Check that there are no failed components in this batch. If there are,
 # do not start the new batch.
 if has_failed_components:
-log.info("Not starting new batch, there are failed components for "
-"module %s", module)
+log.info("Not starting new batch, there are failed components for module %s", module)
 return []

 # Identify active tasks which might contain relicts of previous builds
 # and fail the module build if this^ happens.
-active_tasks = builder.list_tasks_for_components(module.component_builds,
-state='active')
+active_tasks = builder.list_tasks_for_components(module.component_builds, state="active")
 if isinstance(active_tasks, list) and active_tasks:
-state_reason = ("Cannot start a batch, because some components are already"
-" in 'building' state.")
+state_reason = \
+"Cannot start a batch, because some components are already in 'building' state."
 state_reason += " See tasks (ID): {}".format(
-', '.join([str(t['id']) for t in active_tasks])
+", ".join([str(t["id"]) for t in active_tasks])
 )
-module.transition(config, state=models.BUILD_STATES['failed'],
-state_reason=state_reason, failure_type='infra')
+module.transition(
+config,
+state=models.BUILD_STATES["failed"],
+state_reason=state_reason,
+failure_type="infra",
+)
 session.commit()
 return []

 else:
-log.debug("Builder {} doesn't provide information about active tasks."
-.format(builder))
+log.debug("Builder {} doesn't provide information about active tasks.".format(builder))

 # Find out if there is repo regeneration in progress for this module.
 # If there is, wait until the repo is regenerated before starting a new
 # batch.
 artifacts = [c.nvr for c in module.current_batch()]
 if not builder.buildroot_ready(artifacts):
-log.info("Not starting new batch, not all of %r are in the buildroot. "
-"Waiting." % artifacts)
+log.info(
+"Not starting new batch, not all of %r are in the buildroot. Waiting." % artifacts)
 return []

 # Although this variable isn't necessary, it is easier to read code later on with it
@@ -265,21 +270,21 @@ def start_next_batch_build(config, module, session, builder, components=None):
 # successfully built yet or isn't currently being built.
 unbuilt_components = components or [
 c for c in module.component_builds
-if (c.state != koji.BUILD_STATES['COMPLETE'] and
-c.state != koji.BUILD_STATES['BUILDING'] and
-c.state != koji.BUILD_STATES['FAILED'] and
-c.batch == module.batch)
+if (
+c.state != koji.BUILD_STATES["COMPLETE"]
+and c.state != koji.BUILD_STATES["BUILDING"]
+and c.state != koji.BUILD_STATES["FAILED"]
+and c.batch == module.batch
+)
 ]

 # If there are no components to build, skip the batch and start building
 # the new one. This can happen when resubmitting the failed module build.
 if not unbuilt_components and not components:
-log.info("Skipping build of batch %d, no component to build.",
-module.batch)
+log.info("Skipping build of batch %d, no component to build.", module.batch)
 return start_next_batch_build(config, module, session, builder)

-log.info("Starting build of next batch %d, %s" % (module.batch,
-unbuilt_components))
+log.info("Starting build of next batch %d, %s" % (module.batch, unbuilt_components))

 # Attempt to reuse any components possible in the batch before attempting to build any
 further_work = []
@@ -288,14 +293,13 @@ def start_next_batch_build(config, module, session, builder, components=None):
 should_try_reuse = True
 # If the rebuild strategy is "changed-and-after", try to figure out if it's worth checking if
 # the components can be reused to save on resources
-if module.rebuild_strategy == 'changed-and-after':
+if module.rebuild_strategy == "changed-and-after":
 # Check to see if the previous batch had all their builds reused except for when the
 # previous batch was 1 because that always has the module-build-macros component built
 should_try_reuse = all_reused_in_prev_batch or prev_batch == 1
 if should_try_reuse:
 component_names = [c.package for c in unbuilt_components]
-reusable_components = get_reusable_components(
-session, module, component_names)
+reusable_components = get_reusable_components(session, module, component_names)
 for c, reusable_c in zip(unbuilt_components, reusable_components):
 if reusable_c:
 components_reused = True
@@ -309,8 +313,10 @@ def start_next_batch_build(config, module, session, builder, components=None):
 # If all the components were reused in the batch then make a KojiRepoChange
 # message and return
 if components_reused and not unbuilt_components_after_reuse:
-further_work.append(module_build_service.messaging.KojiRepoChange(
-'start_build_batch: fake msg', builder.module_build_tag['name']))
+further_work.append(
+module_build_service.messaging.KojiRepoChange(
+"start_build_batch: fake msg", builder.module_build_tag["name"])
+)
 return further_work

 return further_work + continue_batch_build(
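start_next_batch_build() above boils down to a small decision table over the component states in the current batch. A compact sketch of that gating logic, with string states standing in for the koji.BUILD_STATES integers (the return labels are illustrative, not MBS values):

def next_batch_action(components, current_batch):
    # Mirrors the flags computed at the top of start_next_batch_build().
    in_batch = [c for c in components if c.batch == current_batch]
    if not any(c.state in (None, "BUILDING") for c in components):
        return "nothing-to-build"
    if any(c.state is None for c in in_batch):
        return "continue-current-batch"
    if any(c.state == "BUILDING" for c in in_batch):
        return "wait-for-current-batch"
    if any(c.state in ("FAILED", "CANCELED") for c in in_batch):
        return "do-not-start-new-batch"
    return "start-next-batch"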
@@ -30,8 +30,7 @@ from datetime import datetime
 from six import text_type, string_types

 from module_build_service import conf, log, models, Modulemd, glib
-from module_build_service.errors import (
-ValidationError, ProgrammingError, UnprocessableEntity)
+from module_build_service.errors import ValidationError, ProgrammingError, UnprocessableEntity


 def to_text_type(s):
@@ -62,8 +61,15 @@ def scm_url_schemes(terse=False):
 """

 scm_types = {
-"git": ("git://", "git+http://", "git+https://",
-"git+rsync://", "http://", "https://", "file://")
+"git": (
+"git://",
+"git+http://",
+"git+https://",
+"git+rsync://",
+"http://",
+"https://",
+"file://",
+)
 }

 if not terse:
@@ -79,6 +85,7 @@ def retry(timeout=conf.net_timeout, interval=conf.net_retry_interval, wait_on=Ex
 """ A decorator that allows to retry a section of code...
 ...until success or timeout.
 """
+
 def wrapper(function):
 @functools.wraps(function)
 def inner(*args, **kwargs):
@@ -87,21 +94,26 @@ def retry(timeout=conf.net_timeout, interval=conf.net_retry_interval, wait_on=Ex
 try:
 return function(*args, **kwargs)
 except wait_on as e:
-log.warning("Exception %r raised from %r. Retry in %rs" % (
-e, function, interval))
+log.warning(
+"Exception %r raised from %r. Retry in %rs" % (e, function, interval)
+)
 time.sleep(interval)
 if (time.time() - start) >= timeout:
 raise  # This re-raises the last exception.
+
 return inner
+
 return wrapper


 def module_build_state_from_msg(msg):
 state = int(msg.module_build_state)
 # TODO better handling
-assert state in models.BUILD_STATES.values(), (
-'state=%s(%s) is not in %s'
-% (state, type(state), list(models.BUILD_STATES.values())))
+assert state in models.BUILD_STATES.values(), "state=%s(%s) is not in %s" % (
+state,
+type(state),
+list(models.BUILD_STATES.values()),
+)
 return state
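The retry() decorator reformatted above loops until the wrapped call succeeds or the timeout elapses, sleeping between attempts. A minimal self-contained re-implementation of the same idea (MBS sleeps before checking the timeout; the ordering here is slightly simplified):

import functools
import time


def retry(timeout=120, interval=30, wait_on=Exception):
    def wrapper(function):
        @functools.wraps(function)
        def inner(*args, **kwargs):
            start = time.time()
            while True:
                try:
                    return function(*args, **kwargs)
                except wait_on:
                    if (time.time() - start) >= timeout:
                        raise  # give up: re-raise the last exception
                    time.sleep(interval)
        return inner
    return wrapper


# Usage: keep retrying flaky network calls for up to 5 seconds.
@retry(timeout=5, interval=1, wait_on=IOError)
def fetch_something():
    raise IOError("temporarily unavailable")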
@@ -125,23 +137,23 @@ def generate_koji_tag(name, stream, version, context, max_length=256, scratch=Fa
 :rtype: str
 """
 if scratch:
-prefix = 'scrmod-'
+prefix = "scrmod-"
 # use unique suffix so same commit can be resubmitted
-suffix = '+' + str(scratch_id)
+suffix = "+" + str(scratch_id)
 else:
-prefix = 'module-'
-suffix = ''
+prefix = "module-"
+suffix = ""
 nsvc_list = [name, stream, str(version), context]
-nsvc_tag = prefix + '-'.join(nsvc_list) + suffix
-if len(nsvc_tag) + len('-build') > max_length:
+nsvc_tag = prefix + "-".join(nsvc_list) + suffix
+if len(nsvc_tag) + len("-build") > max_length:
 # Fallback to the old format of 'module-<hash>' if the generated koji tag
 # name is longer than max_length
-nsvc_hash = hashlib.sha1('.'.join(nsvc_list).encode('utf-8')).hexdigest()[:16]
+nsvc_hash = hashlib.sha1(".".join(nsvc_list).encode("utf-8")).hexdigest()[:16]
 return prefix + nsvc_hash + suffix
 return nsvc_tag
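generate_koji_tag() above guards against Koji's tag length limit: if the readable name-stream-version-context tag would overflow once "-build" is appended, it falls back to a short sha1 of the NSVC. The core of that logic in isolation, with the scratch-suffix handling omitted:

import hashlib


def koji_tag(name, stream, version, context, max_length=256, prefix="module-"):
    nsvc_list = [name, stream, str(version), context]
    tag = prefix + "-".join(nsvc_list)
    if len(tag) + len("-build") > max_length:
        # Same fallback as above: hash the NSVC and keep the tag short.
        nsvc_hash = hashlib.sha1(".".join(nsvc_list).encode("utf-8")).hexdigest()[:16]
        return prefix + nsvc_hash
    return tag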
-def validate_koji_tag(tag_arg_names, pre='', post='-', dict_key='name'):
+def validate_koji_tag(tag_arg_names, pre="", post="-", dict_key="name"):
 """
 Used as a decorator validates koji tag arg(s)' value(s)
 against configurable list of koji tag prefixes.
@@ -168,24 +180,29 @@ def validate_koji_tag(tag_arg_names, pre='', post='-', dict_key='name'):
 # If any of them don't appear in the function, then fail.
 if tag_arg_name not in call_args:
 raise ProgrammingError(
-'{} Inspected argument {} is not within function args.'
-' The function was: {}.'
-.format(err_subject, tag_arg_name, function.__name__))
+"{} Inspected argument {} is not within function args."
+" The function was: {}.".format(
+err_subject, tag_arg_name, function.__name__
+)
+)

 tag_arg_val = call_args[tag_arg_name]

 # First, check that we have some value
 if not tag_arg_val:
-raise ValidationError('{} Can not validate {}. No value provided.'
-.format(err_subject, tag_arg_name))
+raise ValidationError(
+"{} Can not validate {}. No value provided.".format(
+err_subject, tag_arg_name)
+)

 # If any of them are a dict, then use the provided dict_key
 if isinstance(tag_arg_val, dict):
 if dict_key not in tag_arg_val:
 raise ProgrammingError(
-'{} Inspected dict arg {} does not contain {} key.'
-' The function was: {}.'
-.format(err_subject, tag_arg_name, dict_key, function.__name__))
+"{} Inspected dict arg {} does not contain {} key."
+" The function was: {}.".format(
+err_subject, tag_arg_name, dict_key, function.__name__)
+)
 tag_list = [tag_arg_val[dict_key]]
 elif isinstance(tag_arg_val, list):
 tag_list = tag_arg_val
@@ -200,9 +217,9 @@ def validate_koji_tag(tag_arg_names, pre='', post='-', dict_key='name'):
 # Only raise this error if the given tags don't start with
 # *any* of our allowed prefixes.
 raise ValidationError(
-'Koji tag validation: {} does not satisfy any of allowed prefixes: {}'
-.format(tag_list,
-[pre + p + post for p in conf.koji_tag_prefixes]))
+"Koji tag validation: {} does not satisfy any of allowed prefixes: {}"
+.format(tag_list, [pre + p + post for p in conf.koji_tag_prefixes])
+)

 # Finally.. after all that validation, call the original function
 # and return its value.
@@ -223,8 +240,12 @@ def get_rpm_release(module_build):
 :param module_build: a models.ModuleBuild object
 :return: a string of the module's dist tag
 """
-dist_str = '.'.join([module_build.name, module_build.stream, str(module_build.version),
-str(module_build.context)]).encode('utf-8')
+dist_str = ".".join([
+module_build.name,
+module_build.stream,
+str(module_build.version),
+str(module_build.context),
+]).encode("utf-8")
 dist_hash = hashlib.sha1(dist_str).hexdigest()[:8]

 # We need to share the same auto-incrementing index in dist tag between all MSE builds.
@@ -234,14 +255,14 @@ def get_rpm_release(module_build):
 mse_build_ids.sort()
 index = mse_build_ids[0]
 try:
-buildrequires = module_build.mmd().get_xmd()['mbs']['buildrequires']
+buildrequires = module_build.mmd().get_xmd()["mbs"]["buildrequires"]
 except (ValueError, KeyError):
-log.warning('Module build {0} does not have buildrequires in its xmd'
-.format(module_build.id))
+log.warning(
+"Module build {0} does not have buildrequires in its xmd".format(module_build.id))
 buildrequires = None

 # Determine which buildrequired module will influence the disttag
-br_module_marking = ''
+br_module_marking = ""
 # If the buildrequires are recorded in the xmd then we can try to find the base module that
 # is buildrequired
 if buildrequires:
@@ -256,13 +277,17 @@ def get_rpm_release(module_build):

 with models.make_session(conf) as session:
 module_obj = models.ModuleBuild.get_build_from_nsvc(
-session, module, module_in_xmd['stream'], module_in_xmd['version'],
-module_in_xmd['context'])
+session,
+module,
+module_in_xmd["stream"],
+module_in_xmd["version"],
+module_in_xmd["context"],
+)
 if not module_obj:
 continue

 try:
-marking = module_obj.mmd().get_xmd()['mbs']['disttag_marking']
+marking = module_obj.mmd().get_xmd()["mbs"]["disttag_marking"]
 # We must check for a KeyError because a Variant object doesn't support the `get`
 # method
 except KeyError:
@@ -272,20 +297,19 @@ def get_rpm_release(module_build):
 # conf.allowed_disttag_marking_module_names, and the base module doesn't have
 # the disttag_marking set, then default to the stream of the first base module
 marking = module_obj.stream
-br_module_marking = marking + '+'
+br_module_marking = marking + "+"
 break
 else:
-log.warning('Module build {0} does not buildrequire a base module ({1})'
-.format(module_build.id, ' or '.join(conf.base_module_names)))
+log.warning(
+"Module build {0} does not buildrequire a base module ({1})".format(
+module_build.id, " or ".join(conf.base_module_names))
+)

 # use alternate prefix for scratch module build components so they can be identified
-prefix = ('scrmod+' if module_build.scratch else conf.default_dist_tag_prefix)
+prefix = "scrmod+" if module_build.scratch else conf.default_dist_tag_prefix

-return '{prefix}{base_module_marking}{index}+{dist_hash}'.format(
-prefix=prefix,
-base_module_marking=br_module_marking,
-index=index,
-dist_hash=dist_hash,
+return "{prefix}{base_module_marking}{index}+{dist_hash}".format(
+prefix=prefix, base_module_marking=br_module_marking, index=index, dist_hash=dist_hash
 )
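get_rpm_release() above derives the dist tag from a sha1 of the NSVC plus a shared MSE index and an optional base-module marking. A stripped-down sketch of just the string assembly (the index and marking lookups against the database are elided; the default prefix value is illustrative):

import hashlib


def rpm_release(name, stream, version, context, index, marking="", prefix="module+"):
    # Hash the NSVC so the release is stable for a given module build.
    dist_str = ".".join([name, stream, str(version), str(context)]).encode("utf-8")
    dist_hash = hashlib.sha1(dist_str).hexdigest()[:8]
    return "{0}{1}{2}+{3}".format(prefix, marking, index, dist_hash)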
@@ -302,6 +326,7 @@ def create_dogpile_key_generator_func(skip_first_n_args=0):
 when the db.session is part of cached method call, and the caching should
 work no matter what session instance is passed to cached method argument.
 """
+
 def key_generator(namespace, fn):
 fname = fn.__name__

@@ -315,6 +340,7 @@ def create_dogpile_key_generator_func(skip_first_n_args=0):
 return key_template

 return generate_key
+
 return key_generator


@@ -354,8 +380,8 @@ def import_mmd(session, mmd, check_buildrequires=True):

 # Verify that the virtual streams are the correct type
 if virtual_streams and (
-not isinstance(virtual_streams, list) or
-any(not isinstance(vs, string_types) for vs in virtual_streams)
+not isinstance(virtual_streams, list)
+or any(not isinstance(vs, string_types) for vs in virtual_streams)
 ):
 msg = "The virtual streams must be a list of strings"
 log.error(msg)
@@ -398,19 +424,19 @@ def import_mmd(session, mmd, check_buildrequires=True):
 xmd_brs = set(xmd["mbs"].get("buildrequires", {}).keys())
 if brs - xmd_brs:
 raise UnprocessableEntity(
-'The imported module buildrequires other modules, but the metadata in the '
-'xmd["mbs"]["buildrequires"] dictionary is missing entries')
+"The imported module buildrequires other modules, but the metadata in the "
+'xmd["mbs"]["buildrequires"] dictionary is missing entries'
+)
 elif "buildrequires" not in xmd["mbs"]:
 xmd["mbs"]["buildrequires"] = {}
 mmd.set_xmd(glib.dict_values(xmd))

-koji_tag = xmd['mbs'].get('koji_tag')
+koji_tag = xmd["mbs"].get("koji_tag")
 if koji_tag is None:
 log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc))

 # Get the ModuleBuild from DB.
-build = models.ModuleBuild.get_build_from_nsvc(
-session, name, stream, version, context)
+build = models.ModuleBuild.get_build_from_nsvc(session, name, stream, version, context)
 if build:
 msg = "Updating existing module build {}.".format(nsvc)
 log.info(msg)
@@ -422,11 +448,11 @@ def import_mmd(session, mmd, check_buildrequires=True):
 build.stream = stream
 build.version = version
 build.koji_tag = koji_tag
-build.state = models.BUILD_STATES['ready']
+build.state = models.BUILD_STATES["ready"]
 build.modulemd = to_text_type(mmd.dumps())
 build.context = context
 build.owner = "mbs_import"
-build.rebuild_strategy = 'all'
+build.rebuild_strategy = "all"
 build.time_submitted = datetime.utcnow()
 build.time_modified = datetime.utcnow()
 build.time_completed = datetime.utcnow()
@@ -492,15 +518,15 @@ def import_fake_base_module(nsvc):
 srpm_buildroot.add_rpm(rpm)
 mmd.add_profile(srpm_buildroot)

-xmd = {'mbs': {}}
-xmd_mbs = xmd['mbs']
-xmd_mbs['buildrequires'] = {}
-xmd_mbs['requires'] = {}
-xmd_mbs['commit'] = 'ref_%s' % context
-xmd_mbs['mse'] = 'true'
+xmd = {"mbs": {}}
+xmd_mbs = xmd["mbs"]
+xmd_mbs["buildrequires"] = {}
+xmd_mbs["requires"] = {}
+xmd_mbs["commit"] = "ref_%s" % context
+xmd_mbs["mse"] = "true"
 # Use empty "repofile://" URI for base module. The base module will use the
 # `conf.base_module_names` list as list of default repositories.
-xmd_mbs['koji_tag'] = 'repofile://'
+xmd_mbs["koji_tag"] = "repofile://"
 mmd.set_xmd(glib.dict_values(xmd))

 with models.make_session(conf) as session:
@@ -513,6 +539,7 @@ def get_local_releasever():
 """
 # Import DNF here to not force it as a hard MBS dependency.
 import dnf
+
 dnf_base = dnf.Base()
 return dnf_base.conf.releasever

@@ -578,8 +605,8 @@ def get_mmd_from_scm(url):
 """
 from module_build_service.utils.submit import _fetch_mmd

-mmd, _ = _fetch_mmd(url, branch=None, allow_local_url=False,
-whitelist_url=False, mandatory_checks=False)
+mmd, _ = _fetch_mmd(
+url, branch=None, allow_local_url=False, whitelist_url=False, mandatory_checks=False)

 return mmd
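create_dogpile_key_generator_func() above builds cache keys that skip the leading db.session argument, so cached lookups behave the same no matter which session instance is passed in. A rough sketch of such a factory, assuming a dogpile.cache-style key-generator interface (namespace, function in; key function out); the exact key format MBS uses is not shown in this hunk:

def make_key_generator(skip_first_n_args=0):
    def key_generator(namespace, fn):
        def generate_key(*args, **kwargs):
            # Drop the first N positional args (typically the session) so
            # the key depends only on the arguments that matter.
            relevant = args[skip_first_n_args:]
            return fn.__name__ + "_" + "_".join(str(a) for a in relevant)
        return generate_key
    return key_generator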
@@ -60,11 +60,9 @@ def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream
 if name in default_streams:
 expanded_streams = [default_streams[name]]
 elif raise_if_stream_ambigous:
-raise StreamAmbigous(
-"There are multiple streams to choose from for module %s." % name)
+raise StreamAmbigous("There are multiple streams to choose from for module %s." % name)
 else:
-builds = models.ModuleBuild.get_last_build_in_all_streams(
-session, name)
+builds = models.ModuleBuild.get_last_build_in_all_streams(session, name)
 expanded_streams = [build.stream for build in builds]
 else:
 expanded_streams = []
@@ -79,8 +77,10 @@ def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream
 if name in default_streams:
 expanded_streams = [default_streams[name]]
 elif raise_if_stream_ambigous:
-raise StreamAmbigous("There are multiple streams %r to choose from for module %s."
-% (expanded_streams, name))
+raise StreamAmbigous(
+"There are multiple streams %r to choose from for module %s."
+% (expanded_streams, name)
+)

 return expanded_streams
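_expand_mse_streams() above picks streams in a fixed precedence: an explicit request wins, then the configured default, then (unless ambiguity is fatal) every known stream. The same precedence in a standalone function, with ValueError standing in for StreamAmbigous and known_streams standing in for the last-build-per-stream database query:

def expand_streams(name, requested, default_streams, known_streams, strict=False):
    if requested:  # an explicit stream list always wins
        return list(requested)
    if name in default_streams:
        return [default_streams[name]]
    if strict and len(known_streams) > 1:
        raise ValueError("There are multiple streams to choose from for module %s." % name)
    return list(known_streams)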
@@ -102,23 +102,32 @@ def expand_mse_streams(session, mmd, default_streams=None, raise_if_stream_ambig
|
||||
expanded = {}
|
||||
for name, streams in deps.get_requires().items():
|
||||
streams_set = Modulemd.SimpleSet()
|
||||
streams_set.set(_expand_mse_streams(
|
||||
session, name, streams.get(), default_streams, raise_if_stream_ambigous))
|
||||
streams_set.set(
|
||||
_expand_mse_streams(
|
||||
session, name, streams.get(), default_streams, raise_if_stream_ambigous)
|
||||
)
|
||||
expanded[name] = streams_set
|
||||
deps.set_requires(expanded)
|
||||
|
||||
expanded = {}
|
||||
for name, streams in deps.get_buildrequires().items():
|
||||
streams_set = Modulemd.SimpleSet()
|
||||
streams_set.set(_expand_mse_streams(
|
||||
session, name, streams.get(), default_streams, raise_if_stream_ambigous))
|
||||
streams_set.set(
|
||||
_expand_mse_streams(
|
||||
session, name, streams.get(), default_streams, raise_if_stream_ambigous)
|
||||
)
|
||||
expanded[name] = streams_set
|
||||
deps.set_buildrequires(expanded)
|
||||
|
||||
|
||||
def _get_mmds_from_requires(requires, mmds, recursive=False,
|
||||
default_streams=None, raise_if_stream_ambigous=False,
|
||||
base_module_mmds=None):
|
||||
def _get_mmds_from_requires(
|
||||
requires,
|
||||
mmds,
|
||||
recursive=False,
|
||||
default_streams=None,
|
||||
raise_if_stream_ambigous=False,
|
||||
base_module_mmds=None,
|
||||
):
|
||||
"""
|
||||
Helper method for get_mmds_required_by_module_recursively returning
|
||||
the list of module metadata objects defined by `requires` dict.
|
||||
@@ -153,8 +162,10 @@ def _get_mmds_from_requires(requires, mmds, recursive=False,
|
||||
if name in default_streams:
|
||||
streams_to_try = [default_streams[name]]
|
||||
elif len(streams_to_try) > 1 and raise_if_stream_ambigous:
|
||||
raise StreamAmbigous("There are multiple streams %r to choose from for module %s."
|
||||
% (streams_to_try, name))
|
||||
raise StreamAmbigous(
|
||||
"There are multiple streams %r to choose from for module %s."
|
||||
% (streams_to_try, name)
|
||||
)
|
||||
|
||||
# For each valid stream, find the last build in a stream and also all
|
||||
# its contexts and add mmds of these builds to `mmds` and `added_mmds`.
|
||||
@@ -170,10 +181,12 @@ def _get_mmds_from_requires(requires, mmds, recursive=False,
|
||||
if base_module_mmds:
|
||||
for base_module_mmd in base_module_mmds:
|
||||
base_module_nsvc = ":".join([
|
||||
base_module_mmd.get_name(), base_module_mmd.get_stream(),
|
||||
str(base_module_mmd.get_version()), base_module_mmd.get_context()])
|
||||
mmds[ns] += resolver.get_buildrequired_modulemds(
|
||||
name, stream, base_module_nsvc)
|
||||
base_module_mmd.get_name(),
|
||||
base_module_mmd.get_stream(),
|
||||
str(base_module_mmd.get_version()),
|
||||
base_module_mmd.get_context(),
|
||||
])
|
||||
mmds[ns] += resolver.get_buildrequired_modulemds(name, stream, base_module_nsvc)
|
||||
else:
|
||||
mmds[ns] = resolver.get_module_modulemds(name, stream, strict=True)
|
||||
added_mmds[ns] += mmds[ns]
|
||||
@@ -262,7 +275,8 @@ def _get_base_module_mmds(mmd):
|
||||
|
||||
|
||||
def get_mmds_required_by_module_recursively(
|
||||
mmd, default_streams=None, raise_if_stream_ambigous=False):
|
||||
mmd, default_streams=None, raise_if_stream_ambigous=False
|
||||
):
|
||||
"""
|
||||
Returns the list of Module metadata objects of all modules required while
|
||||
building the module defined by `mmd` module metadata. This presumes the
|
||||
@@ -295,10 +309,11 @@ def get_mmds_required_by_module_recursively(
|
||||
# Get the MMDs of all compatible base modules based on the buildrequires.
|
||||
base_module_mmds = _get_base_module_mmds(mmd)
|
||||
if not base_module_mmds:
|
||||
base_module_choices = ' or '.join(conf.base_module_names)
|
||||
base_module_choices = " or ".join(conf.base_module_names)
|
||||
raise UnprocessableEntity(
|
||||
"None of the base module ({}) streams in the buildrequires section could be found"
|
||||
.format(base_module_choices))
.format(base_module_choices)
)

# Add base modules to `mmds`.
for base_module in base_module_mmds:
@@ -309,23 +324,32 @@ def get_mmds_required_by_module_recursively(
# Get all the buildrequires of the module of interest.
for deps in mmd.get_dependencies():
mmds = _get_mmds_from_requires(
deps.get_buildrequires(), mmds, False, default_streams,
raise_if_stream_ambigous, base_module_mmds)
deps.get_buildrequires(),
mmds,
False,
default_streams,
raise_if_stream_ambigous,
base_module_mmds,
)

# Now get the requires of buildrequires recursively.
for mmd_key in list(mmds.keys()):
for mmd in mmds[mmd_key]:
for deps in mmd.get_dependencies():
mmds = _get_mmds_from_requires(
deps.get_requires(), mmds, True, default_streams,
raise_if_stream_ambigous, base_module_mmds)
deps.get_requires(),
mmds,
True,
default_streams,
raise_if_stream_ambigous,
base_module_mmds,
)

# Make single list from dict of lists.
res = []
for ns, mmds_list in mmds.items():
if len(mmds_list) == 0:
raise UnprocessableEntity(
"Cannot find any module builds for %s" % (ns))
raise UnprocessableEntity("Cannot find any module builds for %s" % (ns))
res += mmds_list
return res

@@ -374,7 +398,8 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
# Show log.info message with the NSVCs we have added to mmd_resolver.
nsvcs_to_solve = [
":".join([m.get_name(), m.get_stream(), str(m.get_version()), str(m.get_context())])
for m in mmds_for_resolving]
for m in mmds_for_resolving
]
log.info("Starting resolving with following input modules: %r", nsvcs_to_solve)

# Resolve the dependencies between modules and get the list of all valid
@@ -407,7 +432,7 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
# Get the values for dependencies_id, self_nsvca and req_name_stream variables.
for nsvca in requires:
req_name, req_stream, _, req_context, req_arch = nsvca.split(":")
if req_arch == 'src':
if req_arch == "src":
assert req_name == current_mmd.get_name()
assert req_stream == current_mmd.get_stream()
assert dependencies_id is None
@@ -418,8 +443,9 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
req_name_stream[req_name] = req_stream
if dependencies_id is None or self_nsvca is None:
raise RuntimeError(
"%s:%s not found in requires %r" % (
current_mmd.get_name(), current_mmd.get_stream(), requires))
"%s:%s not found in requires %r"
% (current_mmd.get_name(), current_mmd.get_stream(), requires)
)

# The name:[streams, ...] pairs do not have to be the same in both
# buildrequires/requires. In case they are the same, we replace the streams
@@ -455,7 +481,7 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
new_dep.add_buildrequires(req_name, [req_name_stream[req_name]])

# Set the new dependencies.
mmd_copy.set_dependencies((new_dep, ))
mmd_copy.set_dependencies((new_dep,))

# The Modulemd.Dependencies() stores only streams, but to really build this
# module, we need NSVC of buildrequires, so we have to store this data in XMD.
@@ -471,11 +497,11 @@ def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default
br_list.append(nsvc)

# Resolve the buildrequires and store the result in XMD.
if 'mbs' not in xmd:
xmd['mbs'] = {}
if "mbs" not in xmd:
xmd["mbs"] = {}
resolver = module_build_service.resolver.system_resolver
xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_list)
xmd['mbs']['mse'] = True
xmd["mbs"]["buildrequires"] = resolver.resolve_requires(br_list)
xmd["mbs"]["mse"] = True

mmd_copy.set_xmd(glib.dict_values(xmd))

@@ -28,8 +28,7 @@ import module_build_service.messaging
from module_build_service import log, models, conf


def reuse_component(component, previous_component_build,
change_state_now=False):
def reuse_component(component, previous_component_build, change_state_now=False):
"""
Reuses component build `previous_component_build` instead of building
component `component`
@@ -42,8 +41,8 @@ def reuse_component(component, previous_component_build,

log.info(
'Reusing component "{0}" from a previous module '
'build with the nvr "{1}"'.format(
component.package, previous_component_build.nvr))
'build with the nvr "{1}"'.format(component.package, previous_component_build.nvr)
)
component.reused_component_id = previous_component_build.id
component.task_id = previous_component_build.task_id
if change_state_now:
@@ -54,24 +53,23 @@ def reuse_component(component, previous_component_build,
# few lines below. If we would set it to the right state right
# here, we would miss the code path handling the KojiBuildChange
# which works only when switching from BUILDING to COMPLETE.
component.state = koji.BUILD_STATES['BUILDING']
component.state_reason = \
'Reused component from previous module build'
component.state = koji.BUILD_STATES["BUILDING"]
component.state_reason = "Reused component from previous module build"
component.nvr = previous_component_build.nvr
nvr_dict = kobo.rpmlib.parse_nvr(component.nvr)
# Add this message to further_work so that the reused
# component will be tagged properly
return [
module_build_service.messaging.KojiBuildChange(
msg_id='reuse_component: fake msg',
msg_id="reuse_component: fake msg",
build_id=None,
task_id=component.task_id,
build_new_state=previous_component_build.state,
build_name=nvr_dict['name'],
build_version=nvr_dict['version'],
build_release=nvr_dict['release'],
build_name=nvr_dict["name"],
build_version=nvr_dict["version"],
build_release=nvr_dict["release"],
module_build_id=component.module_id,
state_reason=component.state_reason
state_reason=component.state_reason,
)
]

@@ -90,18 +88,20 @@ def get_reusable_module(session, module):
mmd = module.mmd()

# Find the latest module that is in the done or ready state
previous_module_build = session.query(models.ModuleBuild)\
.filter_by(name=mmd.get_name())\
.filter_by(stream=mmd.get_stream())\
.filter_by(state=models.BUILD_STATES["ready"])\
.filter(models.ModuleBuild.scmurl.isnot(None))\
.filter_by(build_context=module.build_context)\
previous_module_build = (
session.query(models.ModuleBuild)
.filter_by(name=mmd.get_name())
.filter_by(stream=mmd.get_stream())
.filter_by(state=models.BUILD_STATES["ready"])
.filter(models.ModuleBuild.scmurl.isnot(None))
.filter_by(build_context=module.build_context)
.order_by(models.ModuleBuild.time_completed.desc())
)
# If we are rebuilding with the "changed-and-after" option, then we can't reuse
# components from modules that were built more liberally
if module.rebuild_strategy == 'changed-and-after':
if module.rebuild_strategy == "changed-and-after":
previous_module_build = previous_module_build.filter(
models.ModuleBuild.rebuild_strategy.in_(['all', 'changed-and-after']))
models.ModuleBuild.rebuild_strategy.in_(["all", "changed-and-after"]))
previous_module_build = previous_module_build.filter_by(
ref_build_context=module.ref_build_context)
previous_module_build = previous_module_build.first()
@@ -139,9 +139,13 @@ def attempt_to_reuse_all_components(builder, session, module):
if c.package == "module-build-macros":
continue
component_to_reuse = get_reusable_component(
session, module, c.package,
previous_module_build=previous_module_build, mmd=mmd,
old_mmd=old_mmd)
session,
module,
c.package,
previous_module_build=previous_module_build,
mmd=mmd,
old_mmd=old_mmd,
)
if not component_to_reuse:
return False

@@ -188,7 +192,7 @@ def get_reusable_components(session, module, component_names, previous_module_bu
order as `component_names`
"""
# We support components reusing only for koji and test backend.
if conf.system not in ['koji', 'test']:
if conf.system not in ["koji", "test"]:
return [None] * len(component_names)

if not previous_module_build:
@@ -201,15 +205,17 @@ def get_reusable_components(session, module, component_names, previous_module_bu

ret = []
for component_name in component_names:
ret.append(get_reusable_component(
session, module, component_name, previous_module_build, mmd,
old_mmd))
ret.append(
get_reusable_component(
session, module, component_name, previous_module_build, mmd, old_mmd)
)

return ret


def get_reusable_component(session, module, component_name,
previous_module_build=None, mmd=None, old_mmd=None):
def get_reusable_component(
session, module, component_name, previous_module_build=None, mmd=None, old_mmd=None
):
"""
Returns the component (RPM) build of a module that can be reused
instead of needing to rebuild it
@@ -235,11 +241,11 @@ def get_reusable_component(session, module, component_name,
"""

# We support component reusing only for koji and test backend.
if conf.system not in ['koji', 'test']:
if conf.system not in ["koji", "test"]:
return None

# If the rebuild strategy is "all", that means that nothing can be reused
if module.rebuild_strategy == 'all':
if module.rebuild_strategy == "all":
log.info('Cannot re-use the component because the rebuild strategy is "all".')
return None

@@ -258,39 +264,46 @@ def get_reusable_component(session, module, component_name,
# be reused
new_module_build_component = models.ComponentBuild.from_component_name(
session, component_name, module.id)
if not new_module_build_component or not new_module_build_component.batch \
or not new_module_build_component.ref:
log.info('Cannot re-use. New component not found in the db.')
if (
not new_module_build_component
or not new_module_build_component.batch
or not new_module_build_component.ref
):
log.info("Cannot re-use. New component not found in the db.")
return None

prev_module_build_component = models.ComponentBuild.from_component_name(
session, component_name, previous_module_build.id)
session, component_name, previous_module_build.id
)
# If the component to reuse for some reason was not found in the database,
# or the ref is missing, something has gone wrong and the component cannot
# be reused
if not prev_module_build_component or not prev_module_build_component.batch\
or not prev_module_build_component.ref:
log.info('Cannot re-use. Previous component not found in the db.')
if (
not prev_module_build_component
or not prev_module_build_component.batch
or not prev_module_build_component.ref
):
log.info("Cannot re-use. Previous component not found in the db.")
return None

# Make sure the ref for the component that is trying to be reused
# hasn't changed since the last build
if prev_module_build_component.ref != new_module_build_component.ref:
log.info('Cannot re-use. Component commit hashes do not match.')
log.info("Cannot re-use. Component commit hashes do not match.")
return None

# At this point we've determined that both module builds contain the component
# and the components share the same commit hash
if module.rebuild_strategy == 'changed-and-after':
if module.rebuild_strategy == "changed-and-after":
# Make sure the batch number for the component that is trying to be reused
# hasn't changed since the last build
if prev_module_build_component.batch != new_module_build_component.batch:
log.info('Cannot re-use. Batch numbers do not match.')
log.info("Cannot re-use. Batch numbers do not match.")
return None

# If the mmd.buildopts.macros.rpms changed, we cannot reuse
if mmd.get_rpm_buildopts().get('macros') != old_mmd.get_rpm_buildopts().get('macros'):
log.info('Cannot re-use. Old modulemd macros do not match the new.')
if mmd.get_rpm_buildopts().get("macros") != old_mmd.get_rpm_buildopts().get("macros"):
log.info("Cannot re-use. Old modulemd macros do not match the new.")
return None

# At this point we've determined that both module builds contain the component
@@ -318,32 +331,34 @@ def get_reusable_component(session, module, component_name,
continue

new_module_build_components.append(set([
(value.package, value.ref) for value in
new_component_builds if value.batch == i + 1
(value.package, value.ref)
for value in new_component_builds
if value.batch == i + 1
]))

previous_module_build_components.append(set([
(value.package, value.ref) for value in
prev_component_builds if value.batch == i + 1
(value.package, value.ref)
for value in prev_component_builds
if value.batch == i + 1
]))

# If the previous batches don't have the same ordering and hashes, then the
# component can't be reused
if previous_module_build_components != new_module_build_components:
log.info('Cannot re-use. Ordering or commit hashes of '
'previous batches differ.')
log.info("Cannot re-use. Ordering or commit hashes of previous batches differ.")
return None

for pkg_name, pkg in mmd.get_rpm_components().items():
if pkg_name not in old_mmd.get_rpm_components():
log.info('Cannot re-use. Package lists are different.')
log.info("Cannot re-use. Package lists are different.")
return None
if set(pkg.get_arches().get()) != \
set(old_mmd.get_rpm_components()[pkg_name].get_arches().get()):
log.info('Cannot re-use. Architectures are different for package: %s.' % pkg_name)
if set(pkg.get_arches().get()) != set(
old_mmd.get_rpm_components()[pkg_name].get_arches().get()
):
log.info("Cannot re-use. Architectures are different for package: %s." % pkg_name)
return None

reusable_component = models.ComponentBuild.query.filter_by(
package=component_name, module_id=previous_module_build.id).one()
log.debug('Found reusable component!')
log.debug("Found reusable component!")
return reusable_component

@@ -40,8 +40,7 @@ from gi.repository import GLib

import module_build_service.scm
from module_build_service import conf, db, log, models, Modulemd
from module_build_service.errors import (
ValidationError, UnprocessableEntity, Forbidden, Conflict)
from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden, Conflict
from module_build_service import glib
from module_build_service.utils import to_text_type

@@ -106,18 +105,15 @@ def _scm_get_latest(pkg):
# we want to pull from, we need to resolve that f25 branch
# to the specific commit available at the time of
# submission (now).
pkgref = module_build_service.scm.SCM(
pkg.get_repository()).get_latest(pkg.get_ref())
pkgref = module_build_service.scm.SCM(pkg.get_repository()).get_latest(pkg.get_ref())
except Exception as e:
log.exception(e)
return {'error': "Failed to get the latest commit for %s#%s" % (
pkg.get_repository(), pkg.get_ref())}
return {
"error": "Failed to get the latest commit for %s#%s"
% (pkg.get_repository(), pkg.get_ref())
}

return {
'pkg_name': pkg.get_name(),
'pkg_ref': pkgref,
'error': None
}
return {"pkg_name": pkg.get_name(), "pkg_ref": pkgref, "error": None}


def format_mmd(mmd, scmurl, module=None, session=None):
@@ -136,12 +132,12 @@ def format_mmd(mmd, scmurl, module=None, session=None):
from module_build_service.scm import SCM

xmd = glib.from_variant_dict(mmd.get_xmd())
if 'mbs' not in xmd:
xmd['mbs'] = {}
if 'scmurl' not in xmd['mbs']:
xmd['mbs']['scmurl'] = scmurl or ''
if 'commit' not in xmd['mbs']:
xmd['mbs']['commit'] = ''
if "mbs" not in xmd:
xmd["mbs"] = {}
if "scmurl" not in xmd["mbs"]:
xmd["mbs"]["scmurl"] = scmurl or ""
if "commit" not in xmd["mbs"]:
xmd["mbs"]["commit"] = ""

# If module build was submitted via yaml file, there is no scmurl
if scmurl:
@@ -154,35 +150,37 @@ def format_mmd(mmd, scmurl, module=None, session=None):
else:
full_scm_hash = scm.get_full_commit_hash()

xmd['mbs']['commit'] = full_scm_hash
xmd["mbs"]["commit"] = full_scm_hash
# If a commit hash wasn't provided then just get the latest from master
else:
xmd['mbs']['commit'] = scm.get_latest()
xmd["mbs"]["commit"] = scm.get_latest()

if mmd.get_rpm_components() or mmd.get_module_components():
if 'rpms' not in xmd['mbs']:
xmd['mbs']['rpms'] = {}
if "rpms" not in xmd["mbs"]:
xmd["mbs"]["rpms"] = {}
# Add missing data in RPM components
for pkgname, pkg in mmd.get_rpm_components().items():
# In case of resubmit of existing module which have been
# cancelled/failed during the init state, the package
# was maybe already handled by MBS, so skip it in this case.
if pkgname in xmd['mbs']['rpms']:
if pkgname in xmd["mbs"]["rpms"]:
continue
if pkg.get_repository() and not conf.rpms_allow_repository:
raise Forbidden(
"Custom component repositories aren't allowed. "
"%r bears repository %r" % (pkgname, pkg.get_repository()))
"%r bears repository %r" % (pkgname, pkg.get_repository())
)
if pkg.get_cache() and not conf.rpms_allow_cache:
raise Forbidden(
"Custom component caches aren't allowed. "
"%r bears cache %r" % (pkgname, pkg.cache))
"%r bears cache %r" % (pkgname, pkg.cache)
)
if not pkg.get_repository():
pkg.set_repository(conf.rpms_default_repository + pkgname)
if not pkg.get_cache():
pkg.set_cache(conf.rpms_default_cache + pkgname)
if not pkg.get_ref():
pkg.set_ref('master')
pkg.set_ref("master")
if pkg.get_arches().size() == 0:
arches = Modulemd.SimpleSet()
arches.set(conf.arches)
@@ -193,11 +191,12 @@ def format_mmd(mmd, scmurl, module=None, session=None):
if mod.get_repository() and not conf.modules_allow_repository:
raise Forbidden(
"Custom module repositories aren't allowed. "
"%r bears repository %r" % (modname, mod.get_repository()))
"%r bears repository %r" % (modname, mod.get_repository())
)
if not mod.get_repository():
mod.set_repository(conf.modules_default_repository + modname)
if not mod.get_ref():
mod.set_ref('master')
mod.set_ref("master")

# Check that SCM URL is valid and replace potential branches in pkg refs
# by real SCM hash and store the result to our private xmd place in modulemd.
@@ -205,8 +204,10 @@ def format_mmd(mmd, scmurl, module=None, session=None):
try:
# Filter out the packages which we have already resolved in possible
# previous runs of this method (can be caused by module build resubmition).
pkgs_to_resolve = [pkg for pkg in mmd.get_rpm_components().values()
if pkg.get_name() not in xmd['mbs']['rpms']]
pkgs_to_resolve = [
pkg for pkg in mmd.get_rpm_components().values()
if pkg.get_name() not in xmd["mbs"]["rpms"]
]
async_result = pool.map_async(_scm_get_latest, pkgs_to_resolve)

# For modules with lot of components, the _scm_get_latest can take a lot of time.
@@ -228,7 +229,7 @@ def format_mmd(mmd, scmurl, module=None, session=None):
else:
pkg_name = pkg_dict["pkg_name"]
pkg_ref = pkg_dict["pkg_ref"]
xmd['mbs']['rpms'][pkg_name] = {'ref': pkg_ref}
xmd["mbs"]["rpms"][pkg_name] = {"ref": pkg_ref}
if err_msg:
raise UnprocessableEntity(err_msg)

@@ -251,32 +252,38 @@ def get_prefixed_version(mmd):
for base_module in conf.base_module_names:
# xmd is a GLib Variant and doesn't support .get() syntax
try:
base_module_stream = xmd['mbs']['buildrequires'].get(
base_module, {}).get('stream')
base_module_stream = xmd["mbs"]["buildrequires"].get(base_module, {}).get("stream")
if base_module_stream:
# Break after finding the first base module that is buildrequired
break
except KeyError:
log.warning('The module\'s mmd is missing information in the xmd section')
log.warning("The module's mmd is missing information in the xmd section")
return version
else:
log.warning('This module does not buildrequire a base module ({0})'
.format(' or '.join(conf.base_module_names)))
log.warning(
"This module does not buildrequire a base module ({0})".format(
" or ".join(conf.base_module_names)
)
)
return version

# The platform version (e.g. prefix1.2.0 => 010200)
version_prefix = models.ModuleBuild.get_stream_version(base_module_stream, right_pad=False)

if version_prefix is None:
log.warning('The "{0}" stream "{1}" couldn\'t be used to prefix the module\'s '
'version'.format(base_module, base_module_stream))
log.warning(
'The "{0}" stream "{1}" couldn\'t be used to prefix the module\'s '
"version".format(base_module, base_module_stream)
)
return version

# Strip the stream suffix because Modulemd requires version to be an integer
new_version = int(str(int(math.floor(version_prefix))) + str(version))
if new_version > GLib.MAXUINT64:
log.warning('The "{0}" stream "{1}" caused the module\'s version prefix to be '
'too long'.format(base_module, base_module_stream))
log.warning(
'The "{0}" stream "{1}" caused the module\'s version prefix to be '
"too long".format(base_module, base_module_stream)
)
return version
return new_version

@@ -297,13 +304,14 @@ def validate_mmd(mmd):
if mod.get_repository() and not conf.modules_allow_repository:
raise Forbidden(
"Custom module repositories aren't allowed. "
"%r bears repository %r" % (modname, mod.get_repository()))
"%r bears repository %r" % (modname, mod.get_repository())
)

name = mmd.get_name()
xmd = mmd.get_xmd()
if 'mbs' in xmd:
if "mbs" in xmd:
allowed_to_mark_disttag = name in conf.allowed_disttag_marking_module_names
if not (xmd['mbs'].keys() == ['disttag_marking'] and allowed_to_mark_disttag):
if not (xmd["mbs"].keys() == ["disttag_marking"] and allowed_to_mark_disttag):
raise ValidationError('The "mbs" xmd field is reserved for MBS')

if name in conf.base_module_names:
@@ -317,12 +325,12 @@ def merge_included_mmd(mmd, included_mmd):
the `main` when it includes another module defined by `included_mmd`
"""
included_xmd = glib.from_variant_dict(included_mmd.get_xmd())
if 'rpms' in included_xmd['mbs']:
if "rpms" in included_xmd["mbs"]:
xmd = glib.from_variant_dict(mmd.get_xmd())
if 'rpms' not in xmd['mbs']:
xmd['mbs']['rpms'] = included_xmd['mbs']['rpms']
if "rpms" not in xmd["mbs"]:
xmd["mbs"]["rpms"] = included_xmd["mbs"]["rpms"]
else:
xmd['mbs']['rpms'].update(included_xmd['mbs']['rpms'])
xmd["mbs"]["rpms"].update(included_xmd["mbs"]["rpms"])
# Set the modified xmd back to the modulemd
mmd.set_xmd(glib.dict_values(xmd))

@@ -351,10 +359,10 @@ def get_module_srpm_overrides(module):
raise ValueError("Invalid srpms list encountered: {}".format(module.srpms))

for source in srpms:
if source.startswith('cli-build/') and source.endswith('.src.rpm'):
if source.startswith("cli-build/") and source.endswith(".src.rpm"):
# This is a custom srpm that has been uploaded to koji by rpkg
# using the package name as the basename suffixed with .src.rpm
rpm_name = os.path.basename(source)[:-len('.src.rpm')]
rpm_name = os.path.basename(source)[: -len(".src.rpm")]
else:
# This should be a local custom srpm path
if not os.path.exists(source):
@@ -362,13 +370,15 @@ def get_module_srpm_overrides(module):
# Get package name from rpm headers
try:
rpm_hdr = kobo.rpmlib.get_rpm_header(source)
rpm_name = kobo.rpmlib.get_header_field(rpm_hdr, 'name').decode('utf-8')
rpm_name = kobo.rpmlib.get_header_field(rpm_hdr, "name").decode("utf-8")
except Exception:
raise ValueError("Provided srpm is invalid: {}".format(source))

if rpm_name in overrides:
log.warning('Encountered duplicate custom SRPM "{0}"'
' for package {1}'.format(source, rpm_name))
log.warning(
'Encountered duplicate custom SRPM "{0}" for package {1}'
.format(source, rpm_name)
)
continue

log.debug('Using custom SRPM "{0}" for package {1}'.format(source, rpm_name))
@@ -377,8 +387,9 @@ def get_module_srpm_overrides(module):
return overrides


def record_component_builds(mmd, module, initial_batch=1,
previous_buildorder=None, main_mmd=None, session=None):
def record_component_builds(
mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None, session=None
):
# Imported here to allow import of utils in GenericBuilder.
import module_build_service.builder

@@ -394,13 +405,15 @@ def record_component_builds(mmd, module, initial_batch=1,
if main_mmd:
# Check for components that are in both MMDs before merging since MBS
# currently can't handle that situation.
duplicate_components = [rpm for rpm in main_mmd.get_rpm_components().keys()
if rpm in mmd.get_rpm_components()]
duplicate_components = [
rpm for rpm in main_mmd.get_rpm_components().keys() if rpm in mmd.get_rpm_components()
]
if duplicate_components:
error_msg = (
'The included module "{0}" in "{1}" have the following '
'conflicting components: {2}'.format(
mmd.get_name(), main_mmd.get_name(), ', '.join(duplicate_components)))
"conflicting components: {2}".format(
mmd.get_name(), main_mmd.get_name(), ", ".join(duplicate_components))
)
raise UnprocessableEntity(error_msg)
merge_included_mmd(main_mmd, mmd)
else:
@@ -417,7 +430,8 @@ def record_component_builds(mmd, module, initial_batch=1,
srpm_overrides = get_module_srpm_overrides(module)

rpm_weights = module_build_service.builder.GenericBuilder.get_build_weights(
[c.get_name() for c in rpm_components])
[c.get_name() for c in rpm_components]
)
all_components.sort(key=lambda x: x.get_buildorder())
# We do not start with batch = 0 here, because the first batch is
# reserved for module-build-macros. First real components must be
@@ -439,33 +453,35 @@ def record_component_builds(mmd, module, initial_batch=1,
# It is OK to whitelist all URLs here, because the validity
# of every URL have been already checked in format_mmd(...).
included_mmd = _fetch_mmd(full_url, whitelist_url=True)[0]
batch = record_component_builds(included_mmd, module, batch,
previous_buildorder, main_mmd, session=session)
batch = record_component_builds(
included_mmd, module, batch, previous_buildorder, main_mmd, session=session)
continue

package = component.get_name()
if package in srpm_overrides:
component_ref = None
full_url = srpm_overrides[package]
log.info('Building custom SRPM "{0}"'
' for package {1}'.format(full_url, package))
log.info('Building custom SRPM "{0}"' " for package {1}".format(full_url, package))
else:
component_ref = mmd.get_xmd()['mbs']['rpms'][package]['ref']
component_ref = mmd.get_xmd()["mbs"]["rpms"][package]["ref"]
full_url = component.get_repository() + "?#" + component_ref

# Skip the ComponentBuild if it already exists in database. This can happen
# in case of module build resubmition.
existing_build = models.ComponentBuild.from_component_name(
db.session, package, module.id)
existing_build = models.ComponentBuild.from_component_name(db.session, package, module.id)
if existing_build:
# Check that the existing build has the same most important attributes.
# This should never be a problem, but it's good to be defensive here so
# we do not mess things during resubmition.
if (existing_build.batch != batch or existing_build.scmurl != full_url or
existing_build.ref != component_ref):
if (
existing_build.batch != batch
or existing_build.scmurl != full_url
or existing_build.ref != component_ref
):
raise ValidationError(
"Module build %s already exists in database, but its attributes "
" are different from resubmitted one." % component.get_name())
" are different from resubmitted one." % component.get_name()
)
continue

build = models.ComponentBuild(
@@ -475,7 +491,7 @@ def record_component_builds(mmd, module, initial_batch=1,
scmurl=full_url,
batch=batch,
ref=component_ref,
weight=rpm_weights[package]
weight=rpm_weights[package],
)
session.add(build)

@@ -486,12 +502,13 @@ def submit_module_build_from_yaml(username, handle, params, stream=None, skiptes
yaml_file = to_text_type(handle.read())
mmd = load_mmd(yaml_file)
dt = datetime.utcfromtimestamp(int(time.time()))
if hasattr(handle, 'filename'):
if hasattr(handle, "filename"):
def_name = str(os.path.splitext(os.path.basename(handle.filename))[0])
elif not mmd.get_name():
raise ValidationError(
"The module's name was not present in the modulemd file. Please use the "
"\"module_name\" parameter")
'"module_name" parameter'
)
def_version = int(dt.strftime("%Y%m%d%H%M%S"))
mmd.set_name(mmd.get_name() or def_name)
mmd.set_stream(stream or mmd.get_stream() or "master")
@@ -507,12 +524,11 @@ _url_check_re = re.compile(r"^[^:/]+:.*$")


def submit_module_build_from_scm(username, params, allow_local_url=False):
url = params['scmurl']
branch = params['branch']
url = params["scmurl"]
branch = params["branch"]
# Translate local paths into file:// URL
if allow_local_url and not _url_check_re.match(url):
log.info(
"'{}' is not a valid URL, assuming local path".format(url))
log.info("'{}' is not a valid URL, assuming local path".format(url))
url = os.path.abspath(url)
url = "file://" + url
mmd, scm = _fetch_mmd(url, branch, allow_local_url)
@@ -529,27 +545,29 @@ def _apply_dep_overrides(mmd, params):
:raises ValidationError: if one of the overrides doesn't apply
"""
dep_overrides = {
'buildrequires': copy.copy(params.get('buildrequire_overrides', {})),
'requires': copy.copy(params.get('require_overrides', {}))
"buildrequires": copy.copy(params.get("buildrequire_overrides", {})),
"requires": copy.copy(params.get("require_overrides", {})),
}

# Parse the module's branch to determine if it should override the stream of the buildrequired
# module defined in conf.br_stream_override_module
branch_search = None
if params.get('branch') and conf.br_stream_override_module and conf.br_stream_override_regexes:
if params.get("branch") and conf.br_stream_override_module and conf.br_stream_override_regexes:
# Only parse the branch for a buildrequire override if the user didn't manually specify an
# override for the module specified in conf.br_stream_override_module
if not dep_overrides['buildrequires'].get(conf.br_stream_override_module):
if not dep_overrides["buildrequires"].get(conf.br_stream_override_module):
branch_search = None
for regex in conf.br_stream_override_regexes:
branch_search = re.search(regex, params['branch'])
branch_search = re.search(regex, params["branch"])
if branch_search:
log.debug(
'The stream override regex `%s` matched the branch %s',
regex, params['branch'])
"The stream override regex `%s` matched the branch %s",
regex,
params["branch"],
)
break
else:
log.debug('No stream override regexes matched the branch "%s"', params['branch'])
log.debug('No stream override regexes matched the branch "%s"', params["branch"])

# If a stream was parsed from the branch, then add it as a stream override for the module
# specified in conf.br_stream_override_module
@@ -557,21 +575,23 @@ def _apply_dep_overrides(mmd, params):
# Concatenate all the groups that are not None together to get the desired stream.
# This approach is taken in case there are sections to ignore.
# For instance, if we need to parse `el8.0.0` from `rhel-8.0.0`.
parsed_stream = ''.join(group for group in branch_search.groups() if group)
parsed_stream = "".join(group for group in branch_search.groups() if group)
if parsed_stream:
dep_overrides['buildrequires'][conf.br_stream_override_module] = [parsed_stream]
dep_overrides["buildrequires"][conf.br_stream_override_module] = [parsed_stream]
log.info(
'The buildrequired stream of "%s" was overriden with "%s" based on the branch "%s"',
conf.br_stream_override_module, parsed_stream, params['branch'])
conf.br_stream_override_module, parsed_stream, params["branch"],
)
else:
log.warning(
('The regex `%s` only matched empty capture groups on the branch "%s". The regex '
'is invalid and should be rewritten.'),
regex, params['branch'])
'The regex `%s` only matched empty capture groups on the branch "%s". The regex is '
" invalid and should be rewritten.",
regex, params["branch"],
)

unused_dep_overrides = {
'buildrequires': set(dep_overrides['buildrequires'].keys()),
'requires': set(dep_overrides['requires'].keys())
"buildrequires": set(dep_overrides["buildrequires"].keys()),
"requires": set(dep_overrides["requires"].keys()),
}

deps = mmd.get_dependencies()
@@ -579,7 +599,7 @@ def _apply_dep_overrides(mmd, params):
for dep_type, overrides in dep_overrides.items():
overridden = False
# Get the existing streams (e.g. dep.get_buildrequires())
reqs = getattr(dep, 'get_' + dep_type)()
reqs = getattr(dep, "get_" + dep_type)()
for name, streams in dep_overrides[dep_type].items():
if name in reqs:
reqs[name].set(streams)
@@ -587,7 +607,7 @@ def _apply_dep_overrides(mmd, params):
overridden = True
if overridden:
# Set the overridden streams (e.g. dep.set_buildrequires(reqs))
getattr(dep, 'set_' + dep_type)(reqs)
getattr(dep, "set_" + dep_type)(reqs)

for dep_type in unused_dep_overrides.keys():
# If a stream override was applied from parsing the branch and it wasn't applicable,
@@ -596,8 +616,9 @@ def _apply_dep_overrides(mmd, params):
unused_dep_overrides[dep_type].remove(conf.br_stream_override_module)
if unused_dep_overrides[dep_type]:
raise ValidationError(
'The {} overrides for the following modules aren\'t applicable: {}'
.format(dep_type[:-1], ', '.join(sorted(unused_dep_overrides[dep_type]))))
"The {} overrides for the following modules aren't applicable: {}".format(
dep_type[:-1], ", ".join(sorted(unused_dep_overrides[dep_type])))
)

mmd.set_dependencies(deps)

@@ -624,7 +645,7 @@ def _handle_base_module_virtual_stream_br(mmd):
for i, stream in enumerate(streams):
# Ignore streams that start with a minus sign, since those are handled in the
# MSE code
if stream.startswith('-'):
if stream.startswith("-"):
continue

# Check if the base module stream is available
@@ -636,30 +657,25 @@ def _handle_base_module_virtual_stream_br(mmd):
# If the base module stream is not available, check if there's a virtual stream
log.debug(
'Checking to see if there is a base module "%s" with the virtual stream "%s"',
base_module,
stream
base_module, stream,
)
base_module_mmd = system_resolver.get_latest_with_virtual_stream(
name=base_module, virtual_stream=stream)
name=base_module, virtual_stream=stream
)
if not base_module_mmd:
# If there isn't this base module stream or virtual stream available, skip it,
# and let the dep solving code deal with it like it normally would
log.warning(
'There is no base module "%s" with stream/virtual stream "%s"',
base_module,
stream
base_module, stream,
)
continue

latest_stream = base_module_mmd.get_stream()
log.info(
('Replacing the buildrequire "%s:%s" with "%s:%s", since "%s" is a virtual '
'stream'),
base_module,
stream,
base_module,
latest_stream,
stream
'Replacing the buildrequire "%s:%s" with "%s:%s", since "%s" is a virtual '
"stream",
base_module, stream, base_module, latest_stream, stream
)
new_streams[i] = latest_stream
overridden = True
@@ -687,9 +703,13 @@ def submit_module_build(username, mmd, params):
import koji # Placed here to avoid py2/py3 conflicts...
from .mse import generate_expanded_mmds

log.debug('Submitted %s module build for %s:%s:%s',
("scratch" if params.get('scratch', False) else "normal"),
mmd.get_name(), mmd.get_stream(), mmd.get_version())
log.debug(
"Submitted %s module build for %s:%s:%s",
("scratch" if params.get("scratch", False) else "normal"),
mmd.get_name(),
mmd.get_stream(),
mmd.get_version(),
)
validate_mmd(mmd)

raise_if_stream_ambigous = False
@@ -706,8 +726,10 @@ def submit_module_build(username, mmd, params):

mmds = generate_expanded_mmds(db.session, mmd, raise_if_stream_ambigous, default_streams)
if not mmds:
raise ValidationError('No dependency combination was satisfied. Please verify the '
'buildrequires in your modulemd have previously been built.')
raise ValidationError(
"No dependency combination was satisfied. Please verify the "
"buildrequires in your modulemd have previously been built."
)
modules = []

# True if all module builds are skipped so MBS will actually not rebuild
@@ -722,52 +744,57 @@ def submit_module_build(username, mmd, params):
version_str = str(version)
nsvc = ":".join([mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context()])

log.debug('Checking whether module build already exists: %s.', nsvc)
log.debug("Checking whether module build already exists: %s.", nsvc)
module = models.ModuleBuild.get_build_from_nsvc(
db.session, mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context())
if module and not params.get('scratch', False):
if module.state != models.BUILD_STATES['failed']:
log.info("Skipping rebuild of %s, only rebuild of modules in failed state "
"is allowed.", nsvc)
if module and not params.get("scratch", False):
if module.state != models.BUILD_STATES["failed"]:
log.info(
"Skipping rebuild of %s, only rebuild of modules in failed state is allowed.",
nsvc,
)
modules.append(module)
continue

rebuild_strategy = params.get('rebuild_strategy')
rebuild_strategy = params.get("rebuild_strategy")
if rebuild_strategy and module.rebuild_strategy != rebuild_strategy:
raise ValidationError(
'You cannot change the module\'s "rebuild_strategy" when '
'resuming a module build')
"resuming a module build"
)

log.debug('Resuming existing module build %r' % module)
log.debug("Resuming existing module build %r" % module)
# Reset all component builds that didn't complete
for component in module.component_builds:
if component.state and component.state != koji.BUILD_STATES['COMPLETE']:
if component.state and component.state != koji.BUILD_STATES["COMPLETE"]:
component.state = None
component.state_reason = None
db.session.add(component)
module.username = username
prev_state = module.previous_non_failed_state
if prev_state == models.BUILD_STATES['init']:
transition_to = models.BUILD_STATES['init']
if prev_state == models.BUILD_STATES["init"]:
transition_to = models.BUILD_STATES["init"]
else:
transition_to = models.BUILD_STATES['wait']
transition_to = models.BUILD_STATES["wait"]
module.batch = 0
module.transition(conf, transition_to, "Resubmitted by %s" % username)
log.info("Resumed existing module build in previous state %s" % module.state)
else:
# make NSVC unique for every scratch build
context_suffix = ''
if params.get('scratch', False):
log.debug('Checking for existing scratch module builds by NSVC')
context_suffix = ""
if params.get("scratch", False):
log.debug("Checking for existing scratch module builds by NSVC")
scrmods = models.ModuleBuild.get_scratch_builds_from_nsvc(
db.session, mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context())
scrmod_contexts = [scrmod.context for scrmod in scrmods]
log.debug('Found %d previous scratch module build context(s): %s',
len(scrmods), ",".join(scrmod_contexts))
log.debug(
"Found %d previous scratch module build context(s): %s",
len(scrmods), ",".join(scrmod_contexts),
)
# append incrementing counter to context
context_suffix = '_' + str(len(scrmods) + 1)
context_suffix = "_" + str(len(scrmods) + 1)
mmd.set_context(mmd.get_context() + context_suffix)
log.debug('Creating new module build')
log.debug("Creating new module build")
module = models.ModuleBuild.create(
db.session,
conf,
@@ -775,27 +802,35 @@ def submit_module_build(username, mmd, params):
stream=mmd.get_stream(),
version=version_str,
modulemd=to_text_type(mmd.dumps()),
scmurl=params.get('scmurl'),
scmurl=params.get("scmurl"),
username=username,
rebuild_strategy=params.get('rebuild_strategy'),
scratch=params.get('scratch'),
srpms=params.get('srpms')
rebuild_strategy=params.get("rebuild_strategy"),
scratch=params.get("scratch"),
srpms=params.get("srpms"),
)
(module.ref_build_context, module.build_context, module.runtime_context,
module.context) = module.contexts_from_mmd(module.modulemd)
(
module.ref_build_context,
module.build_context,
module.runtime_context,
module.context,
) = module.contexts_from_mmd(module.modulemd)
module.context += context_suffix

all_modules_skipped = False
db.session.add(module)
db.session.commit()
modules.append(module)
log.info("%s submitted build of %s, stream=%s, version=%s, context=%s", username,
mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context())
log.info(
"%s submitted build of %s, stream=%s, version=%s, context=%s",
username, mmd.get_name(), mmd.get_stream(), version_str, mmd.get_context()
)

if all_modules_skipped:
err_msg = ('Module (state=%s) already exists. Only a new build, resubmission of '
'a failed build or build against new buildrequirements is '
'allowed.' % module.state)
err_msg = (
"Module (state=%s) already exists. Only a new build, resubmission of "
"a failed build or build against new buildrequirements is "
"allowed." % module.state
)
log.error(err_msg)
raise Conflict(err_msg)

@@ -805,25 +840,24 @@ def submit_module_build(username, mmd, params):
def _is_eol_in_pdc(name, stream):
""" Check PDC if the module name:stream is no longer active. """

params = {'type': 'module', 'global_component': name, 'name': stream}
url = conf.pdc_url + '/component-branches/'
params = {"type": "module", "global_component": name, "name": stream}
url = conf.pdc_url + "/component-branches/"

response = requests.get(url, params=params)
if not response:
raise ValidationError("Failed to talk to PDC {}{}".format(response, response.text))

data = response.json()
results = data['results']
results = data["results"]
if not results:
raise ValidationError("No such module {}:{} found at {}".format(
name, stream, response.request.url))
raise ValidationError(
"No such module {}:{} found at {}".format(name, stream, response.request.url))

# If the module is active, then it is not EOL and vice versa.
return not results[0]['active']
return not results[0]["active"]

def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False,
mandatory_checks=True):
def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, mandatory_checks=True):
# Import it here, because SCM uses utils methods
# and fails to import them because of dep-chain.
import module_build_service.scm
@@ -831,7 +865,7 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False,
td = None
scm = None
try:
log.debug('Verifying modulemd')
log.debug("Verifying modulemd")
td = tempfile.mkdtemp()
if whitelist_url:
scm = module_build_service.scm.SCM(url, branch, [url], allow_local_url)
@@ -847,14 +881,12 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False,
if td is not None:
shutil.rmtree(td)
except Exception as e:
log.warning(
"Failed to remove temporary directory {!r}: {}".format(
td, str(e)))
log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))

if conf.check_for_eol:
if _is_eol_in_pdc(scm.name, scm.branch):
raise ValidationError(
'Module {}:{} is marked as EOL in PDC.'.format(scm.name, scm.branch))
"Module {}:{} is marked as EOL in PDC.".format(scm.name, scm.branch))

if not mandatory_checks:
return mmd, scm
@@ -863,8 +895,8 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False,
# says it should be
if mmd.get_name() and mmd.get_name() != scm.name:
if not conf.allow_name_override_from_scm:
raise ValidationError('The name "{0}" that is stored in the modulemd '
'is not valid'.format(mmd.get_name()))
raise ValidationError(
'The name "{0}" that is stored in the modulemd is not valid'.format(mmd.get_name()))
else:
mmd.set_name(scm.name)

@@ -872,19 +904,20 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False,
# branch is
if mmd.get_stream() and mmd.get_stream() != scm.branch:
if not conf.allow_stream_override_from_scm:
raise ValidationError('The stream "{0}" that is stored in the modulemd '
'does not match the branch "{1}"'.format(
mmd.get_stream(), scm.branch))
raise ValidationError(
'The stream "{0}" that is stored in the modulemd does not match the branch "{1}"'
.format(mmd.get_stream(), scm.branch)
)
else:
mmd.set_stream(scm.branch)

# If the version is in the modulemd, throw an exception since the version
# since the version is generated by MBS
if mmd.get_version():
raise ValidationError('The version "{0}" is already defined in the '
'modulemd but it shouldn\'t be since the version '
'is generated based on the commit time'.format(
mmd.get_version()))
raise ValidationError(
'The version "{0}" is already defined in the modulemd but it shouldn\'t be since the '
"version is generated based on the commit time".format(mmd.get_version())
)
else:
mmd.set_version(int(scm.version))

@@ -901,18 +934,17 @@ def load_mmd(yaml, is_file=False):
mmd.upgrade()
except Exception:
if is_file:
error = 'The modulemd {} is invalid. Please verify the syntax is correct'.format(
os.path.basename(yaml)
)
error = "The modulemd {} is invalid. Please verify the syntax is correct".format(
os.path.basename(yaml))
if os.path.exists(yaml):
with open(yaml, 'rt') as yaml_hdl:
log.debug('Modulemd content:\n%s', yaml_hdl.read())
with open(yaml, "rt") as yaml_hdl:
log.debug("Modulemd content:\n%s", yaml_hdl.read())
else:
error = 'The modulemd file {} not found!'.format(os.path.basename(yaml))
log.error('The modulemd file %s not found!', yaml)
error = "The modulemd file {} not found!".format(os.path.basename(yaml))
log.error("The modulemd file %s not found!", yaml)
else:
error = 'The modulemd is invalid. Please verify the syntax is correct.'
log.debug('Modulemd content:\n%s', yaml)
error = "The modulemd is invalid. Please verify the syntax is correct."
log.debug("Modulemd content:\n%s", yaml)
log.exception(error)
raise UnprocessableEntity(error)

@@ -943,7 +975,7 @@ def load_local_builds(local_build_nsvs, session=None):
builds = []
try:
for d in os.listdir(conf.mock_resultsdir):
m = re.match('^module-(.*)-([^-]*)-([0-9]+)$', d)
m = re.match("^module-(.*)-([^-]*)-([0-9]+)$", d)
if m:
builds.append((m.group(1), m.group(2), int(m.group(3)), d))
except OSError:
@@ -958,11 +990,12 @@ def load_local_builds(local_build_nsvs, session=None):
builds.sort(key=lambda a: a[2], reverse=True)

for nsv in local_build_nsvs:
parts = nsv.split(':')
parts = nsv.split(":")
if len(parts) < 1 or len(parts) > 3:
raise RuntimeError(
'The local build "{0}" couldn\'t be be parsed into '
'NAME[:STREAM[:VERSION]]'.format(nsv))
'The local build "{0}" couldn\'t be be parsed into NAME[:STREAM[:VERSION]]'
.format(nsv)
)

name = parts[0]
stream = parts[1] if len(parts) > 1 else None
@@ -983,11 +1016,12 @@ def load_local_builds(local_build_nsvs, session=None):
if not found_build:
raise RuntimeError(
'The local build "{0}" couldn\'t be found in "{1}"'.format(
nsv, conf.mock_resultsdir))
nsv, conf.mock_resultsdir)
)

# Load the modulemd metadata.
path = os.path.join(conf.mock_resultsdir, found_build[3], 'results')
mmd = load_mmd(os.path.join(path, 'modules.yaml'), is_file=True)
path = os.path.join(conf.mock_resultsdir, found_build[3], "results")
mmd = load_mmd(os.path.join(path, "modules.yaml"), is_file=True)

# Create ModuleBuild in database.
module = models.ModuleBuild.create(
@@ -1000,14 +1034,19 @@ def load_local_builds(local_build_nsvs, session=None):
modulemd=to_text_type(mmd.dumps()),
scmurl="",
username="mbs",
publish_msg=False)
publish_msg=False,
)
module.koji_tag = path
module.state = models.BUILD_STATES['ready']
module.state = models.BUILD_STATES["ready"]
session.commit()

if (found_build[0] != module.name or found_build[1] != module.stream or
str(found_build[2]) != module.version):
if (
found_build[0] != module.name
or found_build[1] != module.stream
or str(found_build[2]) != module.version
):
raise RuntimeError(
'Parsed metadata results for "{0}" don\'t match the directory name'
.format(found_build[3]))
'Parsed metadata results for "{0}" don\'t match the directory name'.format(
found_build[3])
)
log.info("Loaded local module build %r", module)

@@ -63,22 +63,25 @@ def find_build_tags_from_external_repos(koji_session, repo_infos):
:return: a list of tag names.
:rtype: list[str]
"""
re_external_repo_url = r'^{}/repos/(.+-build)/latest/\$arch/?$'.format(
conf.koji_external_repo_url_prefix.rstrip('/'))
re_external_repo_url = r"^{}/repos/(.+-build)/latest/\$arch/?$".format(
conf.koji_external_repo_url_prefix.rstrip("/"))
tag_names = []
for info in repo_infos:
match = re.match(re_external_repo_url, info['url'])
match = re.match(re_external_repo_url, info["url"])
if match:
name = match.groups()[0]
if koji_session.getTag(name) is None:
log.warning('Ignoring the found tag %s because no tag info was found '
'with this name.', name)
log.warning(
"Ignoring the found tag %s because no tag info was found with this name.",
name,
)
else:
tag_names.append(name)
else:
log.warning('The build tag could not be parsed from external repo '
'%s whose url is %s.',
info['external_repo_name'], info['url'])
log.warning(
"The build tag could not be parsed from external repo %s whose url is %s.",
info["external_repo_name"], info["url"],
)
return tag_names


@@ -98,8 +101,9 @@ def find_module_koji_tags(koji_session, build_tag):
:rtype: list[str]
"""
return [
data['name'] for data in koji_session.getFullInheritance(build_tag)
if any(data['name'].startswith(prefix) for prefix in conf.koji_tag_prefixes)
data["name"]
for data in koji_session.getFullInheritance(build_tag)
if any(data["name"].startswith(prefix) for prefix in conf.koji_tag_prefixes)
]


@@ -126,11 +130,12 @@ def get_modulemds_from_ursine_content(tag):
:rtype: list[Modulemd.Module]
"""
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

koji_session = KojiModuleBuilder.get_session(conf, login=False)
repos = koji_session.getExternalRepoList(tag)
build_tags = find_build_tags_from_external_repos(koji_session, repos)
if not build_tags:
log.debug('No external repo containing ursine content is found.')
log.debug("No external repo containing ursine content is found.")
return []
modulemds = []
for tag in build_tags:
@@ -140,7 +145,7 @@ def get_modulemds_from_ursine_content(tag):
if md:
modulemds.append(md)
else:
log.warning('No module is found by koji_tag \'%s\'', koji_tag)
log.warning("No module is found by koji_tag '%s'", koji_tag)
return modulemds

@@ -161,7 +166,7 @@ def find_stream_collision_modules(buildrequired_modules, koji_tag):
"""
ursine_modulemds = get_modulemds_from_ursine_content(koji_tag)
if not ursine_modulemds:
log.debug('No module metadata is found from ursine content.')
log.debug("No module metadata is found from ursine content.")
return []

collision_modules = [
@@ -171,15 +176,19 @@ def find_stream_collision_modules(buildrequired_modules, koji_tag):
# different stream, that is what we want to record here, whose RPMs will be
# excluded from buildroot by adding them into SRPM module-build-macros as
# Conflicts.
if (item.get_name() in buildrequired_modules and
item.get_stream() != buildrequired_modules[item.get_name()]['stream'])
if (
item.get_name() in buildrequired_modules
and item.get_stream() != buildrequired_modules[item.get_name()]["stream"]
)
]

for item in collision_modules:
name, stream, _ = item.split(':', 2)
log.info('Buildrequired module %s exists in ursine content with '
'different stream %s, whose RPMs will be excluded.',
name, stream)
name, stream, _ = item.split(":", 2)
log.info(
"Buildrequired module %s exists in ursine content with "
"different stream %s, whose RPMs will be excluded.",
name, stream,
)

return collision_modules

@@ -206,17 +215,18 @@ def handle_stream_collision_modules(mmd):
:param mmd: a module's metadata which will be built.
:type mmd: Modulemd.Module
"""
log.info('Start to find out stream collision modules.')
log.info("Start to find out stream collision modules.")
unpacked_xmd = glib.from_variant_dict(mmd.get_xmd())
buildrequires = unpacked_xmd['mbs']['buildrequires']
buildrequires = unpacked_xmd["mbs"]["buildrequires"]

for module_name in conf.base_module_names:
base_module_info = buildrequires.get(module_name)
if base_module_info is None:
log.info(
'Base module %s is not a buildrequire of module %s. '
'Skip handling module stream collision for this base module.',
module_name, mmd.get_name())
"Base module %s is not a buildrequire of module %s. "
"Skip handling module stream collision for this base module.",
module_name, mmd.get_name(),
)
continue

# Module stream collision is handled only for newly created module
@@ -225,27 +235,26 @@ def handle_stream_collision_modules(mmd):
# base module.
# Just check the existence, and following code ensures this key exists
# even if no stream collision module is found.
if ('stream_collision_modules' in base_module_info and
'ursine_rpms' in base_module_info):
log.debug('Base module %s has stream collision modules and ursine '
'rpms. Skip to handle stream collision again for it.',
module_name)
if "stream_collision_modules" in base_module_info and "ursine_rpms" in base_module_info:
log.debug(
"Base module %s has stream collision modules and ursine "
"rpms. Skip to handle stream collision again for it.",
module_name,
)
continue

modules_nsvc = find_stream_collision_modules(
buildrequires, base_module_info['koji_tag'])
modules_nsvc = find_stream_collision_modules(buildrequires, base_module_info["koji_tag"])

if modules_nsvc:
# Save modules NSVC for later use in subsequent event handlers to
# log readable messages.
base_module_info['stream_collision_modules'] = modules_nsvc
base_module_info['ursine_rpms'] = find_module_built_rpms(modules_nsvc)
base_module_info["stream_collision_modules"] = modules_nsvc
base_module_info["ursine_rpms"] = find_module_built_rpms(modules_nsvc)
else:
log.info('No stream collision module is found against base module %s.',
module_name)
log.info("No stream collision module is found against base module %s.", module_name)
# Always set in order to mark it as handled already.
base_module_info['stream_collision_modules'] = None
base_module_info['ursine_rpms'] = None
base_module_info["stream_collision_modules"] = None
base_module_info["ursine_rpms"] = None

mmd.set_xmd(glib.dict_values(unpacked_xmd))

@@ -262,18 +271,17 @@ def find_module_built_rpms(modules_nsvc):
|
||||
import kobo.rpmlib
|
||||
from module_build_service.resolver import GenericResolver
|
||||
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
|
||||
|
||||
resolver = GenericResolver.create(conf)
|
||||
|
||||
built_rpms = []
|
||||
koji_session = KojiModuleBuilder.get_session(conf, login=False)
|
||||
|
||||
for nsvc in modules_nsvc:
|
||||
name, stream, version, context = nsvc.split(':')
|
||||
name, stream, version, context = nsvc.split(":")
|
||||
module = resolver._get_module(name, stream, version, context, strict=True)
|
||||
rpms = koji_session.listTaggedRPMS(module['koji_tag'], latest=True)[0]
|
||||
built_rpms.extend(
|
||||
kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms
|
||||
)
|
||||
rpms = koji_session.listTaggedRPMS(module["koji_tag"], latest=True)[0]
|
||||
built_rpms.extend(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)
|
||||
|
||||
# In case there are duplicate NEVRs, ensure every NEVR is unique in the final list.
# And, sometimes, a sorted list of RPMs is easier to read.

@@ -41,10 +41,11 @@ def get_scm_url_re():
"""
Returns a regular expression for SCM URL extraction and validation.
"""
schemes_re = '|'.join(map(re.escape, scm_url_schemes(terse=True)))
schemes_re = "|".join(map(re.escape, scm_url_schemes(terse=True)))
regex = (
r"(?P<giturl>(?P<scheme>(?:" + schemes_re + r"))://(?P<host>[^/]+)?"
r"(?P<repopath>/[^\?]+))(?:\?(?P<modpath>[^#]+)?)?#(?P<revision>.+)")
r"(?P<repopath>/[^\?]+))(?:\?(?P<modpath>[^#]+)?)?#(?P<revision>.+)"
)
return re.compile(regex)


@@ -62,35 +63,56 @@ def pagination_metadata(p_query, api_version, request_args):
# Remove pagination related args because those are handled elsewhere
# Also, remove any args that url_for accepts in case the user entered
# those in
for key in ['page', 'per_page', 'endpoint']:
for key in ["page", "per_page", "endpoint"]:
if key in request_args_wo_page:
request_args_wo_page.pop(key)
for key in request_args:
if key.startswith('_'):
if key.startswith("_"):
request_args_wo_page.pop(key)

pagination_data = {
'page': p_query.page,
'pages': p_query.pages,
'per_page': p_query.per_page,
'prev': None,
'next': None,
'total': p_query.total,
'first': url_for(request.endpoint, api_version=api_version, page=1,
per_page=p_query.per_page, _external=True, **request_args_wo_page),
'last': url_for(request.endpoint, api_version=api_version, page=p_query.pages,
per_page=p_query.per_page, _external=True,
**request_args_wo_page)
"page": p_query.page,
"pages": p_query.pages,
"per_page": p_query.per_page,
"prev": None,
"next": None,
"total": p_query.total,
"first": url_for(
request.endpoint,
api_version=api_version,
page=1,
per_page=p_query.per_page,
_external=True,
**request_args_wo_page
),
"last": url_for(
request.endpoint,
api_version=api_version,
page=p_query.pages,
per_page=p_query.per_page,
_external=True,
**request_args_wo_page
),
}

if p_query.has_prev:
pagination_data['prev'] = url_for(request.endpoint, api_version=api_version,
page=p_query.prev_num, per_page=p_query.per_page,
_external=True, **request_args_wo_page)
pagination_data["prev"] = url_for(
request.endpoint,
api_version=api_version,
page=p_query.prev_num,
per_page=p_query.per_page,
_external=True,
**request_args_wo_page
)
if p_query.has_next:
pagination_data['next'] = url_for(request.endpoint, api_version=api_version,
page=p_query.next_num, per_page=p_query.per_page,
_external=True, **request_args_wo_page)
pagination_data["next"] = url_for(
request.endpoint,
api_version=api_version,
page=p_query.next_num,
per_page=p_query.per_page,
_external=True,
**request_args_wo_page
)

return pagination_data

@@ -104,14 +126,14 @@ def _add_order_by_clause(flask_request, query, column_source):
:param column_source: a SQLAlchemy database model
:return: a SQLAlchemy query object
"""
order_by = flask_request.args.getlist('order_by')
order_desc_by = flask_request.args.getlist('order_desc_by')
order_by = flask_request.args.getlist("order_by")
order_desc_by = flask_request.args.getlist("order_desc_by")
# Default to ordering by ID in descending order
descending = True
requested_order = ['id']
requested_order = ["id"]

if order_by and order_desc_by:
raise ValidationError('You may not specify both order_by and order_desc_by')
raise ValidationError("You may not specify both order_by and order_desc_by")
elif order_by:
descending = False
requested_order = order_by
@@ -127,7 +149,7 @@ def _add_order_by_clause(flask_request, query, column_source):
'An invalid ordering key of "{}" was supplied'.format(column_name))
column = column_dict[column_name]
# If the version column is provided, cast it as an integer so the sorting is correct
if column_name == 'version':
if column_name == "version":
column = sqlalchemy.cast(column, sqlalchemy.BigInteger)
if descending:
column = column.desc()
@@ -155,7 +177,7 @@ def filter_component_builds(flask_request):
search_query = dict()
for key in request.args.keys():
# Search by state will be handled separately
if key == 'state':
if key == "state":
continue
# Only filter on valid database columns
if key in models.ComponentBuild.__table__.columns.keys():
@@ -165,7 +187,7 @@ def filter_component_builds(flask_request):
search_query[key] = flask_request.args[key]

# Multiple states can be supplied => or-ing will take place
states = flask_request.args.getlist('state')
states = flask_request.args.getlist("state")
search_states = []
for state in states:
if state.isdigit():
@@ -174,16 +196,16 @@ def filter_component_builds(flask_request):
try:
import koji
except ImportError:
raise ValidationError('Cannot filter by state names because koji isn\'t installed')
raise ValidationError("Cannot filter by state names because koji isn't installed")

if state.upper() in koji.BUILD_STATES:
search_states.append(koji.BUILD_STATES[state.upper()])
else:
raise ValidationError('Invalid state was supplied: %s' % state)
raise ValidationError("Invalid state was supplied: %s" % state)

# Allow the user to specify the module build ID with a more intuitive key name
if 'module_build' in flask_request.args:
search_query['module_id'] = flask_request.args['module_build']
if "module_build" in flask_request.args:
search_query["module_id"] = flask_request.args["module_build"]

query = models.ComponentBuild.query

@@ -194,8 +216,8 @@ def filter_component_builds(flask_request):

query = _add_order_by_clause(flask_request, query, models.ComponentBuild)

page = flask_request.args.get('page', 1, type=int)
per_page = flask_request.args.get('per_page', 10, type=int)
page = flask_request.args.get("page", 1, type=int)
per_page = flask_request.args.get("per_page", 10, type=int)
return query.paginate(page, per_page, False)


@@ -207,8 +229,13 @@ def filter_module_builds(flask_request):
"""
search_query = dict()
special_columns = set((
'time_submitted', 'time_modified', 'time_completed', 'state', 'stream_version_lte',
'virtual_stream',))
"time_submitted",
"time_modified",
"time_completed",
"state",
"stream_version_lte",
"virtual_stream",
))
columns = models.ModuleBuild.__table__.columns.keys()
for key in set(request.args.keys()) - special_columns:
# Only filter on valid database columns but skip columns that are treated specially or
@@ -217,7 +244,7 @@ def filter_module_builds(flask_request):
search_query[key] = flask_request.args[key]

# Multiple states can be supplied => or-ing will take place
states = flask_request.args.getlist('state')
states = flask_request.args.getlist("state")
search_states = []
for state in states:
if state.isdigit():
@@ -226,22 +253,23 @@ def filter_module_builds(flask_request):
if state in models.BUILD_STATES:
search_states.append(models.BUILD_STATES[state])
else:
raise ValidationError('Invalid state was supplied: %s' % state)
raise ValidationError("Invalid state was supplied: %s" % state)

nsvc = flask_request.args.get('nsvc', None)
nsvc = flask_request.args.get("nsvc", None)
if nsvc:
nsvc_parts = nsvc.split(":")
query_keys = ["name", "stream", "version", "context"]
for key, part in zip(query_keys, nsvc_parts):
search_query[key] = part

rpm = flask_request.args.get('rpm', None)
rpm = flask_request.args.get("rpm", None)
koji_tags = []
if rpm:
if conf.system == "koji":
# we are importing the koji builder here so we can search for the rpm metadata
# from koji. If we imported this normally, we would get a circular import error.
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder  # noqa
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder  # noqa

koji_tags = KojiModuleBuilder.get_rpm_module_tag(rpm)
else:
raise ValidationError("Configured builder does not allow to search by rpm binary name!")
@@ -257,41 +285,43 @@ def filter_module_builds(flask_request):

# This is used when filtering the date request parameters, but it is here to avoid recompiling
utc_iso_datetime_regex = re.compile(
r'^(?P<datetime>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(?:\.\d+)?'
r'(?:Z|[-+]00(?::00)?)?$')
r"^(?P<datetime>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(?:\.\d+)?(?:Z|[-+]00(?::00)?)?$")

# Filter the query based on date request parameters
for item in ('submitted', 'modified', 'completed'):
for context in ('before', 'after'):
request_arg = '%s_%s' % (item, context)  # i.e. submitted_before
for item in ("submitted", "modified", "completed"):
for context in ("before", "after"):
request_arg = "%s_%s" % (item, context)  # i.e. submitted_before
iso_datetime_arg = request.args.get(request_arg, None)

if iso_datetime_arg:
iso_datetime_matches = re.match(utc_iso_datetime_regex, iso_datetime_arg)

if not iso_datetime_matches or not iso_datetime_matches.group('datetime'):
raise ValidationError(('An invalid Zulu ISO 8601 timestamp was provided'
' for the "%s" parameter')
% request_arg)
if not iso_datetime_matches or not iso_datetime_matches.group("datetime"):
raise ValidationError(
'An invalid Zulu ISO 8601 timestamp was provided for the "%s" parameter'
% request_arg
)
# Converts the ISO 8601 string to a datetime object for SQLAlchemy to use to filter
item_datetime = datetime.strptime(iso_datetime_matches.group('datetime'),
'%Y-%m-%dT%H:%M:%S')
item_datetime = datetime.strptime(
iso_datetime_matches.group("datetime"), "%Y-%m-%dT%H:%M:%S")
# Get the database column to filter against
column = getattr(models.ModuleBuild, 'time_' + item)
column = getattr(models.ModuleBuild, "time_" + item)

if context == 'after':
if context == "after":
query = query.filter(column >= item_datetime)
elif context == 'before':
elif context == "before":
query = query.filter(column <= item_datetime)

# Multiple virtual_streams can be supplied for "or" logic filtering
virtual_streams = flask_request.args.getlist('virtual_stream')
virtual_streams = flask_request.args.getlist("virtual_stream")
query = models.ModuleBuild._add_virtual_streams_filter(db.session, query, virtual_streams)

stream_version_lte = flask_request.args.get('stream_version_lte')
stream_version_lte = flask_request.args.get("stream_version_lte")
if stream_version_lte is not None:
invalid_error = ('An invalid value of stream_version_lte was provided. It must be an '
'integer greater than or equal to 10000.')
invalid_error = (
"An invalid value of stream_version_lte was provided. It must be an "
"integer greater than or equal to 10000."
)
try:
stream_version_lte = int(stream_version_lte)
except (TypeError, ValueError):
@@ -305,19 +335,27 @@ def filter_module_builds(flask_request):

br_joined = False
module_br_alias = None
for item in ('base_module_br', 'name', 'stream', 'version', 'context', 'stream_version',
'stream_version_lte', 'stream_version_gte'):
if item == 'base_module_br':
for item in (
"base_module_br",
"name",
"stream",
"version",
"context",
"stream_version",
"stream_version_lte",
"stream_version_gte",
):
if item == "base_module_br":
request_arg_name = item
else:
request_arg_name = 'base_module_br_{}'.format(item)
request_arg_name = "base_module_br_{}".format(item)
request_arg = flask_request.args.get(request_arg_name)

if not request_arg:
continue

if not br_joined:
module_br_alias = aliased(models.ModuleBuild, name='module_br')
module_br_alias = aliased(models.ModuleBuild, name="module_br")
# Shorten this table name for clarity in the query below
mb_to_br = models.module_builds_to_module_buildrequires
# The following joins get added:
@@ -325,14 +363,13 @@ def filter_module_builds(flask_request):
# ON module_builds_to_module_buildrequires.module_id = module_builds.id
# JOIN module_builds AS module_br
# ON module_builds_to_module_buildrequires.module_buildrequire_id = module_br.id
query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id)\
.join(module_br_alias,
mb_to_br.c.module_buildrequire_id == module_br_alias.id)
query = query.join(mb_to_br, mb_to_br.c.module_id == models.ModuleBuild.id).join(
module_br_alias, mb_to_br.c.module_buildrequire_id == module_br_alias.id)
br_joined = True

if item == 'base_module_br':
if item == "base_module_br":
try:
name, stream, version, context = flask_request.args['base_module_br'].split(':')
name, stream, version, context = flask_request.args["base_module_br"].split(":")
except ValueError:
raise ValidationError(
'The filter argument for "base_module_br" must be in the format of N:S:V:C')
@@ -340,12 +377,12 @@ def filter_module_builds(flask_request):
module_br_alias.name == name,
module_br_alias.stream == stream,
module_br_alias.version == version,
module_br_alias.context == context
module_br_alias.context == context,
)
elif item.endswith('_lte'):
elif item.endswith("_lte"):
column = getattr(module_br_alias, item[:-4])
query = query.filter(column <= request_arg)
elif item.endswith('_gte'):
elif item.endswith("_gte"):
column = getattr(module_br_alias, item[:-4])
query = query.filter(column >= request_arg)
else:
@@ -354,17 +391,18 @@ def filter_module_builds(flask_request):

query = _add_order_by_clause(flask_request, query, models.ModuleBuild)

page = flask_request.args.get('page', 1, type=int)
per_page = flask_request.args.get('per_page', 10, type=int)
page = flask_request.args.get("page", 1, type=int)
per_page = flask_request.args.get("per_page", 10, type=int)
return query.paginate(page, per_page, False)


def cors_header(allow='*'):
def cors_header(allow="*"):
"""
A decorator that sets the Access-Control-Allow-Origin header to the desired value on a Flask
route
:param allow: a string of the domain to allow. This defaults to '*'.
"""

def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
@@ -377,9 +415,11 @@ def cors_header(allow='*'):
response = rv
# Make sure we are dealing with a Flask Response object
if isinstance(response, Response):
response.headers.add('Access-Control-Allow-Origin', allow)
response.headers.add("Access-Control-Allow-Origin", allow)
return rv

return wrapper

return decorator


@@ -387,12 +427,15 @@ def validate_api_version():
"""
A decorator that validates the requested API version on a route
"""

def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
req_api_version = kwargs.get('api_version', 1)
req_api_version = kwargs.get("api_version", 1)
if req_api_version > api_version or req_api_version < 1:
raise NotFound('The requested API version is not available')
raise NotFound("The requested API version is not available")
return func(*args, **kwargs)

return wrapper

return decorator

@@ -35,67 +35,56 @@ from io import BytesIO

from module_build_service import app, conf, log, models, db, version, api_version as max_api_version
from module_build_service.utils import (
pagination_metadata, filter_module_builds, filter_component_builds,
submit_module_build_from_scm, submit_module_build_from_yaml,
get_scm_url_re, cors_header, validate_api_version, import_mmd,
get_mmd_from_scm, str_to_bool)
from module_build_service.errors import (
ValidationError, Forbidden, NotFound, ProgrammingError)
pagination_metadata,
filter_module_builds,
filter_component_builds,
submit_module_build_from_scm,
submit_module_build_from_yaml,
get_scm_url_re,
cors_header,
validate_api_version,
import_mmd,
get_mmd_from_scm,
str_to_bool,
)
from module_build_service.errors import ValidationError, Forbidden, NotFound, ProgrammingError
from module_build_service.backports import jsonify
from module_build_service.monitor import monitor_api


api_routes = {
'module_builds': {
'url': '/module-build-service/<int:api_version>/module-builds/',
'options': {
'methods': ['POST'],
}
"module_builds": {
"url": "/module-build-service/<int:api_version>/module-builds/",
"options": {"methods": ["POST"]},
},
'module_builds_list': {
'url': '/module-build-service/<int:api_version>/module-builds/',
'options': {
'defaults': {'id': None},
'methods': ['GET'],
}
"module_builds_list": {
"url": "/module-build-service/<int:api_version>/module-builds/",
"options": {"defaults": {"id": None}, "methods": ["GET"]},
},
'module_build': {
'url': '/module-build-service/<int:api_version>/module-builds/<int:id>',
'options': {
'methods': ['GET', 'PATCH'],
}
"module_build": {
"url": "/module-build-service/<int:api_version>/module-builds/<int:id>",
"options": {"methods": ["GET", "PATCH"]},
},
'component_builds_list': {
'url': '/module-build-service/<int:api_version>/component-builds/',
'options': {
'defaults': {'id': None},
'methods': ['GET'],
}
"component_builds_list": {
"url": "/module-build-service/<int:api_version>/component-builds/",
"options": {"defaults": {"id": None}, "methods": ["GET"]},
},
'component_build': {
'url': '/module-build-service/<int:api_version>/component-builds/<int:id>',
'options': {
'methods': ['GET'],
}
"component_build": {
"url": "/module-build-service/<int:api_version>/component-builds/<int:id>",
"options": {"methods": ["GET"]},
},
'about': {
'url': '/module-build-service/<int:api_version>/about/',
'options': {
'methods': ['GET']
}
"about": {
"url": "/module-build-service/<int:api_version>/about/",
"options": {"methods": ["GET"]},
},
'rebuild_strategies_list': {
'url': '/module-build-service/<int:api_version>/rebuild-strategies/',
'options': {
'methods': ['GET']
}
"rebuild_strategies_list": {
"url": "/module-build-service/<int:api_version>/rebuild-strategies/",
"options": {"methods": ["GET"]},
},
"import_module": {
"url": "/module-build-service/<int:api_version>/import-module/",
"options": {"methods": ["POST"]},
},
'import_module': {
'url': '/module-build-service/<int:api_version>/import-module/',
'options': {
'methods': ['POST'],
}
}
}


@@ -105,59 +94,60 @@ class AbstractQueryableBuildAPI(MethodView):
@cors_header()
@validate_api_version()
def get(self, api_version, id):
id_flag = request.args.get('id')
id_flag = request.args.get("id")
if id_flag:
endpoint = request.endpoint.split('s_list')[0]
endpoint = request.endpoint.split("s_list")[0]
raise ValidationError(
'The "id" query option is invalid. Did you mean to go to "{0}"?'.format(
url_for(endpoint, api_version=api_version, id=id_flag)))
verbose_flag = request.args.get('verbose', 'false').lower()
short_flag = request.args.get('short', 'false').lower()
url_for(endpoint, api_version=api_version, id=id_flag)
)
)
verbose_flag = request.args.get("verbose", "false").lower()
short_flag = request.args.get("short", "false").lower()
json_func_kwargs = {}
json_func_name = 'json'
json_func_name = "json"

if id is None:
# Lists all tracked builds
p_query = self.query_filter(request)
json_data = {
'meta': pagination_metadata(p_query, api_version, request.args)
}
json_data = {"meta": pagination_metadata(p_query, api_version, request.args)}

if verbose_flag == 'true' or verbose_flag == '1':
json_func_name = 'extended_json'
json_func_kwargs['show_state_url'] = True
json_func_kwargs['api_version'] = api_version
elif short_flag == 'true' or short_flag == '1':
if hasattr(p_query.items[0], 'short_json'):
json_func_name = 'short_json'
json_data['items'] = [getattr(item, json_func_name)(**json_func_kwargs)
for item in p_query.items]
if verbose_flag == "true" or verbose_flag == "1":
json_func_name = "extended_json"
json_func_kwargs["show_state_url"] = True
json_func_kwargs["api_version"] = api_version
elif short_flag == "true" or short_flag == "1":
if hasattr(p_query.items[0], "short_json"):
json_func_name = "short_json"
json_data["items"] = [
getattr(item, json_func_name)(**json_func_kwargs) for item in p_query.items
]

return jsonify(json_data), 200
else:
# Lists details for the specified build
instance = self.model.query.filter_by(id=id).first()
if instance:
if verbose_flag == 'true' or verbose_flag == '1':
json_func_name = 'extended_json'
json_func_kwargs['show_state_url'] = True
json_func_kwargs['api_version'] = api_version
elif short_flag == 'true' or short_flag == '1':
if getattr(instance, 'short_json', None):
json_func_name = 'short_json'
if verbose_flag == "true" or verbose_flag == "1":
json_func_name = "extended_json"
json_func_kwargs["show_state_url"] = True
json_func_kwargs["api_version"] = api_version
elif short_flag == "true" or short_flag == "1":
if getattr(instance, "short_json", None):
json_func_name = "short_json"
return jsonify(getattr(instance, json_func_name)(**json_func_kwargs)), 200
else:
raise NotFound('No such %s found.' % self.kind)
raise NotFound("No such %s found." % self.kind)


class ComponentBuildAPI(AbstractQueryableBuildAPI):
kind = 'component'
kind = "component"
query_filter = staticmethod(filter_component_builds)
model = models.ComponentBuild


class ModuleBuildAPI(AbstractQueryableBuildAPI):
kind = 'module'
kind = "module"
query_filter = staticmethod(filter_module_builds)
model = models.ModuleBuild

@@ -167,8 +157,7 @@ class ModuleBuildAPI(AbstractQueryableBuildAPI):
if username in conf.allowed_users:
return
if allowed_groups and not (allowed_groups & groups):
raise Forbidden("%s is not in any of %r, only %r" % (
username, allowed_groups, groups))
raise Forbidden("%s is not in any of %r, only %r" % (username, allowed_groups, groups))

# Additional POST and DELETE handlers for modules follow.
@validate_api_version()
@@ -200,13 +189,14 @@ class ModuleBuildAPI(AbstractQueryableBuildAPI):
try:
r = json.loads(request.get_data().decode("utf-8"))
except Exception:
log.exception('Invalid JSON submitted')
raise ValidationError('Invalid JSON submitted')
log.exception("Invalid JSON submitted")
raise ValidationError("Invalid JSON submitted")

if "owner" in r:
if conf.no_auth is not True:
raise ValidationError(("The request contains 'owner' parameter,"
" however NO_AUTH is not allowed"))
raise ValidationError(
"The request contains 'owner' parameter, however NO_AUTH is not allowed"
)
elif username == "anonymous":
username = r["owner"]

@@ -214,27 +204,23 @@ class ModuleBuildAPI(AbstractQueryableBuildAPI):

module = models.ModuleBuild.query.filter_by(id=id).first()
if not module:
raise NotFound('No such module found.')
raise NotFound("No such module found.")

if module.owner != username and not (conf.admin_groups & groups):
raise Forbidden('You are not owner of this build and '
'therefore cannot modify it.')
raise Forbidden("You are not owner of this build and therefore cannot modify it.")

if not r.get('state'):
log.error('Invalid JSON submitted')
raise ValidationError('Invalid JSON submitted')
if not r.get("state"):
log.error("Invalid JSON submitted")
raise ValidationError("Invalid JSON submitted")

if module.state == models.BUILD_STATES['failed']:
raise Forbidden('You can\'t cancel a failed module')
if module.state == models.BUILD_STATES["failed"]:
raise Forbidden("You can't cancel a failed module")

if r['state'] == 'failed' \
or r['state'] == str(models.BUILD_STATES['failed']):
module.transition(conf, models.BUILD_STATES["failed"],
"Canceled by %s." % username)
if r["state"] == "failed" or r["state"] == str(models.BUILD_STATES["failed"]):
module.transition(conf, models.BUILD_STATES["failed"], "Canceled by %s." % username)
else:
log.error('The provided state change of "{}" is not supported'
.format(r['state']))
raise ValidationError('The provided state change is not supported')
log.error('The provided state change of "{}" is not supported'.format(r["state"]))
raise ValidationError("The provided state change is not supported")
db.session.add(module)
db.session.commit()

@@ -245,14 +231,13 @@ class AboutAPI(MethodView):
@cors_header()
@validate_api_version()
def get(self, api_version):
json = {'version': version, 'api_version': max_api_version}
config_items = ['auth_method']
json = {"version": version, "api_version": max_api_version}
config_items = ["auth_method"]
for item in config_items:
config_item = getattr(conf, item)
# All config items have a default, so if doesn't exist it is a programming error
if not config_item:
raise ProgrammingError(
'An invalid config item of "{0}" was specified'.format(item))
raise ProgrammingError('An invalid config item of "{0}" was specified'.format(item))
json[item] = config_item
return jsonify(json), 200

@@ -268,37 +253,37 @@ class RebuildStrategies(MethodView):
if strategy == conf.rebuild_strategy:
default = True
allowed = True
elif conf.rebuild_strategy_allow_override and \
strategy in conf.rebuild_strategies_allowed:
elif (
conf.rebuild_strategy_allow_override and strategy in conf.rebuild_strategies_allowed
):
allowed = True
else:
allowed = False
items.append({
'name': strategy,
'description': models.ModuleBuild.rebuild_strategies[strategy],
'allowed': allowed,
'default': default
"name": strategy,
"description": models.ModuleBuild.rebuild_strategies[strategy],
"allowed": allowed,
"default": default,
})

return jsonify({'items': items}), 200
return jsonify({"items": items}), 200


class ImportModuleAPI(MethodView):

@validate_api_version()
def post(self, api_version):
# disable this API endpoint if no groups are defined
if not conf.allowed_groups_to_import_module:
log.error(
"Import module API is disabled. Set 'ALLOWED_GROUPS_TO_IMPORT_MODULE'"
" configuration value first.")
raise Forbidden(
"Import module API is disabled.")
" configuration value first."
)
raise Forbidden("Import module API is disabled.")

# auth checks
username, groups = module_build_service.auth.get_user(request)
ModuleBuildAPI.check_groups(username, groups,
allowed_groups=conf.allowed_groups_to_import_module)
ModuleBuildAPI.check_groups(
username, groups, allowed_groups=conf.allowed_groups_to_import_module)

# process request using SCM handler
handler = SCMHandler(request)
@@ -306,8 +291,7 @@ class ImportModuleAPI(MethodView):

mmd = get_mmd_from_scm(handler.data["scmurl"])
build, messages = import_mmd(db.session, mmd)
json_data = {"module": build.json(show_tasks=False),
"messages": messages}
json_data = {"module": build.json(show_tasks=False), "messages": messages}

# return 201 Created if we reach this point
return jsonify(json_data), 201
@@ -315,16 +299,16 @@ class ImportModuleAPI(MethodView):

class BaseHandler(object):
valid_params = set([
'branch',
'buildrequire_overrides',
'modulemd',
'module_name',
'owner',
'rebuild_strategy',
'require_overrides',
'scmurl',
'scratch',
'srpms'
"branch",
"buildrequire_overrides",
"modulemd",
"module_name",
"owner",
"rebuild_strategy",
"require_overrides",
"scmurl",
"scratch",
"srpms",
])

def __init__(self, request, data=None):
@@ -332,21 +316,21 @@ class BaseHandler(object):
self.data = data or _dict_from_request(request)

# canonicalize and validate scratch option
if 'scratch' in self.data and str_to_bool(str(self.data['scratch'])):
self.data['scratch'] = True
if "scratch" in self.data and str_to_bool(str(self.data["scratch"])):
self.data["scratch"] = True
if conf.modules_allow_scratch is not True:
raise Forbidden('Scratch builds are not enabled')
raise Forbidden("Scratch builds are not enabled")
else:
self.data['scratch'] = False
self.data["scratch"] = False

# canonicalize and validate srpms list
if 'srpms' in self.data and self.data['srpms']:
if not self.data['scratch']:
raise Forbidden('srpms may only be specified for scratch builds')
if not isinstance(self.data['srpms'], list):
raise ValidationError('srpms must be specified as a list')
if "srpms" in self.data and self.data["srpms"]:
if not self.data["scratch"]:
raise Forbidden("srpms may only be specified for scratch builds")
if not isinstance(self.data["srpms"], list):
raise ValidationError("srpms must be specified as a list")
else:
self.data['srpms'] = []
self.data["srpms"] = []

def _validate_dep_overrides_format(self, key):
"""
@@ -357,8 +341,10 @@ class BaseHandler(object):
"""
if not self.data.get(key):
return
invalid_override_msg = ('The "{}" parameter must be an object with the keys as module '
'names and the values as arrays of streams'.format(key))
invalid_override_msg = (
'The "{}" parameter must be an object with the keys as module '
"names and the values as arrays of streams".format(key)
)
if not isinstance(self.data[key], dict):
raise ValidationError(invalid_override_msg)
for streams in self.data[key].values():
@@ -371,33 +357,37 @@ class BaseHandler(object):
def validate_optional_params(self):
forbidden_params = [k for k in self.data if k not in self.valid_params]
if forbidden_params:
raise ValidationError('The request contains unspecified parameters: {}'
.format(", ".join(forbidden_params)))
raise ValidationError(
"The request contains unspecified parameters: {}".format(
", ".join(forbidden_params))
)

if not conf.no_auth and "owner" in self.data:
raise ValidationError(("The request contains 'owner' parameter,"
" however NO_AUTH is not allowed"))
raise ValidationError(
"The request contains 'owner' parameter, however NO_AUTH is not allowed")

if not conf.rebuild_strategy_allow_override and 'rebuild_strategy' in self.data:
raise ValidationError('The request contains the "rebuild_strategy" parameter but '
'overriding the default isn\'t allowed')
if not conf.rebuild_strategy_allow_override and "rebuild_strategy" in self.data:
raise ValidationError(
'The request contains the "rebuild_strategy" parameter but '
"overriding the default isn't allowed"
)

if 'rebuild_strategy' in self.data:
if self.data['rebuild_strategy'] not in conf.rebuild_strategies_allowed:
if "rebuild_strategy" in self.data:
if self.data["rebuild_strategy"] not in conf.rebuild_strategies_allowed:
raise ValidationError(
'The rebuild method of "{0}" is not allowed. Choose from: {1}.'
.format(self.data['rebuild_strategy'],
', '.join(conf.rebuild_strategies_allowed)))
'The rebuild method of "{0}" is not allowed. Choose from: {1}.'.format(
self.data["rebuild_strategy"], ", ".join(conf.rebuild_strategies_allowed))
)

self._validate_dep_overrides_format('buildrequire_overrides')
self._validate_dep_overrides_format('require_overrides')
self._validate_dep_overrides_format("buildrequire_overrides")
self._validate_dep_overrides_format("require_overrides")


class SCMHandler(BaseHandler):
def validate(self, skip_branch=False, skip_optional_params=False):
if "scmurl" not in self.data:
log.error('Missing scmurl')
raise ValidationError('Missing scmurl')
log.error("Missing scmurl")
raise ValidationError("Missing scmurl")

url = self.data["scmurl"]
allowed_prefix = any(url.startswith(prefix) for prefix in conf.scmurls)
@@ -410,8 +400,8 @@ class SCMHandler(BaseHandler):
raise Forbidden("The submitted scmurl %s is not valid" % url)

if not skip_branch and "branch" not in self.data:
log.error('Missing branch')
raise ValidationError('Missing branch')
log.error("Missing branch")
raise ValidationError("Missing branch")

if not skip_optional_params:
self.validate_optional_params()
@@ -423,14 +413,16 @@ class SCMHandler(BaseHandler):
class YAMLFileHandler(BaseHandler):
def __init__(self, request, data=None):
super(YAMLFileHandler, self).__init__(request, data)
if not self.data['scratch'] and not conf.yaml_submit_allowed:
if not self.data["scratch"] and not conf.yaml_submit_allowed:
raise Forbidden("YAML submission is not enabled")

def validate(self):
if ("modulemd" not in self.data and
(not hasattr(request, "files") or "yaml" not in request.files)):
log.error('Invalid file submitted')
raise ValidationError('Invalid file submitted')
if (
"modulemd" not in self.data
and (not hasattr(request, "files") or "yaml" not in request.files)
):
log.error("Invalid file submitted")
raise ValidationError("Invalid file submitted")
self.validate_optional_params()

def post(self):
@@ -450,44 +442,31 @@ def _dict_from_request(request):
try:
data = json.loads(request.get_data().decode("utf-8"))
except Exception:
log.exception('Invalid JSON submitted')
raise ValidationError('Invalid JSON submitted')
log.exception("Invalid JSON submitted")
raise ValidationError("Invalid JSON submitted")
return data


def register_api():
""" Registers the MBS API. """
module_view = ModuleBuildAPI.as_view('module_builds')
component_view = ComponentBuildAPI.as_view('component_builds')
about_view = AboutAPI.as_view('about')
rebuild_strategies_view = RebuildStrategies.as_view('rebuild_strategies')
import_module = ImportModuleAPI.as_view('import_module')
module_view = ModuleBuildAPI.as_view("module_builds")
component_view = ComponentBuildAPI.as_view("component_builds")
about_view = AboutAPI.as_view("about")
rebuild_strategies_view = RebuildStrategies.as_view("rebuild_strategies")
import_module = ImportModuleAPI.as_view("import_module")
for key, val in api_routes.items():
if key.startswith('component_build'):
app.add_url_rule(val['url'],
endpoint=key,
view_func=component_view,
**val['options'])
elif key.startswith('module_build'):
app.add_url_rule(val['url'],
endpoint=key,
view_func=module_view,
**val['options'])
elif key.startswith('about'):
app.add_url_rule(val['url'],
endpoint=key,
view_func=about_view,
**val['options'])
elif key == 'rebuild_strategies_list':
app.add_url_rule(val['url'],
endpoint=key,
view_func=rebuild_strategies_view,
**val['options'])
elif key == 'import_module':
app.add_url_rule(val['url'],
endpoint=key,
view_func=import_module,
**val['options'])
if key.startswith("component_build"):
app.add_url_rule(val["url"], endpoint=key, view_func=component_view, **val["options"])
elif key.startswith("module_build"):
app.add_url_rule(val["url"], endpoint=key, view_func=module_view, **val["options"])
elif key.startswith("about"):
app.add_url_rule(val["url"], endpoint=key, view_func=about_view, **val["options"])
elif key == "rebuild_strategies_list":
app.add_url_rule(
val["url"], endpoint=key, view_func=rebuild_strategies_view, **val["options"]
)
elif key == "import_module":
app.add_url_rule(val["url"], endpoint=key, view_func=import_module, **val["options"])
else:
raise NotImplementedError("Unhandled api key.")


121
setup.py
@@ -6,11 +6,11 @@ from setuptools import setup, find_packages
def read_requirements(filename):
specifiers = []
dep_links = []
with open(filename, 'r') as f:
with open(filename, "r") as f:
for line in f:
if line.startswith('-r') or line.strip() == '':
if line.startswith("-r") or line.strip() == "":
continue
if line.startswith('git+'):
if line.startswith("git+"):
dep_links.append(line.strip())
else:
specifiers.append(line.strip())
@@ -18,56 +18,67 @@ def read_requirements(filename):


setup_py_path = path.dirname(path.realpath(__file__))
install_requires, deps_links = read_requirements(path.join(setup_py_path, 'requirements.txt'))
tests_require, _ = read_requirements(path.join(setup_py_path, 'test-requirements.txt'))
install_requires, deps_links = read_requirements(path.join(setup_py_path, "requirements.txt"))
tests_require, _ = read_requirements(path.join(setup_py_path, "test-requirements.txt"))

setup(name='module-build-service',
description='The Module Build Service for Modularity',
version='2.19.1',
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools"
],
keywords='module build service fedora modularity koji mock rpm',
author='The Factory 2.0 Team',
author_email='module-build-service-owner@fedoraproject.org',
url='https://pagure.io/fm-orchestrator/',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
dependency_links=deps_links,
entry_points={
'console_scripts': ['mbs-upgradedb = module_build_service.manage:upgradedb',
'mbs-frontend = module_build_service.manage:run',
'mbs-manager = module_build_service.manage:manager_wrapper'],
'moksha.consumer': 'mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer',
'moksha.producer': 'mbspoller = module_build_service.scheduler.producer:MBSProducer',
'mbs.messaging_backends': [
'fedmsg = module_build_service.messaging:_fedmsg_backend',
'in_memory = module_build_service.messaging:_in_memory_backend',
# 'custom = your_organization:_custom_backend',
],
'mbs.builder_backends': [
'koji = module_build_service.builder.KojiModuleBuilder:KojiModuleBuilder',
'mock = module_build_service.builder.MockModuleBuilder:MockModuleBuilder',
],
'mbs.resolver_backends': [
'mbs = module_build_service.resolver.MBSResolver:MBSResolver',
'db = module_build_service.resolver.DBResolver:DBResolver',
'local = module_build_service.resolver.LocalResolver:LocalResolver',
],
},
scripts=['client/mbs-cli'],
data_files=[('/etc/module-build-service/', ['conf/cacert.pem',
'conf/config.py',
'conf/koji.conf',
'conf/mock.cfg',
'conf/yum.conf']),
('/etc/fedmsg.d/', ['fedmsg.d/mbs-logging.py',
'fedmsg.d/mbs-scheduler.py',
'fedmsg.d/module_build_service.py']),
],
)
setup(
name="module-build-service",
description="The Module Build Service for Modularity",
version="2.19.1",
classifiers=["Programming Language :: Python", "Topic :: Software Development :: Build Tools"],
keywords="module build service fedora modularity koji mock rpm",
author="The Factory 2.0 Team",
author_email="module-build-service-owner@fedoraproject.org",
url="https://pagure.io/fm-orchestrator/",
license="MIT",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
dependency_links=deps_links,
entry_points={
"console_scripts": [
"mbs-upgradedb = module_build_service.manage:upgradedb",
"mbs-frontend = module_build_service.manage:run",
"mbs-manager = module_build_service.manage:manager_wrapper",
],
"moksha.consumer": "mbsconsumer = module_build_service.scheduler.consumer:MBSConsumer",
"moksha.producer": "mbspoller = module_build_service.scheduler.producer:MBSProducer",
"mbs.messaging_backends": [
"fedmsg = module_build_service.messaging:_fedmsg_backend",
"in_memory = module_build_service.messaging:_in_memory_backend",
# 'custom = your_organization:_custom_backend',
],
"mbs.builder_backends": [
"koji = module_build_service.builder.KojiModuleBuilder:KojiModuleBuilder",
"mock = module_build_service.builder.MockModuleBuilder:MockModuleBuilder",
],
"mbs.resolver_backends": [
"mbs = module_build_service.resolver.MBSResolver:MBSResolver",
"db = module_build_service.resolver.DBResolver:DBResolver",
"local = module_build_service.resolver.LocalResolver:LocalResolver",
],
},
scripts=["client/mbs-cli"],
data_files=[
(
"/etc/module-build-service/",
[
"conf/cacert.pem",
"conf/config.py",
"conf/koji.conf",
"conf/mock.cfg",
"conf/yum.conf",
],
),
(
"/etc/fedmsg.d/",
[
"fedmsg.d/mbs-logging.py",
"fedmsg.d/mbs-scheduler.py",
"fedmsg.d/module_build_service.py",
],
),
],
)

@@ -55,8 +55,8 @@ def read_staged_data(yaml_name):
|
||||
"""
|
||||
filename = os.path.join(base_dir, "staged_data", "{}.yaml".format(yaml_name))
|
||||
if not os.path.exists(filename):
|
||||
raise ValueError('Staged data {}.yaml does not exist.'.format(yaml_name))
|
||||
with open(filename, 'r') as mmd:
|
||||
raise ValueError("Staged data {}.yaml does not exist.".format(yaml_name))
|
||||
with open(filename, "r") as mmd:
|
||||
return to_text_type(mmd.read())
|
||||
|
||||
|
||||
@@ -64,7 +64,7 @@ def patch_config():
|
||||
# add test builders for all resolvers
|
||||
with_test_builders = dict()
|
||||
for k, v in module_build_service.config.SUPPORTED_RESOLVERS.items():
|
||||
v['builders'].extend(['test', 'testlocal'])
|
||||
v["builders"].extend(["test", "testlocal"])
|
||||
with_test_builders[k] = v
|
||||
patch("module_build_service.config.SUPPORTED_RESOLVERS", with_test_builders)
|
||||
|
||||
@@ -108,7 +108,7 @@ def clean_database(add_platform_module=True):
|
||||
db.drop_all()
|
||||
db.create_all()
|
||||
if add_platform_module:
|
||||
mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml'))
|
||||
mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))
|
||||
import_mmd(db.session, mmd)
|
||||
|
||||
|
||||
@@ -124,7 +124,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=False, scra
|
||||
"""
|
||||
clean_database()
|
||||
if multiple_stream_versions:
|
||||
mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml'))
|
||||
mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))
|
||||
for stream in ["f28.0.0", "f29.0.0", "f29.1.0", "f29.2.0"]:
|
||||
mmd.set_name("platform")
|
||||
mmd.set_stream(stream)
|
||||
@@ -132,7 +132,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=False, scra
|
||||
# Set the virtual_streams based on "fXY" to mark the platform streams
|
||||
# with the same major stream_version compatible.
|
||||
xmd = glib.from_variant_dict(mmd.get_xmd())
|
||||
xmd['mbs']['virtual_streams'] = [stream[:3]]
|
||||
xmd["mbs"]["virtual_streams"] = [stream[:3]]
|
||||
mmd.set_xmd(glib.dict_values(xmd))
|
||||
import_mmd(db.session, mmd)
|
||||
|
||||
@@ -149,33 +149,37 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
|
||||
for index in range(data_size):
|
||||
for context in range(num_contexts):
|
||||
build_one = ModuleBuild(
|
||||
name='nginx',
|
||||
stream='1',
|
||||
name="nginx",
|
||||
stream="1",
|
||||
version=2 + index,
|
||||
state=BUILD_STATES['ready'],
|
||||
state=BUILD_STATES["ready"],
|
||||
scratch=scratch,
|
||||
modulemd=read_staged_data('nginx_mmd'),
|
||||
koji_tag='scrmod-nginx-1.2' if scratch else 'module-nginx-1.2',
|
||||
scmurl='git://pkgs.domain.local/modules/nginx'
|
||||
'?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9',
|
||||
modulemd=read_staged_data("nginx_mmd"),
|
||||
koji_tag="scrmod-nginx-1.2" if scratch else "module-nginx-1.2",
|
||||
scmurl="git://pkgs.domain.local/modules/nginx"
|
||||
"?#ba95886c7a443b36a9ce31abda1f9bef22f2f8c9",
|
||||
batch=2,
|
||||
# https://www.youtube.com/watch?v=iQGwrK_yDEg,
|
||||
owner='Moe Szyslak',
|
||||
owner="Moe Szyslak",
|
||||
time_submitted=datetime(2016, 9, 3, 11, 23, 20) + timedelta(minutes=(index * 10)),
|
||||
time_modified=datetime(2016, 9, 3, 11, 25, 32) + timedelta(minutes=(index * 10)),
|
||||
time_completed=datetime(2016, 9, 3, 11, 25, 32) + timedelta(minutes=(index * 10)),
|
||||
rebuild_strategy='changed-and-after',
|
||||
rebuild_strategy="changed-and-after",
|
||||
)
|
||||
|
||||
if contexts:
|
||||
build_one.stream = str(index)
|
||||
unique_hash = hashlib.sha1(("%s:%s:%d:%d" % (
|
||||
build_one.name, build_one.stream, build_one.version,
|
||||
context)).encode("utf-8")).hexdigest()
|
||||
nsvc = "{}:{}:{}:{}".format(
|
||||
build_one.name,
|
||||
build_one.stream,
|
||||
build_one.version,
|
||||
context
|
||||
)
|
||||
unique_hash = hashlib.sha1(nsvc.encode('utf-8')).hexdigest()
|
||||
build_one.build_context = unique_hash
|
||||
build_one.runtime_context = unique_hash
|
||||
build_one.ref_build_context = unique_hash
|
||||
combined_hashes = '{0}:{1}'.format(unique_hash, unique_hash)
|
||||
combined_hashes = "{0}:{1}".format(unique_hash, unique_hash)
|
||||
build_one.context = hashlib.sha1(combined_hashes.encode("utf-8")).hexdigest()[:8]
|
||||
|
||||
session.add(build_one)
|
||||
@@ -183,13 +187,13 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
|
||||
build_one_component_release = get_rpm_release(build_one)
|
||||
|
||||
component_one_build_one = ComponentBuild(
|
||||
package='nginx',
|
||||
scmurl='git://pkgs.domain.local/rpms/nginx?'
|
||||
'#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3',
|
||||
format='rpms',
|
||||
package="nginx",
|
||||
scmurl="git://pkgs.domain.local/rpms/nginx?"
|
||||
"#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3",
|
||||
format="rpms",
|
||||
task_id=12312345 + index,
state=koji.BUILD_STATES['COMPLETE'],
nvr='nginx-1.10.1-2.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="nginx-1.10.1-2.{0}".format(build_one_component_release),
batch=1,
module_id=2 + index * 3,
tagged=True,
@@ -197,13 +201,13 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
)

component_two_build_one = ComponentBuild(
package='module-build-macros',
scmurl='/tmp/module_build_service-build-macrosWZUPeK/SRPMS/'
'module-build-macros-0.1-1.module_nginx_1_2.src.rpm',
format='rpms',
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_nginx_1_2.src.rpm",
format="rpms",
task_id=12312321 + index,
state=koji.BUILD_STATES['COMPLETE'],
nvr='module-build-macros-01-1.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_one_component_release),
batch=2,
module_id=2 + index * 3,
tagged=True,
@@ -211,21 +215,21 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
)

build_two = ModuleBuild(
name='postgressql',
stream='1',
name="postgressql",
stream="1",
version=2 + index,
state=BUILD_STATES['done'],
state=BUILD_STATES["done"],
scratch=scratch,
modulemd=read_staged_data('testmodule'),
koji_tag='scrmod-postgressql-1.2' if scratch else 'module-postgressql-1.2',
scmurl='git://pkgs.domain.local/modules/postgressql'
'?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9',
modulemd=read_staged_data("testmodule"),
koji_tag="scrmod-postgressql-1.2" if scratch else "module-postgressql-1.2",
scmurl="git://pkgs.domain.local/modules/postgressql"
"?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9",
batch=2,
owner='some_user',
owner="some_user",
time_submitted=datetime(2016, 9, 3, 12, 25, 33) + timedelta(minutes=(index * 10)),
time_modified=datetime(2016, 9, 3, 12, 27, 19) + timedelta(minutes=(index * 10)),
time_completed=datetime(2016, 9, 3, 11, 27, 19) + timedelta(minutes=(index * 10)),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
)

session.add(build_two)
@@ -233,13 +237,13 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
build_two_component_release = get_rpm_release(build_two)

component_one_build_two = ComponentBuild(
package='postgresql',
scmurl='git://pkgs.domain.local/rpms/postgresql'
'?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3',
format='rpms',
package="postgresql",
scmurl="git://pkgs.domain.local/rpms/postgresql"
"?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3",
format="rpms",
task_id=2433433 + index,
state=koji.BUILD_STATES['COMPLETE'],
nvr='postgresql-9.5.3-4.{0}'.format(build_two_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="postgresql-9.5.3-4.{0}".format(build_two_component_release),
batch=2,
module_id=3 + index * 3,
tagged=True,
@@ -247,58 +251,58 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
)

component_two_build_two = ComponentBuild(
package='module-build-macros',
scmurl='/tmp/module_build_service-build-macrosWZUPeK/SRPMS/'
'module-build-macros-0.1-1.module_postgresql_1_2.src.rpm',
format='rpms',
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_postgresql_1_2.src.rpm",
format="rpms",
task_id=47383993 + index,
state=koji.BUILD_STATES['COMPLETE'],
nvr='module-build-macros-01-1.{0}'.format(build_two_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_two_component_release),
batch=1,
module_id=3 + index * 3,
)

build_three = ModuleBuild(
name='testmodule',
stream='4.3.43',
name="testmodule",
stream="4.3.43",
version=6 + index,
state=BUILD_STATES['wait'],
state=BUILD_STATES["wait"],
scratch=scratch,
modulemd=read_staged_data('testmodule'),
modulemd=read_staged_data("testmodule"),
koji_tag=None,
scmurl='git://pkgs.domain.local/modules/testmodule'
'?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9',
scmurl="git://pkgs.domain.local/modules/testmodule"
"?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9",
batch=0,
owner='some_other_user',
owner="some_other_user",
time_submitted=datetime(2016, 9, 3, 12, 28, 33) + timedelta(minutes=(index * 10)),
time_modified=datetime(2016, 9, 3, 12, 28, 40) + timedelta(minutes=(index * 10)),
time_completed=None,
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
)
session.add(build_three)
session.commit()
build_three_component_release = get_rpm_release(build_three)

component_one_build_three = ComponentBuild(
package='rubygem-rails',
scmurl='git://pkgs.domain.local/rpms/rubygem-rails'
'?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3',
format='rpms',
package="rubygem-rails",
scmurl="git://pkgs.domain.local/rpms/rubygem-rails"
"?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3",
format="rpms",
task_id=2433433 + index,
state=koji.BUILD_STATES['FAILED'],
nvr='postgresql-9.5.3-4.{0}'.format(build_three_component_release),
state=koji.BUILD_STATES["FAILED"],
nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release),
batch=2,
module_id=4 + index * 3,
)

component_two_build_three = ComponentBuild(
package='module-build-macros',
scmurl='/tmp/module_build_service-build-macrosWZUPeK/SRPMS/'
'module-build-macros-0.1-1.module_testmodule_1_2.src.rpm',
format='rpms',
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_testmodule_1_2.src.rpm",
format="rpms",
task_id=47383993 + index,
state=koji.BUILD_STATES['COMPLETE'],
nvr='module-build-macros-01-1.{0}'.format(build_three_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_three_component_release),
batch=1,
module_id=4 + index * 3,
tagged=True,
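These fixtures index koji.BUILD_STATES by state name throughout. A minimal sketch of the lookup they rely on (illustrative, not part of the commit; koji exposes its build states as an enum-like mapping from names to the integers stored on the model rows):

import koji

# Name-to-integer lookup; the integer is what the ComponentBuild.state
# column actually stores.
complete = koji.BUILD_STATES["COMPLETE"]
failed = koji.BUILD_STATES["FAILED"]
assert isinstance(complete, int) and complete != failed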
@@ -321,31 +325,31 @@ def scheduler_init_data(tangerine_state=None, scratch=False):

current_dir = os.path.dirname(__file__)
formatted_testmodule_yml_path = os.path.join(
current_dir, 'staged_data', 'formatted_testmodule.yaml')
current_dir, "staged_data", "formatted_testmodule.yaml")
mmd = load_mmd_file(formatted_testmodule_yml_path)
mmd.get_rpm_components()['tangerine'].set_buildorder(0)
mmd.get_rpm_components()["tangerine"].set_buildorder(0)

platform_br = module_build_service.models.ModuleBuild.query.get(1)

module_build = module_build_service.models.ModuleBuild(
name='testmodule',
stream='master',
name="testmodule",
stream="master",
version=20170109091357,
state=BUILD_STATES['build'],
state=BUILD_STATES["build"],
scratch=scratch,
build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0',
runtime_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0',
context='7c29193d',
koji_tag='scrmod-testmodule-master-20170109091357-7c29193d'
if scratch else
'module-testmodule-master-20170109091357-7c29193d',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79',
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
context="7c29193d",
koji_tag="scrmod-testmodule-master-20170109091357-7c29193d"
if scratch
else "module-testmodule-master-20170109091357-7c29193d",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
batch=3 if tangerine_state else 2,
# https://www.youtube.com/watch?v=iOKymYVSaJE
owner='Buzz Lightyear',
owner="Buzz Lightyear",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
modulemd=to_text_type(mmd.dumps()),
)
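The koji_tag argument above also shows how black lays out a conditional expression that no longer fits on one line: the if and else each start their own continuation line instead of the old hanging "if scratch else". A before/after sketch of the same rewrite on a standalone assignment (illustrative, not from this repository):

# Before:
tag = 'scrmod-testmodule-master-20170109091357-7c29193d' if scratch else 'module-testmodule-master-20170109091357-7c29193d'
# After black:
tag = (
    "scrmod-testmodule-master-20170109091357-7c29193d"
    if scratch
    else "module-testmodule-master-20170109091357-7c29193d"
)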

@@ -354,53 +358,54 @@ def scheduler_init_data(tangerine_state=None, scratch=False):

module_build.component_builds.extend([
module_build_service.models.ComponentBuild(
package='perl-Tangerine',
scmurl='https://src.fedoraproject.org/rpms/perl-Tangerine'
'?#4ceea43add2366d8b8c5a622a2fb563b625b9abf',
format='rpms',
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
task_id=90276227,
state=koji.BUILD_STATES['COMPLETE'],
nvr='perl-Tangerine-0.23-1.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release),
batch=2,
ref='4ceea43add2366d8b8c5a622a2fb563b625b9abf',
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
package='perl-List-Compare',
scmurl='https://src.fedoraproject.org/rpms/perl-List-Compare'
'?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb',
format='rpms',
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
task_id=90276228,
state=koji.BUILD_STATES['COMPLETE'],
nvr='perl-List-Compare-0.53-5.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release),
batch=2,
ref='76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb',
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
package='tangerine',
scmurl='https://src.fedoraproject.org/rpms/tangerine'
'?#fbed359411a1baa08d4a88e0d12d426fbf8f602c',
format='rpms',
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
batch=3,
ref='fbed359411a1baa08d4a88e0d12d426fbf8f602c',
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
state=tangerine_state,
task_id=90276315 if tangerine_state else None,
nvr='tangerine-0.22-3.{}'.format(build_one_component_release)
if tangerine_state else None,
tagged=tangerine_state == koji.BUILD_STATES['COMPLETE'],
tagged_in_final=tangerine_state == koji.BUILD_STATES['COMPLETE'],
nvr="tangerine-0.22-3.{}".format(build_one_component_release)
if tangerine_state
else None,
tagged=tangerine_state == koji.BUILD_STATES["COMPLETE"],
tagged_in_final=tangerine_state == koji.BUILD_STATES["COMPLETE"],
),
module_build_service.models.ComponentBuild(
package='module-build-macros',
scmurl='/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-'
'macros-0.1-1.module_testmodule_master_20170109091357.src.rpm',
format='rpms',
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",
format="rpms",
task_id=90276181,
state=koji.BUILD_STATES['COMPLETE'],
nvr='module-build-macros-0.1-1.{}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{}".format(build_one_component_release),
batch=1,
tagged=True,
build_time_only=True,
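This hunk repeats the two mechanical changes that account for most of the commit: string literals are normalized to double quotes, and the final argument of any call black explodes across lines gains a "magic" trailing comma, which keeps future diffs to one line per argument. A hedged sketch with a toy call:

# Before:
build = ComponentBuild(package='ed',
                       format='rpms')
# After black:
build = ComponentBuild(
    package="ed",
    format="rpms",
)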
@@ -418,88 +423,88 @@ def reuse_component_init_data():

current_dir = os.path.dirname(__file__)
formatted_testmodule_yml_path = os.path.join(
current_dir, 'staged_data', 'formatted_testmodule.yaml')
current_dir, "staged_data", "formatted_testmodule.yaml")
mmd = load_mmd_file(formatted_testmodule_yml_path)

platform_br = module_build_service.models.ModuleBuild.query.get(1)

build_one = module_build_service.models.ModuleBuild(
name='testmodule',
stream='master',
name="testmodule",
stream="master",
version=20170109091357,
state=BUILD_STATES['ready'],
ref_build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0',
runtime_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0',
build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1',
context='78e4a6fd',
koji_tag='module-testmodule-master-20170109091357-78e4a6fd',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79',
state=BUILD_STATES["ready"],
ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",
context="78e4a6fd",
koji_tag="module-testmodule-master-20170109091357-78e4a6fd",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
batch=3,
owner='Tom Brady',
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
)

build_one_component_release = get_rpm_release(build_one)

mmd.set_version(int(build_one.version))
xmd = glib.from_variant_dict(mmd.get_xmd())
xmd['mbs']['scmurl'] = build_one.scmurl
xmd['mbs']['commit'] = 'ff1ea79fc952143efeed1851aa0aa006559239ba'
xmd["mbs"]["scmurl"] = build_one.scmurl
xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"
mmd.set_xmd(glib.dict_values(xmd))
build_one.modulemd = to_text_type(mmd.dumps())
build_one.buildrequires.append(platform_br)

build_one.component_builds.extend([
module_build_service.models.ComponentBuild(
package='perl-Tangerine',
scmurl='https://src.fedoraproject.org/rpms/perl-Tangerine'
'?#4ceea43add2366d8b8c5a622a2fb563b625b9abf',
format='rpms',
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
task_id=90276227,
state=koji.BUILD_STATES['COMPLETE'],
nvr='perl-Tangerine-0.23-1.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release),
batch=2,
ref='4ceea43add2366d8b8c5a622a2fb563b625b9abf',
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
package='perl-List-Compare',
scmurl='https://src.fedoraproject.org/rpms/perl-List-Compare'
'?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb',
format='rpms',
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
task_id=90276228,
state=koji.BUILD_STATES['COMPLETE'],
nvr='perl-List-Compare-0.53-5.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release),
batch=2,
ref='76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb',
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
package='tangerine',
scmurl='https://src.fedoraproject.org/rpms/tangerine'
'?#fbed359411a1baa08d4a88e0d12d426fbf8f602c',
format='rpms',
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
task_id=90276315,
state=koji.BUILD_STATES['COMPLETE'],
nvr='tangerine-0.22-3.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="tangerine-0.22-3.{0}".format(build_one_component_release),
batch=3,
ref='fbed359411a1baa08d4a88e0d12d426fbf8f602c',
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
package='module-build-macros',
scmurl='/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-'
'macros-0.1-1.module_testmodule_master_20170109091357.src.rpm',
format='rpms',
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",
format="rpms",
task_id=90276181,
state=koji.BUILD_STATES['COMPLETE'],
nvr='module-build-macros-0.1-1.{0}'.format(build_one_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{0}".format(build_one_component_release),
batch=1,
tagged=True,
build_time_only=True,
@@ -507,70 +512,70 @@ def reuse_component_init_data():
])

build_two = module_build_service.models.ModuleBuild(
name='testmodule',
stream='master',
name="testmodule",
stream="master",
version=20170219191323,
state=BUILD_STATES['build'],
ref_build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0',
runtime_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0',
build_context='ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1',
context='c40c156c',
koji_tag='module-testmodule-master-20170219191323-c40c156c',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a',
state=BUILD_STATES["build"],
ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",
context="c40c156c",
koji_tag="module-testmodule-master-20170219191323-c40c156c",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a",
batch=1,
owner='Tom Brady',
owner="Tom Brady",
time_submitted=datetime(2017, 2, 19, 16, 8, 18),
time_modified=datetime(2017, 2, 19, 16, 8, 18),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
)

build_two_component_release = get_rpm_release(build_two)

mmd.set_version(int(build_one.version))
xmd = glib.from_variant_dict(mmd.get_xmd())
xmd['mbs']['scmurl'] = build_one.scmurl
xmd['mbs']['commit'] = '55f4a0a2e6cc255c88712a905157ab39315b8fd8'
xmd["mbs"]["scmurl"] = build_one.scmurl
xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"
mmd.set_xmd(glib.dict_values(xmd))
build_two.modulemd = to_text_type(mmd.dumps())
build_two.buildrequires.append(platform_br)

build_two.component_builds.extend([
module_build_service.models.ComponentBuild(
package='perl-Tangerine',
scmurl='https://src.fedoraproject.org/rpms/perl-Tangerine'
'?#4ceea43add2366d8b8c5a622a2fb563b625b9abf',
format='rpms',
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
batch=2,
ref='4ceea43add2366d8b8c5a622a2fb563b625b9abf',
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
),
module_build_service.models.ComponentBuild(
package='perl-List-Compare',
scmurl='https://src.fedoraproject.org/rpms/perl-List-Compare'
'?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb',
format='rpms',
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
batch=2,
ref='76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb',
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
),
module_build_service.models.ComponentBuild(
package='tangerine',
scmurl='https://src.fedoraproject.org/rpms/tangerine'
'?#fbed359411a1baa08d4a88e0d12d426fbf8f602c',
format='rpms',
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
batch=3,
ref='fbed359411a1baa08d4a88e0d12d426fbf8f602c',
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
),
module_build_service.models.ComponentBuild(
package='module-build-macros',
scmurl='/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-'
'macros-0.1-1.module_testmodule_master_20170219191323.src.rpm',
format='rpms',
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170219191323.src.rpm",
format="rpms",
task_id=90276186,
state=koji.BUILD_STATES['COMPLETE'],
nvr='module-build-macros-0.1-1.{0}'.format(build_two_component_release),
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{0}".format(build_two_component_release),
batch=1,
tagged=True,
build_time_only=True,
)
),
])

with make_session(conf) as session:
@@ -588,25 +593,25 @@ def reuse_shared_userspace_init_data():
# are properly built.
current_dir = os.path.dirname(__file__)
formatted_testmodule_yml_path = os.path.join(
current_dir, 'staged_data', 'shared-userspace-570.yaml')
current_dir, "staged_data", "shared-userspace-570.yaml")
mmd = load_mmd_file(formatted_testmodule_yml_path)

module_build = module_build_service.models.ModuleBuild(
name=mmd.get_name(),
stream=mmd.get_stream(),
version=mmd.get_version(),
build_context='e046b867a400a06a3571f3c71142d497895fefbe',
runtime_context='50dd3eb5dde600d072e45d4120e1548ce66bc94a',
state=BUILD_STATES['ready'],
build_context="e046b867a400a06a3571f3c71142d497895fefbe",
runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",
state=BUILD_STATES["ready"],
modulemd=to_text_type(mmd.dumps()),
koji_tag='module-shared-userspace-f26-20170601141014-75f92abb',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453',
koji_tag="module-shared-userspace-f26-20170601141014-75f92abb",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",
batch=16,
owner='Tom Brady',
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
)

components = list(mmd.get_rpm_components().values())
@@ -619,7 +624,7 @@ def reuse_shared_userspace_init_data():
previous_buildorder = pkg.get_buildorder()
batch += 1

pkgref = mmd.get_xmd()['mbs']['rpms'][pkg.get_name()]['ref']
pkgref = mmd.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]
full_url = pkg.get_repository() + "?#" + pkgref

module_build.component_builds.append(
@@ -631,33 +636,35 @@ def reuse_shared_userspace_init_data():
ref=pkgref,
state=1,
tagged=True,
tagged_in_final=True
))
tagged_in_final=True,
)
)

session.add(module_build)
session.commit()

# Create shared-userspace-577, state is WAIT, no component built
formatted_testmodule_yml_path = os.path.join(
current_dir, 'staged_data', 'shared-userspace-577.yaml')
current_dir, "staged_data", "shared-userspace-577.yaml"
)
mmd2 = load_mmd_file(formatted_testmodule_yml_path)

module_build = module_build_service.models.ModuleBuild(
name=mmd2.get_name(),
stream=mmd2.get_stream(),
version=mmd2.get_version(),
build_context='e046b867a400a06a3571f3c71142d497895fefbe',
runtime_context='50dd3eb5dde600d072e45d4120e1548ce66bc94a',
state=BUILD_STATES['done'],
build_context="e046b867a400a06a3571f3c71142d497895fefbe",
runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",
state=BUILD_STATES["done"],
modulemd=to_text_type(mmd2.dumps()),
koji_tag='module-shared-userspace-f26-20170605091544-75f92abb',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453',
koji_tag="module-shared-userspace-f26-20170605091544-75f92abb",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",
batch=0,
owner='Tom Brady',
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
)

components2 = list(mmd2.get_rpm_components().values())
@@ -673,24 +680,28 @@ def reuse_shared_userspace_init_data():
previous_buildorder = pkg.get_buildorder()
batch += 1

pkgref = mmd2.get_xmd()['mbs']['rpms'][pkg.get_name()]['ref']
pkgref = mmd2.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]
full_url = pkg.get_repository() + "?#" + pkgref

module_build.component_builds.append(
module_build_service.models.ComponentBuild(
package=pkg.get_name(),
format="rpms",
scmurl=full_url,
batch=batch,
ref=pkgref
))
package=pkg.get_name(), format="rpms", scmurl=full_url, batch=batch, ref=pkgref)
)

session.add(module_build)
session.commit()
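The make_module signature that follows is the clearest example of black's treatment of long def statements: one parameter per line, a trailing comma after the last parameter, and the closing "):" dedented back to the def's column. The same transformation on a toy function (illustrative only):

# Before (over the line-length limit):
def make_thing(nsvc, requires_list=None, build_requires_list=None, xmd=None, store_to_db=True):
    pass
# After black:
def make_thing(
    nsvc,
    requires_list=None,
    build_requires_list=None,
    xmd=None,
    store_to_db=True,
):
    pass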


def make_module(nsvc, requires_list=None, build_requires_list=None, base_module=None,
filtered_rpms=None, xmd=None, store_to_db=True, virtual_streams=None):
def make_module(
nsvc,
requires_list=None,
build_requires_list=None,
base_module=None,
filtered_rpms=None,
xmd=None,
store_to_db=True,
virtual_streams=None,
):
"""
Creates new models.ModuleBuild defined by `nsvc` string with requires
and buildrequires set according to ``requires_list`` and ``build_requires_list``.
@@ -739,8 +750,7 @@ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module=
build_requires_list = [build_requires_list]

deps_list = []
for requires, build_requires in zip(requires_list,
build_requires_list):
for requires, build_requires in zip(requires_list, build_requires_list):
deps = Modulemd.Dependencies()
for req_name, req_streams in requires.items():
deps.add_requires(req_name, req_streams)
@@ -751,19 +761,19 @@ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module=

# Caller could pass whole xmd including mbs, but if something is missing,
# default values are given here.
xmd = xmd or {'mbs': {}}
xmd_mbs = xmd['mbs']
if 'buildrequires' not in xmd_mbs:
xmd_mbs['buildrequires'] = {}
if 'requires' not in xmd_mbs:
xmd_mbs['requires'] = {}
if 'commit' not in xmd_mbs:
xmd_mbs['commit'] = 'ref_%s' % context
if 'mse' not in xmd_mbs:
xmd_mbs['mse'] = 'true'
xmd = xmd or {"mbs": {}}
xmd_mbs = xmd["mbs"]
if "buildrequires" not in xmd_mbs:
xmd_mbs["buildrequires"] = {}
if "requires" not in xmd_mbs:
xmd_mbs["requires"] = {}
if "commit" not in xmd_mbs:
xmd_mbs["commit"] = "ref_%s" % context
if "mse" not in xmd_mbs:
xmd_mbs["mse"] = "true"

if virtual_streams:
xmd_mbs['virtual_streams'] = virtual_streams
xmd_mbs["virtual_streams"] = virtual_streams

mmd.set_xmd(glib.dict_values(xmd))

@@ -776,17 +786,17 @@ def make_module(nsvc, requires_list=None, build_requires_list=None, base_module=
stream_version=ModuleBuild.get_stream_version(stream),
version=version,
context=context,
state=BUILD_STATES['ready'],
scmurl='https://src.stg.fedoraproject.org/modules/unused.git?#ff1ea79',
state=BUILD_STATES["ready"],
scmurl="https://src.stg.fedoraproject.org/modules/unused.git?#ff1ea79",
batch=1,
owner='Tom Brady',
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
rebuild_strategy="changed-and-after",
build_context=context,
runtime_context=context,
modulemd=to_text_type(mmd.dumps()),
koji_tag=xmd['mbs']['koji_tag'] if 'koji_tag' in xmd['mbs'] else None
koji_tag=xmd["mbs"]["koji_tag"] if "koji_tag" in xmd["mbs"] else None,
)
if base_module:
module_build.buildrequires.append(base_module)

@@ -27,20 +27,17 @@ from module_build_service import Modulemd


BASE_DIR = os.path.dirname(__file__)
STAGED_DATA_DIR = os.path.join(BASE_DIR, 'staged_data')
STAGED_DATA_DIR = os.path.join(BASE_DIR, "staged_data")

_mmd = Modulemd.Module().new_from_file(
os.path.join(STAGED_DATA_DIR, 'platform.yaml'))
_mmd = Modulemd.Module().new_from_file(os.path.join(STAGED_DATA_DIR, "platform.yaml"))
_mmd.upgrade()
PLATFORM_MODULEMD = _mmd.dumps()

_mmd2 = Modulemd.Module().new_from_file(
os.path.join(STAGED_DATA_DIR, 'formatted_testmodule.yaml'))
_mmd2 = Modulemd.Module().new_from_file(os.path.join(STAGED_DATA_DIR, "formatted_testmodule.yaml"))
_mmd2.upgrade()
TESTMODULE_MODULEMD = _mmd2.dumps()

_mmd3 = Modulemd.Module().new_from_file(
os.path.join(STAGED_DATA_DIR, 'formatted_testmodule.yaml'))
_mmd3 = Modulemd.Module().new_from_file(os.path.join(STAGED_DATA_DIR, "formatted_testmodule.yaml"))
_mmd3.upgrade()
_mmd3.set_context("c2c572ed")
TESTMODULE_MODULEMD_SECOND_CONTEXT = _mmd3.dumps()
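The TestAuthModule hunks below reflow patch.dict calls that override the Flask app config. As a reminder of the semantics these tests depend on, a minimal self-contained sketch (unittest.mock behaves the same as the mock backport imported in these tests):

from unittest.mock import patch

config = {"DEBUG": False}
with patch.dict(config, {"OIDC_REQUIRED_SCOPE": "mbs-scope"}):
    # The override is visible only inside the context...
    assert config["OIDC_REQUIRED_SCOPE"] == "mbs-scope"
# ...and the original mapping is restored on exit.
assert "OIDC_REQUIRED_SCOPE" not in config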

@@ -39,8 +39,10 @@ class TestAuthModule:
def test_get_user_no_token(self):
base_dir = path.abspath(path.dirname(__file__))
client_secrets = path.join(base_dir, "client_secrets.json")
with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets,
'OIDC_REQUIRED_SCOPE': 'mbs-scope'}):
with patch.dict(
"module_build_service.app.config",
{"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"},
):
request = mock.MagicMock()
request.cookies.return_value = {}

@@ -49,18 +51,22 @@ class TestAuthModule:
module_build_service.auth.get_user(request)
assert str(cm.value) == "No 'authorization' header found."

@patch('module_build_service.auth._get_token_info')
@patch('module_build_service.auth._get_user_info')
@patch("module_build_service.auth._get_token_info")
@patch("module_build_service.auth._get_user_info")
def test_get_user_failure(self, get_user_info, get_token_info):
base_dir = path.abspath(path.dirname(__file__))
client_secrets = path.join(base_dir, "client_secrets.json")
with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets,
'OIDC_REQUIRED_SCOPE': 'mbs-scope'}):
with patch.dict(
"module_build_service.app.config",
{"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"},
):
# https://www.youtube.com/watch?v=G-LtddOgUCE
name = "Joey Jo Jo Junior Shabadoo"
mocked_get_token_info = {"active": False, "username": name,
"scope": ("openid https://id.fedoraproject.org/scope/groups"
" mbs-scope")}
mocked_get_token_info = {
"active": False,
"username": name,
"scope": ("openid https://id.fedoraproject.org/scope/groups mbs-scope"),
}
get_token_info.return_value = mocked_get_token_info

get_user_info.return_value = {"groups": ["group"]}
@@ -77,21 +83,25 @@ class TestAuthModule:
module_build_service.auth.get_user(request)
assert str(cm.value) == "OIDC token invalid or expired."

@pytest.mark.parametrize('allowed_users', (set(), set(['Joey Jo Jo Junior Shabadoo'])))
@patch.object(mbs_config.Config, 'allowed_users', new_callable=PropertyMock)
@patch('module_build_service.auth._get_token_info')
@patch('module_build_service.auth._get_user_info')
@pytest.mark.parametrize("allowed_users", (set(), set(["Joey Jo Jo Junior Shabadoo"])))
@patch.object(mbs_config.Config, "allowed_users", new_callable=PropertyMock)
@patch("module_build_service.auth._get_token_info")
@patch("module_build_service.auth._get_user_info")
def test_get_user_good(self, get_user_info, get_token_info, m_allowed_users, allowed_users):
m_allowed_users.return_value = allowed_users
base_dir = path.abspath(path.dirname(__file__))
client_secrets = path.join(base_dir, "client_secrets.json")
with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets,
'OIDC_REQUIRED_SCOPE': 'mbs-scope'}):
with patch.dict(
"module_build_service.app.config",
{"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"},
):
# https://www.youtube.com/watch?v=G-LtddOgUCE
name = "Joey Jo Jo Junior Shabadoo"
mocked_get_token_info = {"active": True, "username": name,
"scope": ("openid https://id.fedoraproject.org/scope/groups"
" mbs-scope")}
mocked_get_token_info = {
"active": True,
"username": name,
"scope": ("openid https://id.fedoraproject.org/scope/groups mbs-scope"),
}
get_token_info.return_value = mocked_get_token_info

get_user_info.return_value = {"groups": ["group"]}
@@ -118,14 +128,14 @@ class TestAuthModule:
assert username_second_call == username
assert groups_second_call == groups

@patch.object(mbs_config.Config, 'no_auth', new_callable=PropertyMock, return_value=True)
@patch.object(mbs_config.Config, "no_auth", new_callable=PropertyMock, return_value=True)
def test_disable_authentication(self, conf_no_auth):
request = mock.MagicMock()
username, groups = module_build_service.auth.get_user(request)
assert username == "anonymous"
assert groups == {"packager"}

@patch('module_build_service.auth.client_secrets', None)
@patch("module_build_service.auth.client_secrets", None)
def test_misconfiguring_oidc_client_secrets_should_be_failed(self):
request = mock.MagicMock()
with pytest.raises(module_build_service.errors.Forbidden) as cm:
@@ -133,18 +143,22 @@ class TestAuthModule:
module_build_service.auth.get_user(request)
assert str(cm.value) == "OIDC_CLIENT_SECRETS must be set in server config."

@patch('module_build_service.auth._get_token_info')
@patch('module_build_service.auth._get_user_info')
@patch("module_build_service.auth._get_token_info")
@patch("module_build_service.auth._get_user_info")
def test_get_required_scope_not_present(self, get_user_info, get_token_info):
base_dir = path.abspath(path.dirname(__file__))
client_secrets = path.join(base_dir, "client_secrets.json")
with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets,
'OIDC_REQUIRED_SCOPE': 'mbs-scope'}):
with patch.dict(
"module_build_service.app.config",
{"OIDC_CLIENT_SECRETS": client_secrets, "OIDC_REQUIRED_SCOPE": "mbs-scope"},
):
# https://www.youtube.com/watch?v=G-LtddOgUCE
name = "Joey Jo Jo Junior Shabadoo"
mocked_get_token_info = {"active": True,
"username": name,
"scope": "openid https://id.fedoraproject.org/scope/groups"}
mocked_get_token_info = {
"active": True,
"username": name,
"scope": "openid https://id.fedoraproject.org/scope/groups",
}
get_token_info.return_value = mocked_get_token_info

get_user_info.return_value = {"groups": ["group"]}
@@ -159,20 +173,24 @@ class TestAuthModule:
with pytest.raises(module_build_service.errors.Unauthorized) as cm:
with app.app_context():
module_build_service.auth.get_user(request)
assert str(cm.value) == ("Required OIDC scope 'mbs-scope' not present: "
"['openid', 'https://id.fedoraproject.org/scope/groups']")
assert str(cm.value) == (
"Required OIDC scope 'mbs-scope' not present: "
"['openid', 'https://id.fedoraproject.org/scope/groups']"
)

@patch('module_build_service.auth._get_token_info')
@patch('module_build_service.auth._get_user_info')
@patch("module_build_service.auth._get_token_info")
@patch("module_build_service.auth._get_user_info")
def test_get_required_scope_not_set_in_cfg(self, get_user_info, get_token_info):
base_dir = path.abspath(path.dirname(__file__))
client_secrets = path.join(base_dir, "client_secrets.json")
with patch.dict('module_build_service.app.config', {'OIDC_CLIENT_SECRETS': client_secrets}):
with patch.dict("module_build_service.app.config", {"OIDC_CLIENT_SECRETS": client_secrets}):
# https://www.youtube.com/watch?v=G-LtddOgUCE
name = "Joey Jo Jo Junior Shabadoo"
mocked_get_token_info = {"active": True,
"username": name,
"scope": "openid https://id.fedoraproject.org/scope/groups"}
mocked_get_token_info = {
"active": True,
"username": name,
"scope": "openid https://id.fedoraproject.org/scope/groups",
}
get_token_info.return_value = mocked_get_token_info

get_user_info.return_value = {"groups": ["group"]}
@@ -191,8 +209,13 @@ class TestAuthModule:


class KerberosMockConfig(object):
def __init__(self, uri='ldaps://test.example.local:636', dn='ou=groups,dc=domain,dc=local',
kt='/path/to/keytab', host='mbs.domain.local'):
def __init__(
self,
uri="ldaps://test.example.local:636",
dn="ou=groups,dc=domain,dc=local",
kt="/path/to/keytab",
host="mbs.domain.local",
):
"""
:param uri: a string overriding config.ldap_uri
:param dn: a string overriding config.ldap_groups_dn
@@ -206,27 +229,26 @@ class KerberosMockConfig(object):

def __enter__(self):
self.auth_method_p = patch.object(
mbs_config.Config, 'auth_method', new_callable=PropertyMock)
mbs_config.Config, "auth_method", new_callable=PropertyMock)
mocked_auth_method = self.auth_method_p.start()
mocked_auth_method.return_value = 'kerberos'
mocked_auth_method.return_value = "kerberos"

self.ldap_uri_p = patch.object(
mbs_config.Config, 'ldap_uri', new_callable=PropertyMock)
self.ldap_uri_p = patch.object(mbs_config.Config, "ldap_uri", new_callable=PropertyMock)
mocked_ldap_uri = self.ldap_uri_p.start()
mocked_ldap_uri.return_value = self.uri

self.ldap_dn_p = patch.object(
mbs_config.Config, 'ldap_groups_dn', new_callable=PropertyMock)
mbs_config.Config, "ldap_groups_dn", new_callable=PropertyMock)
mocked_ldap_dn = self.ldap_dn_p.start()
mocked_ldap_dn.return_value = self.dn

self.kerberos_keytab_p = patch.object(
mbs_config.Config, 'kerberos_keytab', new_callable=PropertyMock)
mbs_config.Config, "kerberos_keytab", new_callable=PropertyMock)
mocked_kerberos_keytab = self.kerberos_keytab_p.start()
mocked_kerberos_keytab.return_value = self.kt

self.kerberos_http_host_p = patch.object(
mbs_config.Config, 'kerberos_http_host', new_callable=PropertyMock)
mbs_config.Config, "kerberos_http_host", new_callable=PropertyMock)
mocked_kerberos_http_host = self.kerberos_http_host_p.start()
mocked_kerberos_http_host.return_value = self.host

@@ -239,18 +261,19 @@ class KerberosMockConfig(object):


class TestAuthModuleKerberos:
@pytest.mark.parametrize('allowed_users', (set(), set(['mprahl'])))
@patch('kerberos.authGSSServerInit', return_value=(kerberos.AUTH_GSS_COMPLETE, object()))
@patch('kerberos.authGSSServerStep', return_value=kerberos.AUTH_GSS_COMPLETE)
@patch('kerberos.authGSSServerResponse', return_value='STOKEN')
@patch('kerberos.authGSSServerUserName', return_value='mprahl@EXAMPLE.ORG')
@patch('kerberos.authGSSServerClean')
@patch('kerberos.getServerPrincipalDetails')
@patch.dict('os.environ')
@patch('module_build_service.auth.stack')
@patch.object(mbs_config.Config, 'allowed_users', new_callable=PropertyMock)
def test_get_user_kerberos(self, m_allowed_users, stack, principal, clean, name, response,
step, init, allowed_users):
@pytest.mark.parametrize("allowed_users", (set(), set(["mprahl"])))
@patch("kerberos.authGSSServerInit", return_value=(kerberos.AUTH_GSS_COMPLETE, object()))
@patch("kerberos.authGSSServerStep", return_value=kerberos.AUTH_GSS_COMPLETE)
@patch("kerberos.authGSSServerResponse", return_value="STOKEN")
@patch("kerberos.authGSSServerUserName", return_value="mprahl@EXAMPLE.ORG")
@patch("kerberos.authGSSServerClean")
@patch("kerberos.getServerPrincipalDetails")
@patch.dict("os.environ")
@patch("module_build_service.auth.stack")
@patch.object(mbs_config.Config, "allowed_users", new_callable=PropertyMock)
def test_get_user_kerberos(
self, m_allowed_users, stack, principal, clean, name, response, step, init, allowed_users
):
"""
Test that authentication works with Kerberos and LDAP
"""
@@ -258,7 +281,7 @@ class TestAuthModuleKerberos:
mock_top = Mock()
stack.return_value = mock_top

headers = {'Authorization': 'foobar'}
headers = {"Authorization": "foobar"}
request = mock.MagicMock()
request.headers.return_value = mock.MagicMock(spec_set=dict)
request.headers.__getitem__.side_effect = headers.__getitem__
@@ -266,51 +289,53 @@ class TestAuthModuleKerberos:
request.headers.__contains__.side_effect = headers.__contains__

# Create the mock LDAP instance
server = ldap3.Server('ldaps://test.domain.local')
server = ldap3.Server("ldaps://test.domain.local")
connection = ldap3.Connection(server, client_strategy=ldap3.MOCK_SYNC)
base_dn = 'dc=domain,dc=local'
base_dn = "dc=domain,dc=local"
factory_group_attrs = {
'objectClass': ['top', 'posixGroup'],
'memberUid': ['mprahl', 'tbrady'],
'gidNumber': 1234,
'cn': ['factory2-devs']
"objectClass": ["top", "posixGroup"],
"memberUid": ["mprahl", "tbrady"],
"gidNumber": 1234,
"cn": ["factory2-devs"],
}
devs_group_attrs = {
'objectClass': ['top', 'posixGroup'],
'memberUid': ['mprahl', 'mikeb'],
'gidNumber': 1235,
'cn': ['devs']
"objectClass": ["top", "posixGroup"],
"memberUid": ["mprahl", "mikeb"],
"gidNumber": 1235,
"cn": ["devs"],
}
athletes_group_attrs = {
'objectClass': ['top', 'posixGroup'],
'memberUid': ['tbrady', 'rgronkowski'],
'gidNumber': 1236,
'cn': ['athletes']
"objectClass": ["top", "posixGroup"],
"memberUid": ["tbrady", "rgronkowski"],
"gidNumber": 1236,
"cn": ["athletes"],
}
mprahl_attrs = {
'memberOf': ['cn=Employee,ou=groups,{0}'.format(base_dn)],
'uid': ['mprahl'],
'cn': ['mprahl'],
'objectClass': ['top', 'person']
"memberOf": ["cn=Employee,ou=groups,{0}".format(base_dn)],
"uid": ["mprahl"],
"cn": ["mprahl"],
"objectClass": ["top", "person"],
}
connection.strategy.add_entry('cn=factory2-devs,ou=groups,{0}'.format(base_dn),
factory_group_attrs)
connection.strategy.add_entry('cn=athletes,ou=groups,{0}'.format(base_dn),
athletes_group_attrs)
connection.strategy.add_entry('cn=devs,ou=groups,{0}'.format(base_dn), devs_group_attrs)
connection.strategy.add_entry('cn=mprahl,ou=users,{0}'.format(base_dn), mprahl_attrs)
connection.strategy.add_entry(
"cn=factory2-devs,ou=groups,{0}".format(base_dn), factory_group_attrs
)
connection.strategy.add_entry(
"cn=athletes,ou=groups,{0}".format(base_dn), athletes_group_attrs
)
connection.strategy.add_entry("cn=devs,ou=groups,{0}".format(base_dn), devs_group_attrs)
connection.strategy.add_entry("cn=mprahl,ou=users,{0}".format(base_dn), mprahl_attrs)

# If the user is in allowed_users, then group membership is not checked, and an empty set
# is just returned for the groups
if allowed_users:
expected_groups = set()
else:
expected_groups = {'devs', 'factory2-devs'}
expected_groups = {"devs", "factory2-devs"}

with patch('ldap3.Connection') as mock_ldap_con, KerberosMockConfig():
with patch("ldap3.Connection") as mock_ldap_con, KerberosMockConfig():
mock_ldap_con.return_value = connection
assert module_build_service.auth.get_user_kerberos(request) == \
('mprahl', expected_groups)
assert module_build_service.auth.get_user_kerberos(request) == (
"mprahl", expected_groups)

def test_auth_header_not_set(self):
"""
@@ -327,53 +352,55 @@ class TestAuthModuleKerberos:
with KerberosMockConfig():
try:
module_build_service.auth.get_user_kerberos(request)
assert False, 'Unauthorized error not raised'
assert False, "Unauthorized error not raised"
except FlaskUnauthorized as error:
assert error.response.www_authenticate.to_header().strip() == 'Negotiate'
assert error.response.status == '401 UNAUTHORIZED'
assert error.response.www_authenticate.to_header().strip() == "Negotiate"
assert error.response.status == "401 UNAUTHORIZED"

@patch.dict(environ)
def test_keytab_not_set(self):
"""
Test that authentication fails when the keytab is not set
"""
if 'KRB5_KTNAME' in environ:
del environ['KRB5_KTNAME']
if "KRB5_KTNAME" in environ:
del environ["KRB5_KTNAME"]

headers = {'Authorization': 'foobar'}
headers = {"Authorization": "foobar"}
request = mock.MagicMock()
request.headers.return_value = mock.MagicMock(spec_set=dict)
request.headers.__getitem__.side_effect = headers.__getitem__
request.headers.__setitem__.side_effect = headers.__setitem__
request.headers.__contains__.side_effect = headers.__contains__

with KerberosMockConfig(kt=''):
with KerberosMockConfig(kt=""):
try:
module_build_service.auth.get_user_kerberos(request)
assert False, 'Unauthorized error not raised'
assert False, "Unauthorized error not raised"
except module_build_service.errors.Unauthorized as error:
assert str(error) == ('Kerberos: set the config value of "KERBEROS_KEYTAB" '
'or the environment variable "KRB5_KTNAME" to your '
'keytab file')
assert str(error) == (
'Kerberos: set the config value of "KERBEROS_KEYTAB" '
'or the environment variable "KRB5_KTNAME" to your keytab file'
)

# Set the return value to something not 0 (continue) or 1 (complete)
@patch('kerberos.authGSSServerInit', return_value=(100, object()))
@patch('kerberos.authGSSServerStep', return_value=kerberos.AUTH_GSS_COMPLETE)
@patch('kerberos.authGSSServerResponse', return_value='STOKEN')
@patch('kerberos.authGSSServerUserName', return_value='mprahl@EXAMPLE.ORG')
@patch('kerberos.authGSSServerClean')
@patch('kerberos.getServerPrincipalDetails')
@patch.dict('os.environ')
@patch('module_build_service.auth.stack')
def test_get_user_kerberos_invalid_ticket(self, stack, principal, clean, name, response,
step, init):
@patch("kerberos.authGSSServerInit", return_value=(100, object()))
@patch("kerberos.authGSSServerStep", return_value=kerberos.AUTH_GSS_COMPLETE)
@patch("kerberos.authGSSServerResponse", return_value="STOKEN")
@patch("kerberos.authGSSServerUserName", return_value="mprahl@EXAMPLE.ORG")
@patch("kerberos.authGSSServerClean")
@patch("kerberos.getServerPrincipalDetails")
@patch.dict("os.environ")
@patch("module_build_service.auth.stack")
def test_get_user_kerberos_invalid_ticket(
self, stack, principal, clean, name, response, step, init
):
"""
Test that authentication fails with an invalid Kerberos ticket
"""
mock_top = Mock()
stack.return_value = mock_top

headers = {'Authorization': 'foobar'}
headers = {"Authorization": "foobar"}
request = mock.MagicMock()
request.headers.return_value = mock.MagicMock(spec_set=dict)
request.headers.__getitem__.side_effect = headers.__getitem__
@@ -383,6 +410,6 @@ class TestAuthModuleKerberos:
with KerberosMockConfig():
try:
module_build_service.auth.get_user_kerberos(request)
assert False, 'Forbidden error not raised'
assert False, "Forbidden error not raised"
except module_build_service.errors.Forbidden as error:
assert str(error) == ('Invalid Kerberos ticket')
assert str(error) == ("Invalid Kerberos ticket")
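test_get_user_kerberos above fakes an entire directory server with ldap3's MOCK_SYNC client strategy instead of a live LDAP connection. A stripped-down sketch of the technique (entry attributes are illustrative, not the test's data):

import ldap3

server = ldap3.Server("ldaps://test.domain.local")
connection = ldap3.Connection(server, client_strategy=ldap3.MOCK_SYNC)
# Entries are registered directly on the strategy; no network I/O happens.
connection.strategy.add_entry(
    "cn=devs,ou=groups,dc=domain,dc=local",
    {"objectClass": ["top", "posixGroup"], "memberUid": ["mprahl"], "gidNumber": 1235},
)
connection.bind()
connection.search("ou=groups,dc=domain,dc=local", "(memberUid=mprahl)")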

File diff suppressed because it is too large
@@ -33,29 +33,22 @@ from mock import patch


class TestGenericBuilder:

def setup_method(self, test_method):
init_data(1)
self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()

@patch('module_build_service.resolver.DBResolver')
@patch('module_build_service.resolver.GenericResolver')
@patch("module_build_service.resolver.DBResolver")
@patch("module_build_service.resolver.GenericResolver")
def test_default_buildroot_groups_cache(self, generic_resolver, resolver):
mbs_groups = {
"buildroot": [],
"srpm-buildroot": []
}
mbs_groups = {"buildroot": [], "srpm-buildroot": []}

resolver = mock.MagicMock()
resolver.backend = 'mbs'
resolver.backend = "mbs"
resolver.resolve_profiles.return_value = mbs_groups

expected_groups = {
"build": [],
"srpm-build": []
}
expected_groups = {"build": [], "srpm-build": []}

with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
# Call default_buildroot_groups, the result should be cached.
ret = GenericBuilder.default_buildroot_groups(db.session, self.module)
assert ret == expected_groups
@@ -64,14 +57,14 @@ class TestGenericBuilder:

# Now try calling it again to verify resolve_profiles is not called,
# because it is cached.
with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
ret = GenericBuilder.default_buildroot_groups(db.session, self.module)
assert ret == expected_groups
resolver.resolve_profiles.assert_not_called()
resolver.resolve_profiles.reset_mock()

# And now try clearing the cache and call it again.
with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
GenericBuilder.clear_cache(self.module)
ret = GenericBuilder.default_buildroot_groups(db.session, self.module)
assert ret == expected_groups

@@ -30,110 +30,114 @@ from tests import conf


class TestBuilderUtils:

@patch('requests.get')
@patch('koji.ClientSession')
@patch('module_build_service.builder.utils.execute_cmd')
@patch("requests.get")
@patch("koji.ClientSession")
@patch("module_build_service.builder.utils.execute_cmd")
def test_create_local_repo_from_koji_tag(self, mock_exec_cmd, mock_koji_session, mock_get):
session = Mock()
rpms = [
{
'arch': 'src',
'build_id': 875991,
'name': 'module-build-macros',
'release': '1.module_92011fe6',
'size': 6890,
'version': '0.1'
"arch": "src",
"build_id": 875991,
"name": "module-build-macros",
"release": "1.module_92011fe6",
"size": 6890,
"version": "0.1",
},
{
'arch': 'noarch',
'build_id': 875991,
'name': 'module-build-macros',
'release': '1.module_92011fe6',
'size': 6890,
'version': '0.1'
"arch": "noarch",
"build_id": 875991,
"name": "module-build-macros",
"release": "1.module_92011fe6",
"size": 6890,
"version": "0.1",
},
{
'arch': 'x86_64',
'build_id': 875636,
'name': 'ed-debuginfo',
'release': '2.module_bd6e0eb1',
'size': 81438,
'version': '1.14.1'
"arch": "x86_64",
"build_id": 875636,
"name": "ed-debuginfo",
"release": "2.module_bd6e0eb1",
"size": 81438,
"version": "1.14.1",
},
{
'arch': 'x86_64',
'build_id': 875636,
'name': 'ed',
'release': '2.module_bd6e0eb1',
'size': 80438,
'version': '1.14.1'
"arch": "x86_64",
"build_id": 875636,
"name": "ed",
"release": "2.module_bd6e0eb1",
"size": 80438,
"version": "1.14.1",
},
{
'arch': 'x86_64',
'build_id': 875640,
'name': 'mksh-debuginfo',
'release': '2.module_bd6e0eb1',
'size': 578774,
'version': '54'
"arch": "x86_64",
"build_id": 875640,
"name": "mksh-debuginfo",
"release": "2.module_bd6e0eb1",
"size": 578774,
"version": "54",
},
{
'arch': 'x86_64',
'build_id': 875640,
'name': 'mksh',
'release': '2.module_bd6e0eb1',
'size': 267042,
'version': '54'
}
"arch": "x86_64",
"build_id": 875640,
"name": "mksh",
"release": "2.module_bd6e0eb1",
"size": 267042,
"version": "54",
},
]

builds = [
{
'build_id': 875640,
'name': 'mksh',
'release': '2.module_bd6e0eb1',
'version': '54',
'volume_name': 'prod'
"build_id": 875640,
"name": "mksh",
"release": "2.module_bd6e0eb1",
"version": "54",
"volume_name": "prod",
},
{
'build_id': 875636,
'name': 'ed',
'release': '2.module_bd6e0eb1',
'version': '1.14.1',
'volume_name': 'prod'
"build_id": 875636,
"name": "ed",
"release": "2.module_bd6e0eb1",
"version": "1.14.1",
"volume_name": "prod",
},
{
'build_id': 875991,
'name': 'module-build-macros',
'release': '1.module_92011fe6',
'version': '0.1',
'volume_name': 'prod'
}
"build_id": 875991,
"name": "module-build-macros",
"release": "1.module_92011fe6",
"version": "0.1",
"volume_name": "prod",
},
]

session.listTaggedRPMS.return_value = (rpms, builds)
session.opts = {'topurl': 'https://kojipkgs.stg.fedoraproject.org/'}
session.opts = {"topurl": "https://kojipkgs.stg.fedoraproject.org/"}
mock_koji_session.return_value = session

tag = 'module-testmodule-master-20170405123740-build'
tag = "module-testmodule-master-20170405123740-build"
temp_dir = tempfile.mkdtemp()
try:
utils.create_local_repo_from_koji_tag(conf, tag, temp_dir)
finally:
shutil.rmtree(temp_dir)

url_one = ('https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/module-build-macros/'
'0.1/1.module_92011fe6/noarch/module-build-macros-0.1-1.module_92011fe6.noarch.'
'rpm')
url_two = ('https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/ed/1.14.1/'
'2.module_bd6e0eb1/x86_64/ed-1.14.1-2.module_bd6e0eb1.x86_64.rpm')
url_three = ('https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/mksh/54/'
'2.module_bd6e0eb1/x86_64/mksh-54-2.module_bd6e0eb1.x86_64.rpm')
url_one = (
"https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/module-build-macros/"
"0.1/1.module_92011fe6/noarch/module-build-macros-0.1-1.module_92011fe6.noarch.rpm"
)
url_two = (
"https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/ed/1.14.1/"
"2.module_bd6e0eb1/x86_64/ed-1.14.1-2.module_bd6e0eb1.x86_64.rpm"
)
url_three = (
"https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/mksh/54/"
"2.module_bd6e0eb1/x86_64/mksh-54-2.module_bd6e0eb1.x86_64.rpm"
)

expected_calls = [
call(url_one, stream=True, timeout=60),
call(url_two, stream=True, timeout=60),
call(url_three, stream=True, timeout=60)
call(url_three, stream=True, timeout=60),
]
for expected_call in expected_calls:
assert expected_call in mock_get.call_args_list
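The loop above checks membership in mock_get.call_args_list rather than using assert_has_calls, so the order in which the RPMs were downloaded does not matter. A minimal sketch of the pattern, with a hypothetical URL:

from unittest.mock import call, patch
import requests

with patch("requests.get") as mock_get:
    requests.get("https://example.test/a.rpm", stream=True, timeout=60)

expected = call("https://example.test/a.rpm", stream=True, timeout=60)
assert expected in mock_get.call_args_list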
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -17,7 +17,6 @@ from tests import clean_database, make_module


class TestMockModuleBuilder:

    def setup_method(self, test_method):
        clean_database()
        self.resultdir = tempfile.mkdtemp()
@@ -32,60 +31,63 @@ class TestMockModuleBuilder:
                "module_id": 2,
                "package": "ed",
                "format": "rpms",
                "scmurl": ("https://src.fedoraproject.org/rpms/ed"
                           "?#01bf8330812fea798671925cc537f2f29b0bd216"),
                "scmurl": (
                    "https://src.fedoraproject.org/rpms/ed"
                    "?#01bf8330812fea798671925cc537f2f29b0bd216"
                ),
                "batch": 2,
                "ref": "01bf8330812fea798671925cc537f2f29b0bd216"
                "ref": "01bf8330812fea798671925cc537f2f29b0bd216",
            },
            {
                "module_id": 2,
                "package": "mksh",
                "format": "rpms",
                "scmurl": ("https://src.fedoraproject.org/rpms/mksh"
                           "?#f70fd11ddf96bce0e2c64309706c29156b39141d"),
                "scmurl": (
                    "https://src.fedoraproject.org/rpms/mksh"
                    "?#f70fd11ddf96bce0e2c64309706c29156b39141d"
                ),
                "batch": 3,
                "ref": "f70fd11ddf96bce0e2c64309706c29156b39141d"
                "ref": "f70fd11ddf96bce0e2c64309706c29156b39141d",
            },
        ]

        base_dir = os.path.abspath(os.path.dirname(__file__))
        mmd = Modulemd.Module().new_from_file(os.path.join(
            base_dir, '..', 'staged_data', 'testmodule-with-filters.yaml'))
        mmd = Modulemd.Module().new_from_file(
            os.path.join(base_dir, "..", "staged_data", "testmodule-with-filters.yaml"))
        mmd.upgrade()
        mmd.set_xmd(glib.dict_values({
            'mbs': {
                'rpms': {
                    'ed': {'ref': '01bf8330812fea798671925cc537f2f29b0bd216'},
                    'mksh': {'ref': 'f70fd11ddf96bce0e2c64309706c29156b39141d'}
            "mbs": {
                "rpms": {
                    "ed": {"ref": "01bf8330812fea798671925cc537f2f29b0bd216"},
                    "mksh": {"ref": "f70fd11ddf96bce0e2c64309706c29156b39141d"},
                },
                'buildrequires':
                {
                    'host': {
                        'version': '20171024133034',
                        'filtered_rpms': [],
                        'stream': 'master',
                        'ref': '6df253bb3c53e84706c01b8ab2d5cac24f0b6d45',
                        'context': '00000000'
                "buildrequires": {
                    "host": {
                        "version": "20171024133034",
                        "filtered_rpms": [],
                        "stream": "master",
                        "ref": "6df253bb3c53e84706c01b8ab2d5cac24f0b6d45",
                        "context": "00000000",
                    },
                    'platform': {
                        'version': '20171028112959',
                        'filtered_rpms': [],
                        'stream': 'master',
                        'ref': '4f7787370a931d57421f9f9555fc41c3e31ff1fa',
                        'context': '00000000'
                    "platform": {
                        "version": "20171028112959",
                        "filtered_rpms": [],
                        "stream": "master",
                        "ref": "4f7787370a931d57421f9f9555fc41c3e31ff1fa",
                        "context": "00000000",
                    },
                },
                "scmurl": "file:///testdir",
                "commit": "5566bc792ec7a03bb0e28edd1b104a96ba342bd8",
                "requires": {
                    "platform": {
                        "version": "20171028112959",
                        "filtered_rpms": [],
                        "stream": "master",
                        "ref": "4f7787370a931d57421f9f9555fc41c3e31ff1fa",
                        "context": "00000000",
                    }
                },
                'scmurl': 'file:///testdir',
                'commit': '5566bc792ec7a03bb0e28edd1b104a96ba342bd8',
                'requires': {
                    'platform': {
                        'version': '20171028112959',
                        'filtered_rpms': [],
                        'stream': 'master',
                        'ref': '4f7787370a931d57421f9f9555fc41c3e31ff1fa',
                        'context': '00000000'
                    }
                }
            }
        }))
        module = ModuleBuild.create(
@@ -113,37 +115,39 @@ class TestMockModuleBuilder:
    @mock.patch("module_build_service.conf.system", new="mock")
    def test_createrepo_filter_last_batch(self, *args):
        with make_session(conf) as session:
            module = self._create_module_with_filters(session, 3, koji.BUILD_STATES['COMPLETE'])
            module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])

            builder = MockModuleBuilder("mcurlej", module, conf, module.koji_tag,
                                        module.component_builds)
            builder = MockModuleBuilder(
                "mcurlej", module, conf, module.koji_tag, module.component_builds
            )
            builder.resultsdir = self.resultdir
            rpms = [
                "ed-1.14.1-4.module+24957a32.x86_64.rpm",
                "mksh-56b-1.module+24957a32.x86_64.rpm",
                "module-build-macros-0.1-1.module+24957a32.noarch.rpm"
                "module-build-macros-0.1-1.module+24957a32.noarch.rpm",
            ]
            rpm_qf_output = dedent("""\
                ed 0 1.14.1 4.module+24957a32 x86_64
                mksh 0 56b-1 module+24957a32 x86_64
                module-build-macros 0 0.1 1.module+24957a32 noarch
                """)
            """)
            with mock.patch("os.listdir", return_value=rpms):
                with mock.patch("subprocess.check_output", return_value=rpm_qf_output):
                    builder._createrepo()

            with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
                pkglist = fd.read().strip()
                rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split('\n')]
                rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]
                assert "ed" not in rpm_names

    @mock.patch("module_build_service.conf.system", new="mock")
    def test_createrepo_not_last_batch(self):
        with make_session(conf) as session:
            module = self._create_module_with_filters(session, 2, koji.BUILD_STATES['COMPLETE'])
            module = self._create_module_with_filters(session, 2, koji.BUILD_STATES["COMPLETE"])

            builder = MockModuleBuilder("mcurlej", module, conf, module.koji_tag,
                                        module.component_builds)
            builder = MockModuleBuilder(
                "mcurlej", module, conf, module.koji_tag, module.component_builds
            )
            builder.resultsdir = self.resultdir
            rpms = [
                "ed-1.14.1-4.module+24957a32.x86_64.rpm",
@@ -152,23 +156,23 @@ class TestMockModuleBuilder:
            rpm_qf_output = dedent("""\
                ed 0 1.14.1 4.module+24957a32 x86_64
                mksh 0 56b-1 module+24957a32 x86_64
                """)
            """)
            with mock.patch("os.listdir", return_value=rpms):
                with mock.patch("subprocess.check_output", return_value=rpm_qf_output):
                    builder._createrepo()

            with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
                pkglist = fd.read().strip()
                rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split('\n')]
                rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]
                assert "ed" in rpm_names

    @mock.patch("module_build_service.conf.system", new="mock")
    def test_createrepo_empty_rmp_list(self, *args):
        with make_session(conf) as session:
            module = self._create_module_with_filters(session, 3, koji.BUILD_STATES['COMPLETE'])
            module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])

            builder = MockModuleBuilder("mcurlej", module, conf, module.koji_tag,
                                        module.component_builds)
            builder = MockModuleBuilder(
                "mcurlej", module, conf, module.koji_tag, module.component_builds)
            builder.resultsdir = self.resultdir
            rpms = []
            with mock.patch("os.listdir", return_value=rpms):
@@ -180,7 +184,6 @@ class TestMockModuleBuilder:


class TestMockModuleBuilderAddRepos:

    def setup_method(self, test_method):
        clean_database(add_platform_module=False)
        import_fake_base_module("platform:f29:1:000000")
@@ -190,27 +193,32 @@ class TestMockModuleBuilderAddRepos:

    @mock.patch("module_build_service.conf.system", new="mock")
    @mock.patch(
        'module_build_service.config.Config.base_module_repofiles',
        "module_build_service.config.Config.base_module_repofiles",
        new_callable=mock.PropertyMock,
        return_value=["/etc/yum.repos.d/bar.repo", "/etc/yum.repos.d/bar-updates.repo"],
        create=True)
        create=True,
    )
    @mock.patch("module_build_service.builder.MockModuleBuilder.open", create=True)
    @mock.patch(
        "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._load_mock_config")
        "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._load_mock_config"
    )
    @mock.patch(
        "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config")
    def test_buildroot_add_repos(self, write_config, load_config, patched_open,
                                 base_module_repofiles):
        "module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config"
    )
    def test_buildroot_add_repos(
        self, write_config, load_config, patched_open, base_module_repofiles
    ):
        patched_open.side_effect = [
            mock.mock_open(read_data="[fake]\nrepofile 1\n").return_value,
            mock.mock_open(read_data="[fake]\nrepofile 2\n").return_value,
            mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value]
            mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value,
        ]

        builder = MockModuleBuilder("user", self.app, conf, "module-app", [])

        dependencies = {
            "repofile://": [self.platform.mmd()],
            "repofile:///etc/yum.repos.d/foo.repo": [self.foo.mmd(), self.app.mmd()]
            "repofile:///etc/yum.repos.d/foo.repo": [self.foo.mmd(), self.app.mmd()],
        }

        builder.buildroot_add_repos(dependencies)

@@ -30,7 +30,7 @@ from os import path
from module_build_service.utils import to_text_type

import module_build_service.messaging
import module_build_service.scheduler.handlers.repos # noqa
import module_build_service.scheduler.handlers.repos  # noqa
from module_build_service import models, conf, build_logs, Modulemd, glib

from mock import patch, Mock, call, mock_open
@@ -48,23 +48,25 @@ GET_USER_RV = {
    "krb_principal": "mszyslak@FEDORAPROJECT.ORG",
    "name": "Moe Szyslak",
    "status": 0,
    "usertype": 0
    "usertype": 0,
}


class TestBuild:

    def setup_method(self, test_method):
        init_data(1, contexts=True)
        module = models.ModuleBuild.query.filter_by(id=2).one()
        module.cg_build_koji_tag = "f27-module-candidate"
        self.cg = KojiContentGenerator(module, conf)

        self.p_read_config = patch('koji.read_config', return_value={
            'authtype': 'kerberos',
            'timeout': 60,
            'server': 'http://koji.example.com/'
        })
        self.p_read_config = patch(
            "koji.read_config",
            return_value={
                "authtype": "kerberos",
                "timeout": 60,
                "server": "http://koji.example.com/",
            },
        )
        self.mock_read_config = self.p_read_config.start()

        # Ensure that there is no build log from other tests
@@ -79,10 +81,12 @@ class TestBuild:

        # Necessary to restart the twisted reactor for the next test.
        import sys
        del sys.modules['twisted.internet.reactor']
        del sys.modules['moksha.hub.reactor']
        del sys.modules['moksha.hub']
        import moksha.hub.reactor # noqa

        del sys.modules["twisted.internet.reactor"]
        del sys.modules["moksha.hub.reactor"]
        del sys.modules["moksha.hub"]
        import moksha.hub.reactor  # noqa

        try:
            file_path = build_logs.path(self.cg.module)
            os.remove(file_path)
@@ -91,15 +95,17 @@ class TestBuild:

    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    @patch("subprocess.Popen")
    @patch("subprocess.check_output", return_value='1.4')
    @patch("subprocess.check_output", return_value="1.4")
    @patch("pkg_resources.get_distribution")
    @patch("platform.linux_distribution")
    @patch("platform.machine")
    @patch(("module_build_service.builder.KojiContentGenerator.KojiContentGenerator."
            "_koji_rpms_in_tag"))
    @patch(
        "module_build_service.builder.KojiContentGenerator.KojiContentGenerator._koji_rpms_in_tag"
    )
    @pytest.mark.parametrize("devel", (False, True))
    def test_get_generator_json(self, rpms_in_tag, machine, distro, pkg_res, coutput, popen,
                                ClientSession, devel):
    def test_get_generator_json(
        self, rpms_in_tag, machine, distro, pkg_res, coutput, popen, ClientSession, devel
    ):
        """ Test generation of content generator json """
        koji_session = ClientSession.return_value
        koji_session.getUser.return_value = GET_USER_RV
@@ -109,21 +115,21 @@ class TestBuild:
        pkg_res.return_value = Mock()
        pkg_res.return_value.version = "current-tested-version"
        rpm_mock = Mock()
        rpm_out = b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n" \
                  b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2"
        attrs = {'communicate.return_value': (rpm_out, 'error'),
                 'wait.return_value': 0}
        rpm_out = (
            b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n"
            b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2"
        )
        attrs = {"communicate.return_value": (rpm_out, "error"), "wait.return_value": 0}
        rpm_mock.configure_mock(**attrs)
        popen.return_value = rpm_mock

        tests_dir = path.abspath(path.dirname(__file__))
        rpm_in_tag_path = path.join(tests_dir,
                                    "test_get_generator_json_rpms_in_tag.json")
        rpm_in_tag_path = path.join(tests_dir, "test_get_generator_json_rpms_in_tag.json")
        with open(rpm_in_tag_path) as rpms_in_tag_file:
            rpms_in_tag.return_value = json.load(rpms_in_tag_file)

        expected_output_path = path.join(tests_dir,
                                         "test_get_generator_json_expected_output_with_log.json")
        expected_output_path = path.join(
            tests_dir, "test_get_generator_json_expected_output_with_log.json")
        with open(expected_output_path) as expected_output_file:
            expected_output = json.load(expected_output_file)

@@ -148,14 +154,16 @@ class TestBuild:

    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    @patch("subprocess.Popen")
    @patch("subprocess.check_output", return_value='1.4')
    @patch("subprocess.check_output", return_value="1.4")
    @patch("pkg_resources.get_distribution")
    @patch("platform.linux_distribution")
    @patch("platform.machine")
    @patch(("module_build_service.builder.KojiContentGenerator.KojiContentGenerator."
            "_koji_rpms_in_tag"))
    def test_get_generator_json_no_log(self, rpms_in_tag, machine, distro, pkg_res, coutput, popen,
                                       ClientSession):
    @patch(
        "module_build_service.builder.KojiContentGenerator.KojiContentGenerator._koji_rpms_in_tag"
    )
    def test_get_generator_json_no_log(
        self, rpms_in_tag, machine, distro, pkg_res, coutput, popen, ClientSession
    ):
        """ Test generation of content generator json """
        koji_session = ClientSession.return_value
        koji_session.getUser.return_value = GET_USER_RV
@@ -165,21 +173,20 @@ class TestBuild:
        pkg_res.return_value = Mock()
        pkg_res.return_value.version = "current-tested-version"
        rpm_mock = Mock()
        rpm_out = b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n" \
                  b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2"
        attrs = {'communicate.return_value': (rpm_out, 'error'),
                 'wait.return_value': 0}
        rpm_out = (
            b"rpm-name;1.0;r1;x86_64;(none);sigmd5:1;sigpgp:p;siggpg:g\n"
            b"rpm-name-2;2.0;r2;i686;1;sigmd5:2;sigpgp:p2;siggpg:g2"
        )
        attrs = {"communicate.return_value": (rpm_out, "error"), "wait.return_value": 0}
        rpm_mock.configure_mock(**attrs)
        popen.return_value = rpm_mock

        tests_dir = path.abspath(path.dirname(__file__))
        rpm_in_tag_path = path.join(tests_dir,
                                    "test_get_generator_json_rpms_in_tag.json")
        rpm_in_tag_path = path.join(tests_dir, "test_get_generator_json_rpms_in_tag.json")
        with open(rpm_in_tag_path) as rpms_in_tag_file:
            rpms_in_tag.return_value = json.load(rpms_in_tag_file)

        expected_output_path = path.join(tests_dir,
                                         "test_get_generator_json_expected_output.json")
        expected_output_path = path.join(tests_dir, "test_get_generator_json_expected_output.json")
        with open(expected_output_path) as expected_output_file:
            expected_output = json.load(expected_output_file)
        self.cg._load_koji_tag(koji_session)
@@ -216,7 +223,7 @@ class TestBuild:
        """ Test that the CG build is tagged. """
        koji_session = ClientSession.return_value
        koji_session.getUser.return_value = GET_USER_RV
        koji_session.getTag.return_value = {'id': 123}
        koji_session.getTag.return_value = {"id": 123}

        self.cg._tag_cg_build()

@@ -232,13 +239,14 @@ class TestBuild:
        """ Test that the CG build is tagged to default tag. """
        koji_session = ClientSession.return_value
        koji_session.getUser.return_value = GET_USER_RV
        koji_session.getTag.side_effect = [{}, {'id': 123}]
        koji_session.getTag.side_effect = [{}, {"id": 123}]

        self.cg._tag_cg_build()

        assert koji_session.getTag.mock_calls == [
            call(self.cg.module.cg_build_koji_tag),
            call(conf.koji_cg_default_build_tag)]
            call(conf.koji_cg_default_build_tag),
        ]
        koji_session.tagBuild.assert_called_once_with(123, "nginx-0-2.10e50d06")

        # tagBuild requires logging into a session in advance.
@@ -250,7 +258,7 @@ class TestBuild:
        """ Test that the CG build is not tagged when no tag set. """
        koji_session = ClientSession.return_value
        koji_session.getUser.return_value = GET_USER_RV
        koji_session.getTag.side_effect = [{}, {'id': 123}]
        koji_session.getTag.side_effect = [{}, {"id": 123}]

        self.cg.module.cg_build_koji_tag = None
        self.cg._tag_cg_build()
@@ -275,19 +283,18 @@ class TestBuild:

    @patch("module_build_service.builder.KojiContentGenerator.open", create=True)
    def test_get_arch_mmd_output(self, patched_open):
        patched_open.return_value = mock_open(
            read_data=self.cg.mmd.encode("utf-8")).return_value
        patched_open.return_value = mock_open(read_data=self.cg.mmd.encode("utf-8")).return_value
        ret = self.cg._get_arch_mmd_output("./fake-dir", "x86_64")
        assert ret == {
            'arch': 'x86_64',
            'buildroot_id': 1,
            'checksum': '96b7739ffa3918e6ac3e3bd422b064ea',
            'checksum_type': 'md5',
            'components': [],
            'extra': {'typeinfo': {'module': {}}},
            'filename': 'modulemd.x86_64.txt',
            'filesize': 1138,
            'type': 'file'
            "arch": "x86_64",
            "buildroot_id": 1,
            "checksum": "96b7739ffa3918e6ac3e3bd422b064ea",
            "checksum_type": "md5",
            "components": [],
            "extra": {"typeinfo": {"module": {}}},
            "filename": "modulemd.x86_64.txt",
            "filesize": 1138,
            "type": "file",
        }

    @patch("module_build_service.builder.KojiContentGenerator.open", create=True)
@@ -298,17 +305,18 @@ class TestBuild:
        mmd.set_rpm_artifacts(rpm_artifacts)
        mmd_data = to_text_type(mmd.dumps()).encode("utf-8")

        patched_open.return_value = mock_open(
            read_data=mmd_data).return_value
        patched_open.return_value = mock_open(read_data=mmd_data).return_value

        self.cg.rpms = [{
            "name": "dhcp",
            "version": "4.3.5",
            "release": "5.module_2118aef6",
            "arch": "x86_64",
            "epoch": "12",
            "payloadhash": "hash",
        }]
        self.cg.rpms = [
            {
                "name": "dhcp",
                "version": "4.3.5",
                "release": "5.module_2118aef6",
                "arch": "x86_64",
                "epoch": "12",
                "payloadhash": "hash",
            }
        ]

        self.cg.rpms_dict = {
            "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64": {
@@ -323,21 +331,25 @@ class TestBuild:

        ret = self.cg._get_arch_mmd_output("./fake-dir", "x86_64")
        assert ret == {
            'arch': 'x86_64',
            'buildroot_id': 1,
            'checksum': '502e46889affec24d98a281289104d4d',
            'checksum_type': 'md5',
            'components': [{u'arch': 'x86_64',
                            u'epoch': '12',
                            u'name': 'dhcp',
                            u'release': '5.module_2118aef6',
                            u'sigmd5': 'hash',
                            u'type': u'rpm',
                            u'version': '4.3.5'}],
            'extra': {'typeinfo': {'module': {}}},
            'filename': 'modulemd.x86_64.txt',
            'filesize': 319,
            'type': 'file'
            "arch": "x86_64",
            "buildroot_id": 1,
            "checksum": "502e46889affec24d98a281289104d4d",
            "checksum_type": "md5",
            "components": [
                {
                    u"arch": "x86_64",
                    u"epoch": "12",
                    u"name": "dhcp",
                    u"release": "5.module_2118aef6",
                    u"sigmd5": "hash",
                    u"type": u"rpm",
                    u"version": "4.3.5",
                }
            ],
            "extra": {"typeinfo": {"module": {}}},
            "filename": "modulemd.x86_64.txt",
            "filesize": 319,
            "type": "file",
        }

    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
@@ -348,69 +360,71 @@ class TestBuild:

        rpms = [
            {
                'id': 1,
                'arch': 'src',
                'epoch': None,
                'build_id': 875991,
                'name': 'module-build-macros',
                'release': '1.module_92011fe6',
                'version': '0.1'
                "id": 1,
                "arch": "src",
                "epoch": None,
                "build_id": 875991,
                "name": "module-build-macros",
                "release": "1.module_92011fe6",
                "version": "0.1",
            },
            {
                'id': 2,
                'arch': 'noarch',
                'epoch': None,
                'build_id': 875991,
                'name': 'module-build-macros',
                'release': '1.module_92011fe6',
                'version': '0.1'
                "id": 2,
                "arch": "noarch",
                "epoch": None,
                "build_id": 875991,
                "name": "module-build-macros",
                "release": "1.module_92011fe6",
                "version": "0.1",
            },
            {
                'id': 3,
                'arch': 'src',
                'epoch': 3,
                'build_id': 875636,
                'name': 'ed',
                'release': '2.module_bd6e0eb1',
                'version': '1.14.1'
                "id": 3,
                "arch": "src",
                "epoch": 3,
                "build_id": 875636,
                "name": "ed",
                "release": "2.module_bd6e0eb1",
                "version": "1.14.1",
            },
            {
                'id': 4,
                'arch': 'x86_64',
                'epoch': 3,
                'build_id': 875636,
                'name': 'ed',
                'release': '2.module_bd6e0eb1',
                'version': '1.14.1'
                "id": 4,
                "arch": "x86_64",
                "epoch": 3,
                "build_id": 875636,
                "name": "ed",
                "release": "2.module_bd6e0eb1",
                "version": "1.14.1",
            },
        ]

        builds = [
            {
                'build_id': 875636,
                'epoch': 3,
                'name': 'ed',
                'release': '2.module_bd6e0eb1',
                'version': '1.14.1',
                'nvr': 'ed-2.module_bd6e0eb1-1.14.1',
                "build_id": 875636,
                "epoch": 3,
                "name": "ed",
                "release": "2.module_bd6e0eb1",
                "version": "1.14.1",
                "nvr": "ed-2.module_bd6e0eb1-1.14.1",
            },
            {
                'build_id': 875991,
                'epoch': None,
                'name': 'module-build-macros',
                'release': '1.module_92011fe6',
                'version': '0.1',
                'nvr': 'module-build-macros-0.1-1.module_92011fe6',
            }
                "build_id": 875991,
                "epoch": None,
                "name": "module-build-macros",
                "release": "1.module_92011fe6",
                "version": "0.1",
                "nvr": "module-build-macros-0.1-1.module_92011fe6",
            },
        ]

        koji_session.listTaggedRPMS.return_value = (rpms, builds)
        koji_session.multiCall.side_effect = [
            # getRPMHeaders response
            [[{'excludearch': ["x86_64"], 'exclusivearch': [], 'license': 'MIT'}],
             [{'excludearch': [], 'exclusivearch': ["x86_64"], 'license': 'GPL'}],
             [{'license': 'MIT'}],
             [{'license': 'GPL'}]]
            [
                [{"excludearch": ["x86_64"], "exclusivearch": [], "license": "MIT"}],
                [{"excludearch": [], "exclusivearch": ["x86_64"], "license": "GPL"}],
                [{"license": "MIT"}],
                [{"license": "GPL"}],
            ]
        ]

        rpms = self.cg._koji_rpms_in_tag("tag")
@@ -448,33 +462,33 @@ class TestBuild:

        rpms = [
            {
                'id': 1,
                'arch': 'src',
                'epoch': None,
                'build_id': 875991,
                'name': 'module-build-macros',
                'release': '1.module_92011fe6',
                'version': '0.1'
                "id": 1,
                "arch": "src",
                "epoch": None,
                "build_id": 875991,
                "name": "module-build-macros",
                "release": "1.module_92011fe6",
                "version": "0.1",
            },
            {
                'id': 2,
                'arch': 'noarch',
                'epoch': None,
                'build_id': 875991,
                'name': 'module-build-macros',
                'release': '1.module_92011fe6',
                'version': '0.1'
                "id": 2,
                "arch": "noarch",
                "epoch": None,
                "build_id": 875991,
                "name": "module-build-macros",
                "release": "1.module_92011fe6",
                "version": "0.1",
            },
        ]

        builds = [
            {
                'build_id': 875991,
                'epoch': None,
                'name': 'module-build-macros',
                'release': '1.module_92011fe6',
                'version': '0.1',
                'nvr': 'module-build-macros-0.1-1.module_92011fe6',
                "build_id": 875991,
                "epoch": None,
                "name": "module-build-macros",
                "release": "1.module_92011fe6",
                "version": "0.1",
                "nvr": "module-build-macros-0.1-1.module_92011fe6",
            }
        ]

@@ -487,8 +501,7 @@ class TestBuild:

        with pytest.raises(RuntimeError) as cm:
            self.cg._koji_rpms_in_tag("tag")
        assert str(cm.value) == (
            "No RPM headers received from Koji for RPM module-build-macros")
        assert str(cm.value) == ("No RPM headers received from Koji for RPM module-build-macros")

        koji_session.multiCall.side_effect = [
            # getRPMHeaders response
@@ -498,11 +511,19 @@ class TestBuild:
        with pytest.raises(RuntimeError) as cm:
            self.cg._koji_rpms_in_tag("tag")
        assert str(cm.value) == (
            "No RPM 'license' header received from Koji for RPM module-build-macros")
            "No RPM 'license' header received from Koji for RPM module-build-macros"
        )

    def _add_test_rpm(self, nevra, srpm_nevra, multilib=None,
                      koji_srpm_nevra=None, excludearch=None, exclusivearch=None,
                      license=None):
    def _add_test_rpm(
        self,
        nevra,
        srpm_nevra,
        multilib=None,
        koji_srpm_nevra=None,
        excludearch=None,
        exclusivearch=None,
        license=None,
    ):
        """
        Helper method to add test RPM to ModuleBuild used by KojiContentGenerator
        and also to Koji tag used to generate the Content Generator build.
@@ -553,24 +574,34 @@ class TestBuild:

    @pytest.mark.parametrize("devel", (False, True))
    def test_fill_in_rpms_list(self, devel):
        self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.s390x",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.s390x",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.s390x", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.s390x",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )

        self.cg.devel = devel
        mmd = self.cg.module.mmd()
@@ -589,41 +620,52 @@ class TestBuild:
        # is not enabled for them - therefore we want to include them in -devel.
        assert set(mmd.get_rpm_artifacts().get()) == set([
            "dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686"])
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
        ])

    def test_fill_in_rpms_exclusivearch(self):
        self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           exclusivearch=["x86_64"])
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           exclusivearch=["ppc64le"])
        self._add_test_rpm(
            "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            exclusivearch=["x86_64"],
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            exclusivearch=["ppc64le"],
        )

        mmd = self.cg.module.mmd()
        mmd = self.cg._fill_in_rpms_list(mmd, "x86_64")

        # Only dhcp-libs should be filled in, because perl-Tangerine has different
        # exclusivearch.
        assert set(mmd.get_rpm_artifacts().get()) == set([
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
        ])
        assert set(mmd.get_rpm_artifacts().get()) == set(
            ["dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch"])

    def test_fill_in_rpms_excludearch(self):
        self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           excludearch=["x86_64"])
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           excludearch=["ppc64le"])
        self._add_test_rpm(
            "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            excludearch=["x86_64"],
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.noarch",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            excludearch=["ppc64le"],
        )

        mmd = self.cg.module.mmd()
        mmd = self.cg._fill_in_rpms_list(mmd, "x86_64")
@@ -636,24 +678,36 @@ class TestBuild:

    @pytest.mark.parametrize("devel", (False, True))
    def test_fill_in_rpms_rpm_whitelist(self, devel):
        self._add_test_rpm("python27-dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("python27-dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("python27-dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "python27-dhcp-12:4.3.5-5.module_2118aef6.src",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "python27-dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "python27-dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            koji_srpm_nevra="python27-dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            koji_srpm_nevra="foo-perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )

        self.cg.devel = devel
        mmd = self.cg.module.mmd()
@@ -680,34 +734,56 @@ class TestBuild:

    @pytest.mark.parametrize("devel", (False, True))
    def test_fill_in_rpms_list_filters(self, devel):
        self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.x86_64",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.x86_64",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-debuginfo-12:4.3.5-5.module_2118aef6.i686",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "dhcp-libs-debugsource-12:4.3.5-5.module_2118aef6.i686",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-debuginfo-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-debugsource-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )

        self.cg.devel = devel
        mmd = self.cg.module.mmd()
@@ -741,24 +817,36 @@ class TestBuild:

    @pytest.mark.parametrize("devel", (False, True))
    def test_fill_in_rpms_list_multilib(self, devel):
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
                           multilib=["x86_64"])
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
                           multilib=["x86_64"])
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
                           multilib=["x86_64"])
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           multilib=["ppc64le"])
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           multilib=["ppc64le"])
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           multilib=["ppc64le"])
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
            "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
            multilib=["x86_64"],
        )
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
            "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
            multilib=["x86_64"],
        )
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
            "dhcp-libs-12:4.3.5-5.module_2118aef6.src",
            multilib=["x86_64"],
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            multilib=["ppc64le"],
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            multilib=["ppc64le"],
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            multilib=["ppc64le"],
        )

        self.cg.devel = devel
        mmd = self.cg.module.mmd()
@@ -775,31 +863,36 @@ class TestBuild:
                "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
            ])
        else:
            assert set(mmd.get_rpm_artifacts().get()) == set([
                "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686"])
            assert set(mmd.get_rpm_artifacts().get()) == set(
                ["perl-Tangerine-12:4.3.5-5.module_2118aef6.i686"])

    @pytest.mark.parametrize(
        "licenses, expected", (
            (["GPL", "MIT"], ["GPL", "MIT"]),
            (["GPL", ""], ["GPL"]),
            (["GPL", "GPL"], ["GPL"]),
        )
        "licenses, expected",
        ((["GPL", "MIT"], ["GPL", "MIT"]), (["GPL", ""], ["GPL"]), (["GPL", "GPL"], ["GPL"])),
    )
    def test_fill_in_rpms_list_license(self, licenses, expected):
        self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           license=licenses[0])
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.i686",
                           "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
                           license=licenses[1])
        self._add_test_rpm("perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
                           "perl-Tangerine-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-12:4.3.5-5.module_2118aef6.src", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.x86_64",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            license=licenses[0],
        )
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.i686", "dhcp-12:4.3.5-5.module_2118aef6.src")
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.x86_64",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
            license=licenses[1],
        )
        self._add_test_rpm(
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.i686",
            "perl-Tangerine-12:4.3.5-5.module_2118aef6.src",
        )

        mmd = self.cg.module.mmd()
        mmd = self.cg._fill_in_rpms_list(mmd, "x86_64")
@@ -812,12 +905,16 @@ class TestBuild:
        # A build has ExcludeArch: i686 (because it only works on 64 bit arches).
        # A noarch package is built there, and this noarch packages should be
        # included in x86_64 repo.
        self._add_test_rpm("dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           excludearch=["i686"])
        self._add_test_rpm("dhcp-12:4.3.5-5.module_2118aef6.src",
                           "dhcp-12:4.3.5-5.module_2118aef6.src",
                           excludearch=["i686"])
        self._add_test_rpm(
            "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            excludearch=["i686"],
        )
        self._add_test_rpm(
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            "dhcp-12:4.3.5-5.module_2118aef6.src",
            excludearch=["i686"],
        )

        self.cg.devel = devel
        mmd = self.cg.module.mmd()
@@ -828,7 +925,8 @@ class TestBuild:
            # multilib set. The "dhcp" SRPM should be also included.
            assert set(mmd.get_rpm_artifacts().get()) == set([
                "dhcp-libs-12:4.3.5-5.module_2118aef6.noarch",
                "dhcp-12:4.3.5-5.module_2118aef6.src"])
                "dhcp-12:4.3.5-5.module_2118aef6.src",
            ])
        else:
            assert set(mmd.get_rpm_artifacts().get()) == set([])

@@ -849,14 +947,19 @@ class TestBuild:

        assert "mbs" not in mmd.get_xmd().keys()

    @patch('module_build_service.builder.KojiContentGenerator.SCM')
    @patch("module_build_service.builder.KojiContentGenerator.SCM")
    def test_prepare_file_directory_modulemd_src(self, mocked_scm):
        FakeSCM(mocked_scm, 'testmodule', 'testmodule_init.yaml',
                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
        FakeSCM(
            mocked_scm,
            "testmodule",
            "testmodule_init.yaml",
            "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
        )
        mmd = self.cg.module.mmd()
        mmd.set_xmd(glib.dict_values({"mbs": {
            "commit": "foo",
            "scmurl": "git://localhost/modules/foo.git#master"}}))
        mmd.set_xmd(
            glib.dict_values(
                {"mbs": {"commit": "foo", "scmurl": "git://localhost/modules/foo.git#master"}})
        )
        self.cg.module.modulemd = to_text_type(mmd.dumps())
        file_dir = self.cg._prepare_file_directory()
        with io.open(path.join(file_dir, "modulemd.src.txt"), encoding="utf-8") as mmd:
@@ -883,8 +986,8 @@ class TestBuild:
    @patch("module_build_service.builder.KojiContentGenerator.KojiContentGenerator._tag_cg_build")
    @patch("module_build_service.builder.KojiContentGenerator.KojiContentGenerator._load_koji_tag")
    def test_koji_cg_koji_import(self, tag_loader, tagger, cl_session):
        ''' Tests whether build is still tagged even if there's an exception in CGImport '''
        """ Tests whether build is still tagged even if there's an exception in CGImport """
        cl_session.return_value.CGImport = Mock(
            side_effect=koji.GenericError('Build already exists asdv'))
            side_effect=koji.GenericError("Build already exists asdv"))
        self.cg.koji_import()
        tagger.assert_called()

@@ -33,24 +33,25 @@ from tests import init_data


class TestLogger:

    def setup_method(self, test_method):
        init_data(1)
        log.debug(test_method.__module__)
        try:
            # py2
            test_id = '.'.join([
            test_id = ".".join([
                path.splitext(path.basename(__file__))[0],
                test_method.im_class.__name__,
                test_method.im_func.__name__])
                test_method.im_func.__name__,
            ])
        except AttributeError:
            # py3
            test_id = '.'.join([
            test_id = ".".join([
                path.splitext(path.basename(__file__))[0],
                test_method.__self__.__class__.__name__,
                test_method.__self__.__class__.__name__])
                test_method.__self__.__class__.__name__,
            ])

        self.base = tempfile.mkdtemp(prefix='mbs-', suffix='-%s' % test_id)
        self.base = tempfile.mkdtemp(prefix="mbs-", suffix="-%s" % test_id)
        self.name_format = "build-{id}.log"
        print("Storing build logs in %r" % self.base)
        self.build_log = ModuleBuildLogs(self.base, self.name_format)

@@ -30,14 +30,17 @@ class TestMBSManage:
    def setup_method(self, test_method):
        init_data()

    @pytest.mark.parametrize(('identifier', 'is_valid'), (
        ('', False),
        ('spam', False),
        ('spam:bacon', True),
        ('spam:bacon:eggs', True),
        ('spam:bacon:eggs:ham', True),
        ('spam:bacon:eggs:ham:sausage', False),
    ))
    @pytest.mark.parametrize(
        ("identifier", "is_valid"),
        (
            ("", False),
            ("spam", False),
            ("spam:bacon", True),
            ("spam:bacon:eggs", True),
            ("spam:bacon:eggs:ham", True),
            ("spam:bacon:eggs:ham:sausage", False),
        ),
    )
    def test_retire_identifier_validation(self, identifier, is_valid):
        if is_valid:
            retire(identifier)
@@ -45,29 +48,31 @@ class TestMBSManage:
        with pytest.raises(ValueError):
            retire(identifier)

    @pytest.mark.parametrize(('overrides', 'identifier', 'changed_count'), (
        ({'name': 'pickme'}, 'pickme:eggs', 1),
        ({'stream': 'pickme'}, 'spam:pickme', 1),
        ({'version': 'pickme'}, 'spam:eggs:pickme', 1),
        ({'context': 'pickme'}, 'spam:eggs:ham:pickme', 1),

        ({}, 'spam:eggs', 3),
        ({'version': 'pickme'}, 'spam:eggs', 3),
        ({'context': 'pickme'}, 'spam:eggs:ham', 3),
    ))
    @patch('module_build_service.manage.prompt_bool')
    @pytest.mark.parametrize(
        ("overrides", "identifier", "changed_count"),
        (
            ({"name": "pickme"}, "pickme:eggs", 1),
            ({"stream": "pickme"}, "spam:pickme", 1),
            ({"version": "pickme"}, "spam:eggs:pickme", 1),
            ({"context": "pickme"}, "spam:eggs:ham:pickme", 1),
            ({}, "spam:eggs", 3),
            ({"version": "pickme"}, "spam:eggs", 3),
            ({"context": "pickme"}, "spam:eggs:ham", 3),
        ),
    )
    @patch("module_build_service.manage.prompt_bool")
    def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):
        prompt_bool.return_value = True

        with make_session(conf) as session:
            module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES['ready']).all()
            module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
            # Verify our assumption of the amount of ModuleBuilds in database
            assert len(module_builds) == 3

            for x, build in enumerate(module_builds):
                build.name = 'spam'
                build.stream = 'eggs'
                build.version = 'ham'
                build.name = "spam"
                build.stream = "eggs"
                build.version = "ham"
                build.context = str(x)

            for attr, value in overrides.items():
@@ -77,38 +82,44 @@ class TestMBSManage:

            retire(identifier)
            retired_module_builds = (
                session.query(ModuleBuild).filter_by(state=BUILD_STATES['garbage']).all())
                session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
            )

            assert len(retired_module_builds) == changed_count
            for x in range(changed_count):
                assert retired_module_builds[x].id == module_builds[x].id
                assert retired_module_builds[x].state == BUILD_STATES['garbage']
                assert retired_module_builds[x].state == BUILD_STATES["garbage"]

    @pytest.mark.parametrize(('confirm_prompt', 'confirm_arg', 'confirm_expected'), (
        (True, False, True),
        (True, True, True),
        (False, False, False),
        (False, True, True),
    ))
    @patch('module_build_service.manage.prompt_bool')
    def test_retire_build_confirm_prompt(self, prompt_bool, confirm_prompt, confirm_arg,
                                         confirm_expected):
    @pytest.mark.parametrize(
        ("confirm_prompt", "confirm_arg", "confirm_expected"),
        (
            (True, False, True),
            (True, True, True),
            (False, False, False),
            (False, True, True)
        ),
    )
    @patch("module_build_service.manage.prompt_bool")
    def test_retire_build_confirm_prompt(
        self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected
    ):
        prompt_bool.return_value = confirm_prompt

        with make_session(conf) as session:
            module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES['ready']).all()
            module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
            # Verify our assumption of the amount of ModuleBuilds in database
            assert len(module_builds) == 3

            for x, build in enumerate(module_builds):
                build.name = 'spam'
                build.stream = 'eggs'
                build.name = "spam"
                build.stream = "eggs"

            session.commit()

            retire('spam:eggs', confirm_arg)
            retire("spam:eggs", confirm_arg)
            retired_module_builds = (
                session.query(ModuleBuild).filter_by(state=BUILD_STATES['garbage']).all())
                session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
            )

            expected_changed_count = 3 if confirm_expected else 0
            assert len(retired_module_builds) == expected_changed_count

@@ -22,29 +22,28 @@
|
||||
|
||||
|
||||
from module_build_service import messaging
|
||||
from module_build_service.messaging import KojiRepoChange # noqa
|
||||
from module_build_service.messaging import KojiRepoChange # noqa
|
||||
|
||||
|
||||
class TestFedmsgMessaging:
|
||||
|
||||
def test_buildsys_state_change(self):
|
||||
# https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134
|
||||
buildsys_state_change_msg = {
|
||||
'msg': {
|
||||
'attribute': 'state',
|
||||
'build_id': 614503,
|
||||
'instance': 'primary',
|
||||
'name': 'plasma-systemsettings',
|
||||
'new': 1,
|
||||
'old': 0,
|
||||
'owner': 'dvratil',
|
||||
'release': '1.fc23',
|
||||
'task_id': 9053697,
|
||||
'version': '5.2.1'
|
||||
"msg": {
|
||||
"attribute": "state",
|
||||
"build_id": 614503,
|
||||
"instance": "primary",
|
||||
"name": "plasma-systemsettings",
|
||||
"new": 1,
|
||||
"old": 0,
|
||||
"owner": "dvratil",
|
||||
"release": "1.fc23",
|
||||
"task_id": 9053697,
|
||||
"version": "5.2.1",
|
||||
},
|
||||
'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71',
|
||||
'timestamp': 1424789698.0,
|
||||
'topic': 'org.fedoraproject.prod.buildsys.build.state.change'
|
||||
"msg_id": "2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71",
|
||||
"timestamp": 1424789698.0,
|
||||
"topic": "org.fedoraproject.prod.buildsys.build.state.change",
|
||||
}
|
||||
|
||||
msg = messaging.FedmsgMessageParser().parse(buildsys_state_change_msg)
|
||||
@@ -64,11 +63,11 @@ class TestFedmsgMessaging:
|
||||
"user": "mbs/mbs.fedoraproject.org",
|
||||
"version": "0.1",
|
||||
"owner": "mbs/mbs.fedoraproject.org",
|
||||
"release": "1.module_0c3d13fd"
|
||||
"release": "1.module_0c3d13fd",
|
||||
},
|
||||
'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71',
|
||||
'timestamp': 1424789698.0,
|
||||
'topic': 'org.fedoraproject.prod.buildsys.tag'
|
||||
"msg_id": "2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71",
|
||||
"timestamp": 1424789698.0,
|
||||
"topic": "org.fedoraproject.prod.buildsys.tag",
|
||||
}
|
||||
|
||||
msg = messaging.FedmsgMessageParser().parse(buildsys_tag_msg)
|
||||
@@ -83,11 +82,11 @@ class TestFedmsgMessaging:
|
||||
"instance": "primary",
|
||||
"repo_id": 728809,
|
||||
"tag": "module-f0f7e44f3c6cccab-build",
|
||||
"tag_id": 653
|
||||
"tag_id": 653,
|
||||
},
|
||||
'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71',
|
||||
'timestamp': 1424789698.0,
|
||||
'topic': 'org.fedoraproject.prod.buildsys.repo.done'
|
||||
"msg_id": "2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71",
|
||||
"timestamp": 1424789698.0,
|
||||
"topic": "org.fedoraproject.prod.buildsys.repo.done",
|
||||
}
|
||||
|
||||
msg = messaging.FedmsgMessageParser().parse(buildsys_tag_msg)

@@ -32,7 +32,6 @@ from module_build_service import glib


class TestMMDResolver:

def setup_method(self, test_method):
self.mmd_resolver = MMDResolver()

@@ -87,7 +86,8 @@ class TestMMDResolver:
return mmd

@pytest.mark.parametrize(
"deps, expected", (
"deps, expected",
(
([], "None"),
([{"x": []}], "module(x)"),
([{"x": ["1"]}], "(module(x) with module(x:1))"),
@@ -96,7 +96,7 @@ class TestMMDResolver:
([{"x": ["-1", "2"]}], "(module(x) with module(x:2))"),
([{"x": [], "y": []}], "(module(x) and module(y))"),
([{"x": []}, {"y": []}], "(module(x) or module(y))"),
)
),
)
def test_deps2reqs(self, deps, expected):
# Sort by keys here to avoid unordered dicts
@@ -105,48 +105,64 @@ class TestMMDResolver:
assert str(reqs) == expected

@pytest.mark.parametrize(
"buildrequires, expected", (
({"platform": []}, [
[["platform:f28:0:c0:x86_64"],
["platform:f29:0:c0:x86_64"]],
]),
({"platform": ["f28"]}, [
[["platform:f28:0:c0:x86_64"]],
]),
({"platform": ["-f28"]}, [
[["platform:f29:0:c0:x86_64"]],
]),
({"gtk": [], "qt": []}, [
[["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"]],
]),
({"gtk": [], "qt": [], "platform": []}, [
[["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:3:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:4:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:3:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:4:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"]],
]),
([{"qt": [], "platform": ["f28"]},
{"gtk": [], "platform": ["-f28"]}], [
[["qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"]],
[["gtk:3:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"]],
]),
({"mess": []}, [
[["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"]],
]),
({"mess": [], "platform": []}, [
[["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["mess:1:0:c0:x86_64", "gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"]],
]),
)
"buildrequires, expected",
(
({"platform": []}, [[["platform:f28:0:c0:x86_64"], ["platform:f29:0:c0:x86_64"]]]),
({"platform": ["f28"]}, [[["platform:f28:0:c0:x86_64"]]]),
({"platform": ["-f28"]}, [[["platform:f29:0:c0:x86_64"]]]),
(
{"gtk": [], "qt": []},
[
[
["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
]
],
),
(
{"gtk": [], "qt": [], "platform": []},
[
[
["gtk:3:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:3:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:4:0:c8:x86_64", "qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["gtk:3:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:4:0:c9:x86_64", "qt:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:3:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:4:0:c9:x86_64", "qt:5:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
]
],
),
(
[{"qt": [], "platform": ["f28"]}, {"gtk": [], "platform": ["-f28"]}],
[
[
["qt:4:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["qt:5:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
],
[
["gtk:3:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
["gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
],
],
),
(
{"mess": []},
[[["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"]]],
),
(
{"mess": [], "platform": []},
[
[
["mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", "platform:f28:0:c0:x86_64"],
["mess:1:0:c0:x86_64", "gtk:4:0:c9:x86_64", "platform:f29:0:c0:x86_64"],
]
],
),
),
)
def test_solve(self, buildrequires, expected):
modules = (
@@ -160,8 +176,10 @@ class TestMMDResolver:
("qt:4:0:c9", {"platform": ["f29"]}),
("qt:5:0:c8", {"platform": ["f28"]}),
("qt:5:0:c9", {"platform": ["f29"]}),
("mess:1:0:c0", [{"gtk": ["3"], "platform": ["f28"]},
{"gtk": ["4"], "platform": ["-f28"]}]),
(
"mess:1:0:c0",
[{"gtk": ["3"], "platform": ["f28"]}, {"gtk": ["4"], "platform": ["-f28"]}],
),
)
for n, req in modules:
self.mmd_resolver.add_modules(self._make_mmd(n, req))
@@ -169,58 +187,77 @@ class TestMMDResolver:
app = self._make_mmd("app:1:0", buildrequires)
expanded = self.mmd_resolver.solve(app)

expected = set(frozenset(["app:1:0:%d:src" % c] + e)
for c, exp in enumerate(expected)
for e in exp)
expected = set(
frozenset(["app:1:0:%d:src" % c] + e) for c, exp in enumerate(expected) for e in exp
)

assert expanded == expected

@pytest.mark.parametrize(
"buildrequires, xmd_buildrequires, expected", (
"buildrequires, xmd_buildrequires, expected",
(
# BR all platform streams -> build for all platform streams.
({"platform": []}, {}, [
[["platform:el8.2.0.z:0:c0:x86_64"],
["platform:el8.1.0:0:c0:x86_64"],
["platform:el8.0.0:0:c0:x86_64"],
["platform:el7.6.0:0:c0:x86_64"]],
]),
(
{"platform": []},
{},
[
[
["platform:el8.2.0.z:0:c0:x86_64"],
["platform:el8.1.0:0:c0:x86_64"],
["platform:el8.0.0:0:c0:x86_64"],
["platform:el7.6.0:0:c0:x86_64"],
]
],
),
# BR "el8" platform stream -> build for all el8 platform streams.
({"platform": ["el8"]}, {}, [
[["platform:el8.2.0.z:0:c0:x86_64"],
["platform:el8.1.0:0:c0:x86_64"],
["platform:el8.0.0:0:c0:x86_64"]],
]),
(
{"platform": ["el8"]},
{},
[
[
["platform:el8.2.0.z:0:c0:x86_64"],
["platform:el8.1.0:0:c0:x86_64"],
["platform:el8.0.0:0:c0:x86_64"],
]
],
),
# BR "el8.1.0" platform stream -> build just for el8.1.0.
({"platform": ["el8"]}, ["platform:el8.1.0"], [
[["platform:el8.1.0:0:c0:x86_64"]],
]),
({"platform": ["el8"]}, ["platform:el8.1.0"], [[["platform:el8.1.0:0:c0:x86_64"]]]),
# BR platform:el8.1.0 and gtk:3, which is not built against el8.1.0,
# but it is built only against el8.0.0 -> cherry-pick gtk:3 from el8.0.0
# and build once against platform:el8.1.0.
({"platform": ["el8"], "gtk": ["3"]}, ["platform:el8.1.0"], [
[["platform:el8.1.0:0:c0:x86_64", "gtk:3:0:c8:x86_64", ]],
]),
(
{"platform": ["el8"], "gtk": ["3"]},
["platform:el8.1.0"],
[[["platform:el8.1.0:0:c0:x86_64", "gtk:3:0:c8:x86_64"]]],
),
# BR platform:el8.2.0 and gtk:3, this time gtk:3 build against el8.2.0 exists
# -> use both platform and gtk from el8.2.0 and build once.
({"platform": ["el8"], "gtk": ["3"]}, ["platform:el8.2.0.z"], [
[["platform:el8.2.0.z:0:c0:x86_64", "gtk:3:1:c8:x86_64", ]],
]),
(
{"platform": ["el8"], "gtk": ["3"]},
["platform:el8.2.0.z"],
[[["platform:el8.2.0.z:0:c0:x86_64", "gtk:3:1:c8:x86_64"]]],
),
# BR platform:el8.2.0 and mess:1 which is built against platform:el8.1.0 and
# requires gtk:3 which is built against platform:el8.2.0 and platform:el8.0.0
# -> Use platform:el8.2.0 and
# -> cherry-pick mess:1 from el8.1.0 and
# -> use gtk:3:1 from el8.2.0.
({"platform": ["el8"], "mess": ["1"]}, ["platform:el8.2.0.z"], [
[["platform:el8.2.0.z:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:1:c8:x86_64", ]],
]),
(
{"platform": ["el8"], "mess": ["1"]},
["platform:el8.2.0.z"],
[[["platform:el8.2.0.z:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:1:c8:x86_64"]]],
),
# BR platform:el8.1.0 and mess:1 which is built against platform:el8.1.0 and
# requires gtk:3 which is built against platform:el8.2.0 and platform:el8.0.0
# -> Use platform:el8.1.0 and
# -> Used mess:1 from el8.1.0 and
# -> cherry-pick gtk:3:0 from el8.0.0.
({"platform": ["el8"], "mess": ["1"]}, ["platform:el8.1.0"], [
[["platform:el8.1.0:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64", ]],
]),
(
{"platform": ["el8"], "mess": ["1"]},
["platform:el8.1.0"],
[[["platform:el8.1.0:0:c0:x86_64", "mess:1:0:c0:x86_64", "gtk:3:0:c8:x86_64"]]],
),
# BR platform:el8.0.0 and mess:1 which is built against platform:el8.1.0 and
# requires gtk:3 which is built against platform:el8.2.0 and platform:el8.0.0
# -> No valid combination, because mess:1 is only available in el8.1.0 and later.
@@ -230,7 +267,7 @@ class TestMMDResolver:
# ({"platform": ["el8"], "gtk": ["3"]}, {}, [
# [["platform:el8.2.0:0:c0:x86_64", "gtk:3:1:c8:x86_64"]],
# ]),
)
),
)
def test_solve_virtual_streams(self, buildrequires, xmd_buildrequires, expected):
modules = (
@@ -244,8 +281,7 @@ class TestMMDResolver:
("mess:1:0:c0", [{"gtk": ["3"], "platform": ["el8"]}], {"platform:el8.1.0"}, None),
)
for n, req, xmd_br, virtual_streams in modules:
self.mmd_resolver.add_modules(self._make_mmd(
n, req, xmd_br, virtual_streams))
self.mmd_resolver.add_modules(self._make_mmd(n, req, xmd_br, virtual_streams))

app = self._make_mmd("app:1:0", buildrequires, xmd_buildrequires)
if not expected:
@@ -255,69 +291,65 @@ class TestMMDResolver:
else:
expanded = self.mmd_resolver.solve(app)

expected = set(frozenset(["app:1:0:%d:src" % c] + e)
for c, exp in enumerate(expected)
for e in exp)
expected = set(
frozenset(["app:1:0:%d:src" % c] + e) for c, exp in enumerate(expected) for e in exp)

assert expanded == expected

@pytest.mark.parametrize('app_buildrequires, modules, err_msg_regex', (
# app --br--> gtk:1 --req--> bar:1* ---req---> platform:f29
# \--br--> foo:1 --req--> bar:2* ---req--/
@pytest.mark.parametrize(
"app_buildrequires, modules, err_msg_regex",
(
{'gtk': '1', 'foo': '1'},
# app --br--> gtk:1 --req--> bar:1* ---req---> platform:f29
# \--br--> foo:1 --req--> bar:2* ---req--/
(
('platform:f29:0:c0', {}),
('gtk:1:1:c01', {'bar': ['1']}),
('bar:1:0:c02', {'platform': ['f29']}),
('foo:1:1:c03', {'bar': ['2']}),
('bar:2:0:c04', {'platform': ['f29']}),
{"gtk": "1", "foo": "1"},
(
("platform:f29:0:c0", {}),
("gtk:1:1:c01", {"bar": ["1"]}),
("bar:1:0:c02", {"platform": ["f29"]}),
("foo:1:1:c03", {"bar": ["2"]}),
("bar:2:0:c04", {"platform": ["f29"]}),
),
"bar:1:0:c02 and bar:2:0:c04",
),
'bar:1:0:c02 and bar:2:0:c04',
),
# app --br--> gtk:1 --req--> bar:1* ----------req----------> platform:f29
# \--br--> foo:1 --req--> baz:1 --req--> bar:2* --req--/
(
{'gtk': '1', 'foo': '1'},
# app --br--> gtk:1 --req--> bar:1* ----------req----------> platform:f29
# \--br--> foo:1 --req--> baz:1 --req--> bar:2* --req--/
(
('platform:f29:0:c0', {}),

('gtk:1:1:c01', {'bar': ['1']}),
('bar:1:0:c02', {'platform': ['f29']}),

('foo:1:1:c03', {'baz': ['1']}),
('baz:1:1:c04', {'bar': ['2']}),
('bar:2:0:c05', {'platform': ['f29']}),
{"gtk": "1", "foo": "1"},
(
("platform:f29:0:c0", {}),
("gtk:1:1:c01", {"bar": ["1"]}),
("bar:1:0:c02", {"platform": ["f29"]}),
("foo:1:1:c03", {"baz": ["1"]}),
("baz:1:1:c04", {"bar": ["2"]}),
("bar:2:0:c05", {"platform": ["f29"]}),
),
"bar:1:0:c02 and bar:2:0:c05",
),
'bar:1:0:c02 and bar:2:0:c05',
),
# Test multiple conflict pairs are detected.
# app --br--> gtk:1 --req--> bar:1* ---------req-----------\
# \--br--> foo:1 --req--> baz:1 --req--> bar:2* ---req---> platform:f29
# \--br--> pkga:1 --req--> perl:5' -------req-----------/
# \--br--> pkgb:1 --req--> perl:6' -------req-----------/
(
{'gtk': '1', 'foo': '1', 'pkga': '1', 'pkgb': '1'},
# Test multiple conflict pairs are detected.
# app --br--> gtk:1 --req--> bar:1* ---------req-----------\
# \--br--> foo:1 --req--> baz:1 --req--> bar:2* ---req---> platform:f29
# \--br--> pkga:1 --req--> perl:5' -------req-----------/
# \--br--> pkgb:1 --req--> perl:6' -------req-----------/
(
('platform:f29:0:c0', {}),

('gtk:1:1:c01', {'bar': ['1']}),
('bar:1:0:c02', {'platform': ['f29']}),

('foo:1:1:c03', {'baz': ['1']}),
('baz:1:1:c04', {'bar': ['2']}),
('bar:2:0:c05', {'platform': ['f29']}),

('pkga:1:0:c06', {'perl': ['5']}),
('perl:5:0:c07', {'platform': ['f29']}),

('pkgb:1:0:c08', {'perl': ['6']}),
('perl:6:0:c09', {'platform': ['f29']}),
{"gtk": "1", "foo": "1", "pkga": "1", "pkgb": "1"},
(
("platform:f29:0:c0", {}),
("gtk:1:1:c01", {"bar": ["1"]}),
("bar:1:0:c02", {"platform": ["f29"]}),
("foo:1:1:c03", {"baz": ["1"]}),
("baz:1:1:c04", {"bar": ["2"]}),
("bar:2:0:c05", {"platform": ["f29"]}),
("pkga:1:0:c06", {"perl": ["5"]}),
("perl:5:0:c07", {"platform": ["f29"]}),
("pkgb:1:0:c08", {"perl": ["6"]}),
("perl:6:0:c09", {"platform": ["f29"]}),
),
# MMD Resolver should still catch a conflict
"bar:1:0:c02 and bar:2:0:c05",
),
# MMD Resolver should still catch a conflict
'bar:1:0:c02 and bar:2:0:c05',
),
))
)
def test_solve_stream_conflicts(self, app_buildrequires, modules, err_msg_regex):
for n, req in modules:
self.mmd_resolver.add_modules(self._make_mmd(n, req))
@@ -349,7 +381,8 @@ class TestMMDResolver:

# Build only against f28 and f29, because "gtk:3" is not built against f30.
expected = set([
frozenset(['gtk:3:0:c8:x86_64', 'app:1:0:0:src', 'platform:f28:0:c0:x86_64']),
frozenset(['gtk:3:0:c9:x86_64', 'app:1:0:0:src', 'platform:f29:0:c0:x86_64'])])
frozenset(["gtk:3:0:c8:x86_64", "app:1:0:0:src", "platform:f28:0:c0:x86_64"]),
frozenset(["gtk:3:0:c9:x86_64", "app:1:0:0:src", "platform:f29:0:c0:x86_64"]),
])

assert expanded == expected

@@ -34,7 +34,7 @@ app = module_build_service.app

conf = init_config(app)

datadir = os.path.dirname(__file__) + '/data/'
datadir = os.path.dirname(__file__) + "/data/"


def module_build_from_modulemd(yaml):
@@ -43,22 +43,22 @@ def module_build_from_modulemd(yaml):
build.name = mmd.get_name()
build.stream = mmd.get_stream()
build.version = mmd.get_version()
build.state = BUILD_STATES['ready']
build.state = BUILD_STATES["ready"]
build.modulemd = yaml
build.koji_tag = None
build.batch = 0
build.owner = 'some_other_user'
build.owner = "some_other_user"
build.time_submitted = datetime(2016, 9, 3, 12, 28, 33)
build.time_modified = datetime(2016, 9, 3, 12, 28, 40)
build.time_completed = None
build.rebuild_strategy = 'changed-and-after'
build.rebuild_strategy = "changed-and-after"
return build


def init_data():
clean_database()
for filename in os.listdir(datadir):
with open(datadir + filename, 'r') as f:
with open(datadir + filename, "r") as f:
yaml = f.read()
build = module_build_from_modulemd(yaml)
db.session.add(build)

@@ -27,7 +27,7 @@ from mock import patch
from module_build_service import conf, Modulemd
from module_build_service.models import ComponentBuild, ModuleBuild, make_session
from module_build_service.utils import to_text_type
from tests import (init_data as init_data_contexts, clean_database, make_module)
from tests import init_data as init_data_contexts, clean_database, make_module
from tests.test_models import init_data, module_build_from_modulemd


@@ -38,15 +38,16 @@ class TestModels:
def test_app_sqlalchemy_events(self):
with make_session(conf) as session:
component_build = ComponentBuild()
component_build.package = 'before_models_committed'
component_build.scmurl = \
('git://pkgs.domain.local/rpms/before_models_committed?'
'#9999999999999999999999999999999999999999')
component_build.format = 'rpms'
component_build.package = "before_models_committed"
component_build.scmurl = (
"git://pkgs.domain.local/rpms/before_models_committed?"
"#9999999999999999999999999999999999999999"
)
component_build.format = "rpms"
component_build.task_id = 999999999
component_build.state = 1
component_build.nvr = ('before_models_committed-0.0.0-0'
'.module_before_models_committed_0_0')
component_build.nvr = \
"before_models_committed-0.0.0-0.module_before_models_committed_0_0"
component_build.batch = 1
component_build.module_id = 1

@@ -66,16 +67,20 @@ class TestModels:
determined"""
build = ModuleBuild.query.filter_by(id=1).one()
yaml_path = os.path.join(
os.path.dirname(__file__), '..', 'staged_data', 'testmodule_dependencies.yaml')
os.path.dirname(__file__), "..", "staged_data", "testmodule_dependencies.yaml")
mmd = Modulemd.Module.new_from_file(yaml_path)
mmd.upgrade()
build.modulemd = to_text_type(mmd.dumps())
(build.ref_build_context, build.build_context, build.runtime_context,
build.context) = ModuleBuild.contexts_from_mmd(build.modulemd)
assert build.ref_build_context == 'f6e2aeec7576196241b9afa0b6b22acf2b6873d7'
assert build.build_context == '089df24993c037e10174f3fa7342ab4dc191a4d4'
assert build.runtime_context == 'bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c'
assert build.context == '3ee22b28'
(
build.ref_build_context,
build.build_context,
build.runtime_context,
build.context,
) = ModuleBuild.contexts_from_mmd(build.modulemd)
assert build.ref_build_context == "f6e2aeec7576196241b9afa0b6b22acf2b6873d7"
assert build.build_context == "089df24993c037e10174f3fa7342ab4dc191a4d4"
assert build.runtime_context == "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c"
assert build.context == "3ee22b28"

def test_siblings_property(self):
""" Tests that the siblings property returns the ID of all modules with
@@ -83,73 +88,75 @@ class TestModels:
"""
clean_database()
yaml_path = os.path.join(
os.path.dirname(__file__), '..', 'staged_data', 'formatted_testmodule.yaml')
os.path.dirname(__file__), "..", "staged_data", "formatted_testmodule.yaml")
mmd = Modulemd.Module.new_from_file(yaml_path)
mmd.upgrade()
with make_session(conf) as session:
for i in range(3):
build = module_build_from_modulemd(to_text_type(mmd.dumps()))
build.build_context = 'f6e2aeec7576196241b9afa0b6b22acf2b6873d' + str(i)
build.runtime_context = 'bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c' + str(i)
build.build_context = "f6e2aeec7576196241b9afa0b6b22acf2b6873d" + str(i)
build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(i)
session.add(build)
session.commit()
build_one = ModuleBuild.query.get(2)
assert build_one.siblings == [3, 4]

@pytest.mark.parametrize('stream,right_pad,expected', [
['f27', True, 270000.0],
['f27.02.30', True, 270230.0],
['f27', False, 27.0],
['f27.02.30', False, 270230.0],
['el8', True, 080000.0],
['el8.1.0', True, 080100.0],
['el8.z', True, 080000.2],
['el8.1.0.z', True, 080100.3],
])
@patch.object(conf, 'stream_suffixes', new={
r'el\d+\.z': 0.2, r'el\d+\.\d+\.\d+\.z': 0.3
})
@pytest.mark.parametrize(
"stream,right_pad,expected",
[
["f27", True, 270000.0],
["f27.02.30", True, 270230.0],
["f27", False, 27.0],
["f27.02.30", False, 270230.0],
["el8", True, 080000.0],
["el8.1.0", True, 080100.0],
["el8.z", True, 080000.2],
["el8.1.0.z", True, 080100.3],
],
)
@patch.object(conf, "stream_suffixes", new={r"el\d+\.z": 0.2, r"el\d+\.\d+\.\d+\.z": 0.3})
def test_get_stream_version(self, stream, right_pad, expected):
assert expected == ModuleBuild.get_stream_version(stream, right_pad)


class TestModelsGetStreamsContexts:

def test_get_last_build_in_all_streams(self):
init_data_contexts(contexts=True)
with make_session(conf) as session:
builds = ModuleBuild.get_last_build_in_all_streams(
session, "nginx")
builds = ["%s:%s:%s" % (build.name, build.stream, str(build.version))
for build in builds]
builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")
builds = [
"%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds
]
assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]

def test_get_last_build_in_all_stream_last_version(self):
init_data_contexts(contexts=False)
with make_session(conf) as session:
builds = ModuleBuild.get_last_build_in_all_streams(
session, "nginx")
builds = ["%s:%s:%s" % (build.name, build.stream, str(build.version))
for build in builds]
builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")
builds = [
"%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds
]
assert builds == ["nginx:1:11"]

def test_get_last_builds_in_stream(self):
init_data_contexts(contexts=True)
with make_session(conf) as session:
builds = ModuleBuild.get_last_builds_in_stream(
session, "nginx", "1")
builds = ["%s:%s:%s:%s" % (build.name, build.stream, str(build.version),
build.context) for build in builds]
assert builds == ['nginx:1:3:d5a6c0fa', 'nginx:1:3:795e97c1']
builds = ModuleBuild.get_last_builds_in_stream(session, "nginx", "1")
builds = [
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
]
assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]

def test_get_last_builds_in_stream_version_lte(self):
init_data_contexts(1, multiple_stream_versions=True)
with make_session(conf) as session:
builds = ModuleBuild.get_last_builds_in_stream_version_lte(
session, "platform", 290100)
builds = set(["%s:%s:%s:%s" % (build.name, build.stream, str(build.version),
build.context) for build in builds])
assert builds == set(['platform:f29.0.0:3:00000000', 'platform:f29.1.0:3:00000000'])
builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290100)
builds = set([
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
])
assert builds == set(["platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"])

def test_get_last_builds_in_stream_version_lte_different_versions(self):
"""
@@ -166,12 +173,16 @@ class TestModelsGetStreamsContexts:
make_module("platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"])

with make_session(conf) as session:
builds = ModuleBuild.get_last_builds_in_stream_version_lte(
session, "platform", 290200)
builds = set(["%s:%s:%s:%s" % (build.name, build.stream, str(build.version),
build.context) for build in builds])
assert builds == set(['platform:f29.1.0:15:c11', 'platform:f29.1.0:15:c11.another',
'platform:f29.2.0:1:c11'])
builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290200)
builds = set([
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
])
assert builds == set([
"platform:f29.1.0:15:c11",
"platform:f29.1.0:15:c11.another",
"platform:f29.2.0:1:c11",
])

def test_get_module_count(self):
clean_database(False)

@@ -41,47 +41,52 @@ class TestViews:
init_data(2)

def test_metrics(self):
rv = self.client.get('/module-build-service/1/monitor/metrics')
rv = self.client.get("/module-build-service/1/monitor/metrics")

assert len([l for l in rv.get_data(as_text=True).splitlines()
if (l.startswith('# TYPE') and '_created ' not in l)]) == num_of_metrics
count = len([
l for l in rv.get_data(as_text=True).splitlines()
if (l.startswith("# TYPE") and "_created " not in l)
])
assert count == num_of_metrics


def test_standalone_metrics_server_disabled_by_default():
with pytest.raises(requests.exceptions.ConnectionError):
requests.get('http://127.0.0.1:10040/metrics')
requests.get("http://127.0.0.1:10040/metrics")


def test_standalone_metrics_server():
os.environ['MONITOR_STANDALONE_METRICS_SERVER_ENABLE'] = 'true'
os.environ["MONITOR_STANDALONE_METRICS_SERVER_ENABLE"] = "true"
reload_module(module_build_service.monitor)

r = requests.get('http://127.0.0.1:10040/metrics')

assert len([l for l in r.text.splitlines()
if (l.startswith('# TYPE') and '_created ' not in l)]) == num_of_metrics
r = requests.get("http://127.0.0.1:10040/metrics")
count = len([
l for l in r.text.splitlines()
if (l.startswith("# TYPE") and "_created " not in l)
])
assert count == num_of_metrics


@mock.patch('module_build_service.monitor.builder_failed_counter.labels')
@mock.patch('module_build_service.monitor.builder_success_counter.inc')
@mock.patch("module_build_service.monitor.builder_failed_counter.labels")
@mock.patch("module_build_service.monitor.builder_success_counter.inc")
def test_monitor_state_changing_success(succ_cnt, failed_cnt):
conf = mbs_config.Config(TestConfiguration)
b = make_module('pkg:0.1:1:c1', requires_list={'platform': 'el8'})
b.transition(conf, models.BUILD_STATES['wait'])
b.transition(conf, models.BUILD_STATES['build'])
b.transition(conf, models.BUILD_STATES['done'])
b = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})
b.transition(conf, models.BUILD_STATES["wait"])
b.transition(conf, models.BUILD_STATES["build"])
b.transition(conf, models.BUILD_STATES["done"])
succ_cnt.assert_called_once()
failed_cnt.assert_not_called()


@mock.patch('module_build_service.monitor.builder_failed_counter.labels')
@mock.patch('module_build_service.monitor.builder_success_counter.inc')
@mock.patch("module_build_service.monitor.builder_failed_counter.labels")
@mock.patch("module_build_service.monitor.builder_success_counter.inc")
def test_monitor_state_changing_failure(succ_cnt, failed_cnt):
failure_type = 'user'
failure_type = "user"
conf = mbs_config.Config(TestConfiguration)
b = make_module('pkg:0.1:1:c1', requires_list={'platform': 'el8'})
b.transition(conf, models.BUILD_STATES['wait'])
b.transition(conf, models.BUILD_STATES['build'])
b.transition(conf, models.BUILD_STATES['failed'], failure_type=failure_type)
b = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})
b.transition(conf, models.BUILD_STATES["wait"])
b.transition(conf, models.BUILD_STATES["build"])
b.transition(conf, models.BUILD_STATES["failed"], failure_type=failure_type)
succ_cnt.assert_not_called()
failed_cnt.assert_called_once_with(reason=failure_type)

@@ -38,64 +38,63 @@ base_dir = os.path.join(os.path.dirname(__file__), "..")


class TestDBModule:

def setup_method(self):
tests.reuse_component_init_data()

def test_get_buildrequired_modulemds(self):
mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml'))
mmd.set_stream('f30.1.3')
mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))
mmd.set_stream("f30.1.3")
import_mmd(db.session, mmd)
platform_f300103 = ModuleBuild.query.filter_by(stream='f30.1.3').one()
platform_f300103 = ModuleBuild.query.filter_by(stream="f30.1.3").one()
mmd.set_name("testmodule")
mmd.set_stream("master")
mmd.set_version(20170109091357)
mmd.set_context("123")
build = ModuleBuild(
name='testmodule',
stream='master',
name="testmodule",
stream="master",
version=20170109091357,
state=5,
build_context='dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3',
runtime_context='ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7',
context='7c29193d',
koji_tag='module-testmodule-master-20170109091357-7c29193d',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79',
build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",
runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",
context="7c29193d",
koji_tag="module-testmodule-master-20170109091357-7c29193d",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
batch=3,
owner='Dr. Pepper',
owner="Dr. Pepper",
time_submitted=datetime(2018, 11, 15, 16, 8, 18),
time_modified=datetime(2018, 11, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
modulemd=to_text_type(mmd.dumps())
rebuild_strategy="changed-and-after",
modulemd=to_text_type(mmd.dumps()),
)
build.buildrequires.append(platform_f300103)
db.session.add(build)
db.session.commit()

resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.get_buildrequired_modulemds(
"testmodule", "master", platform_f300103.mmd().dup_nsvc())
nsvcs = set([m.dup_nsvc() for m in result])
assert nsvcs == set(['testmodule:master:20170109091357:123'])
assert nsvcs == set(["testmodule:master:20170109091357:123"])

@pytest.mark.parametrize('stream_versions', [False, True])
@pytest.mark.parametrize("stream_versions", [False, True])
def test_get_module_modulemds_stream_versions(self, stream_versions):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.get_module_modulemds(
"platform", "f29.1.0", stream_version_lte=stream_versions)
nsvcs = set([mmd.dup_nsvc() for mmd in result])
if stream_versions:
assert nsvcs == set(['platform:f29.1.0:3:00000000', 'platform:f29.0.0:3:00000000'])
assert nsvcs == set(["platform:f29.1.0:3:00000000", "platform:f29.0.0:3:00000000"])
else:
assert nsvcs == set(['platform:f29.1.0:3:00000000'])
assert nsvcs == set(["platform:f29.1.0:3:00000000"])

@pytest.mark.parametrize('empty_buildrequires', [False, True])
@pytest.mark.parametrize("empty_buildrequires", [False, True])
def test_get_module_build_dependencies(self, empty_buildrequires):
"""
Tests that the buildrequires of testmodule are returned
"""
expected = set(['module-f28-build'])
expected = set(["module-f28-build"])
module = models.ModuleBuild.query.get(2)
if empty_buildrequires:
expected = set()
@@ -104,14 +103,14 @@ class TestDBModule:
# Wipe out the dependencies
mmd.set_dependencies()
xmd = glib.from_variant_dict(mmd.get_xmd())
xmd['mbs']['buildrequires'] = {}
xmd["mbs"]["buildrequires"] = {}
mmd.set_xmd(glib.dict_values(xmd))
module.modulemd = to_text_type(mmd.dumps())
db.session.add(module)
db.session.commit()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.get_module_build_dependencies(
'testmodule', 'master', '20170109091357', '78e4a6fd').keys()
"testmodule", "master", "20170109091357", "78e4a6fd").keys()
assert set(result) == expected

def test_get_module_build_dependencies_recursive(self):
@@ -121,126 +120,150 @@ class TestDBModule:
# Add testmodule2 that requires testmodule
module = models.ModuleBuild.query.get(3)
mmd = module.mmd()
mmd.set_name('testmodule2')
mmd.set_name("testmodule2")
mmd.set_version(20180123171545)
requires = mmd.get_dependencies()[0].get_requires()
requires['testmodule'] = Modulemd.SimpleSet()
requires['testmodule'].add('master')
requires["testmodule"] = Modulemd.SimpleSet()
requires["testmodule"].add("master")
mmd.get_dependencies()[0].set_requires(requires)
xmd = glib.from_variant_dict(mmd.get_xmd())
xmd['mbs']['requires']['testmodule'] = {
'filtered_rpms': [],
'ref': '620ec77321b2ea7b0d67d82992dda3e1d67055b4',
'stream': 'master',
'version': '20180205135154'
xmd["mbs"]["requires"]["testmodule"] = {
"filtered_rpms": [],
"ref": "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
"stream": "master",
"version": "20180205135154",
}
mmd.set_xmd(glib.dict_values(xmd))
module.modulemd = to_text_type(mmd.dumps())
module.name = 'testmodule2'
module.name = "testmodule2"
module.version = str(mmd.get_version())
module.koji_tag = 'module-ae2adf69caf0e1b6'
module.koji_tag = "module-ae2adf69caf0e1b6"

resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.get_module_build_dependencies(
'testmodule2', 'master', '20180123171545', 'c40c156c').keys()
assert set(result) == set(['module-f28-build'])
"testmodule2", "master", "20180123171545", "c40c156c").keys()
assert set(result) == set(["module-f28-build"])

@patch("module_build_service.config.Config.system",
new_callable=PropertyMock, return_value="test")
@patch("module_build_service.config.Config.mock_resultsdir",
new_callable=PropertyMock,
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
def test_get_module_build_dependencies_recursive_requires(
self, resultdir, conf_system):
@patch(
"module_build_service.config.Config.system", new_callable=PropertyMock, return_value="test"
)
@patch(
"module_build_service.config.Config.mock_resultsdir",
new_callable=PropertyMock,
return_value=os.path.join(base_dir, "staged_data", "local_builds"),
)
def test_get_module_build_dependencies_recursive_requires(self, resultdir, conf_system):
"""
Tests that it returns the requires of the buildrequires recursively
"""
with app.app_context():
utils.load_local_builds(["platform", "parent", "child", "testmodule"])

build = models.ModuleBuild.local_modules(
db.session, "child", "master")
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
build = models.ModuleBuild.local_modules(db.session, "child", "master")
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()

local_path = os.path.join(base_dir, 'staged_data', "local_builds")
local_path = os.path.join(base_dir, "staged_data", "local_builds")

expected = [
os.path.join(
local_path,
'module-parent-master-20170816080815/results'),
]
expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]
assert set(result) == set(expected)

def test_resolve_requires(self):
build = models.ModuleBuild.query.get(2)
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
result = resolver.resolve_requires([":".join([
build.name, build.stream, build.version, build.context])])
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.resolve_requires(
[":".join([build.name, build.stream, build.version, build.context])]
)

assert result == {
'testmodule': {
'stream': 'master', 'version': '20170109091357', 'context': u'78e4a6fd',
'ref': 'ff1ea79fc952143efeed1851aa0aa006559239ba',
'koji_tag': 'module-testmodule-master-20170109091357-78e4a6fd'
}}
"testmodule": {
"stream": "master",
"version": "20170109091357",
"context": u"78e4a6fd",
"ref": "ff1ea79fc952143efeed1851aa0aa006559239ba",
"koji_tag": "module-testmodule-master-20170109091357-78e4a6fd",
}
}

def test_resolve_profiles(self):
"""
Tests that the profiles get resolved recursively
"""
mmd = models.ModuleBuild.query.get(2).mmd()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot'))
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
expected = {
'buildroot':
set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed',
'findutils', 'util-linux', 'bash', 'info', 'bzip2',
'grep', 'redhat-rpm-config', 'fedora-release',
'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils',
'which', 'rpm-build', 'gzip', 'gcc-c++']),
'srpm-buildroot':
set(['shadow-utils', 'redhat-rpm-config', 'rpm-build',
'fedora-release', 'fedpkg-minimal', 'gnupg2',
'bash'])
"buildroot": set([
"unzip",
"tar",
"cpio",
"gawk",
"gcc",
"xz",
"sed",
"findutils",
"util-linux",
"bash",
"info",
"bzip2",
"grep",
"redhat-rpm-config",
"fedora-release",
"diffutils",
"make",
"patch",
"shadow-utils",
"coreutils",
"which",
"rpm-build",
"gzip",
"gcc-c++",
]),
"srpm-buildroot": set([
"shadow-utils",
"redhat-rpm-config",
"rpm-build",
"fedora-release",
"fedpkg-minimal",
"gnupg2",
"bash",
]),
}
assert result == expected

@patch("module_build_service.config.Config.system",
new_callable=PropertyMock, return_value="test")
@patch("module_build_service.config.Config.mock_resultsdir",
new_callable=PropertyMock,
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
@patch(
"module_build_service.config.Config.system", new_callable=PropertyMock, return_value="test"
)
@patch(
"module_build_service.config.Config.mock_resultsdir",
new_callable=PropertyMock,
return_value=os.path.join(base_dir, "staged_data", "local_builds"),
)
def test_resolve_profiles_local_module(self, local_builds, conf_system):
"""
Test that profiles get resolved recursively on local builds
"""
with app.app_context():
utils.load_local_builds(['platform'])
utils.load_local_builds(["platform"])
mmd = models.ModuleBuild.query.get(2).mmd()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot'))
expected = {
'buildroot':
set(['foo']),
'srpm-buildroot':
set(['bar'])
}
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}
assert result == expected

def test_get_latest_with_virtual_stream(self):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
mmd = resolver.get_latest_with_virtual_stream('platform', 'f29')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
mmd = resolver.get_latest_with_virtual_stream("platform", "f29")
assert mmd
assert mmd.get_stream() == 'f29.2.0'
assert mmd.get_stream() == "f29.2.0"

def test_get_latest_with_virtual_stream_none(self):
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
mmd = resolver.get_latest_with_virtual_stream('platform', 'doesnotexist')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")
assert not mmd

def test_get_module_count(self):
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db')
count = resolver.get_module_count(name='platform', stream='f28')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
count = resolver.get_module_count(name="platform", stream="f28")
assert count == 1

@@ -35,42 +35,41 @@ base_dir = os.path.join(os.path.dirname(__file__), "..")


class TestLocalResolverModule:

def setup_method(self):
tests.reuse_component_init_data()

def test_get_buildrequired_modulemds(self):
mmd = load_mmd_file(os.path.join(base_dir, 'staged_data', 'platform.yaml'))
mmd.set_stream('f8')
mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))
mmd.set_stream("f8")
import_mmd(db.session, mmd)
platform_f8 = ModuleBuild.query.filter_by(stream='f8').one()
platform_f8 = ModuleBuild.query.filter_by(stream="f8").one()
mmd.set_name("testmodule")
mmd.set_stream("master")
mmd.set_version(20170109091357)
mmd.set_context("123")
build = ModuleBuild(
name='testmodule',
stream='master',
name="testmodule",
stream="master",
version=20170109091357,
state=5,
build_context='dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3',
runtime_context='ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7',
context='7c29193d',
koji_tag='module-testmodule-master-20170109091357-7c29193d',
scmurl='https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79',
build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",
runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",
context="7c29193d",
koji_tag="module-testmodule-master-20170109091357-7c29193d",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
batch=3,
owner='Dr. Pepper',
owner="Dr. Pepper",
time_submitted=datetime(2018, 11, 15, 16, 8, 18),
time_modified=datetime(2018, 11, 15, 16, 19, 35),
rebuild_strategy='changed-and-after',
modulemd=to_text_type(mmd.dumps())
rebuild_strategy="changed-and-after",
modulemd=to_text_type(mmd.dumps()),
)
db.session.add(build)
db.session.commit()

resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='local')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="local")
result = resolver.get_buildrequired_modulemds(
"testmodule", "master", platform_f8.mmd().dup_nsvc())
nsvcs = set([m.dup_nsvc() for m in result])
assert nsvcs == set(['testmodule:master:20170109091357:9c690d0e',
'testmodule:master:20170109091357:123'])
assert nsvcs == set(
["testmodule:master:20170109091357:9c690d0e", "testmodule:master:20170109091357:123"])

@@ -33,7 +33,6 @@ base_dir = os.path.join(os.path.dirname(__file__), "..")


class TestMBSModule:

@patch("requests.Session")
def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):
""" Tests for querying a module from mbs """
@@ -46,20 +45,20 @@ class TestMBSModule:
"stream": "master",
"version": "20180205135154",
"context": "9c690d0e",
"modulemd": testmodule_mmd_9c690d0e
"modulemd": testmodule_mmd_9c690d0e,
}
],
"meta": {"next": None}
"meta": {"next": None},
}

mock_session().get.return_value = mock_res

resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
module_mmds = resolver.get_module_modulemds(
'testmodule', 'master', '20180205135154', '9c690d0e', virtual_streams=["f28"])
"testmodule", "master", "20180205135154", "9c690d0e", virtual_streams=["f28"]
)
nsvcs = set(
'{}:{}:{}:{}'.format(m.peek_name(), m.peek_stream(),
m.peek_version(), m.peek_context())
"{}:{}:{}:{}".format(m.peek_name(), m.peek_stream(), m.peek_version(), m.peek_context())
for m in module_mmds
)
expected = set(["testmodule:master:20180205135154:9c690d0e"])
@@ -80,8 +79,9 @@ class TestMBSModule:
assert nsvcs == expected

@patch("requests.Session")
def test_get_module_modulemds_partial(self, mock_session, testmodule_mmd_9c690d0e,
testmodule_mmd_c2c572ed):
def test_get_module_modulemds_partial(
self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed
):
""" Test for querying MBS without the context of a module """

version = "20180205135154"
@@ -95,29 +95,30 @@ class TestMBSModule:
"stream": "master",
"version": version,
"context": "9c690d0e",
"modulemd": testmodule_mmd_9c690d0e
"modulemd": testmodule_mmd_9c690d0e,
},
{
"name": "testmodule",
"stream": "master",
"version": version,
"context": "c2c572ed",
"modulemd": testmodule_mmd_c2c572ed
}
"modulemd": testmodule_mmd_c2c572ed,
},
],
"meta": {"next": None}
"meta": {"next": None},
}

mock_session().get.return_value = mock_res
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
ret = resolver.get_module_modulemds('testmodule', 'master', version)
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
ret = resolver.get_module_modulemds("testmodule", "master", version)
nsvcs = set(
'{}:{}:{}:{}'.format(m.peek_name(), m.peek_stream(),
m.peek_version(), m.peek_context())
"{}:{}:{}:{}".format(m.peek_name(), m.peek_stream(), m.peek_version(), m.peek_context())
for m in ret
)
expected = set(["testmodule:master:20180205135154:9c690d0e",
"testmodule:master:20180205135154:c2c572ed"])
expected = set([
"testmodule:master:20180205135154:9c690d0e",
"testmodule:master:20180205135154:c2c572ed",
])
mbs_url = tests.conf.mbs_url
expected_query = {
"name": "testmodule",
@@ -127,14 +128,15 @@ class TestMBSModule:
"order_desc_by": "version",
"page": 1,
"per_page": 10,
"state": "ready"
"state": "ready",
}
mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
assert nsvcs == expected

@patch("requests.Session")
def test_get_module_build_dependencies(self, mock_session, platform_mmd,
testmodule_mmd_9c690d0e):
def test_get_module_build_dependencies(
self, mock_session, platform_mmd, testmodule_mmd_9c690d0e
):
"""
Tests that we return just direct build-time dependencies of testmodule.
"""
@@ -148,11 +150,12 @@ class TestMBSModule:
"stream": "master",
"version": "20180205135154",
"context": "9c690d0e",
"modulemd": testmodule_mmd_9c690d0e
"modulemd": testmodule_mmd_9c690d0e,
}
],
"meta": {"next": None}
}, {
"meta": {"next": None},
},
{
"items": [
{
"name": "platform",
@@ -160,57 +163,63 @@ class TestMBSModule:
"version": "3",
"context": "00000000",
"modulemd": platform_mmd,
"koji_tag": "module-f28-build"
"koji_tag": "module-f28-build",
}
],
"meta": {"next": None}
}
"meta": {"next": None},
},
]

mock_session().get.return_value = mock_res
expected = set(['module-f28-build'])
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
expected = set(["module-f28-build"])
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
result = resolver.get_module_build_dependencies(
'testmodule', 'master', '20180205135154', '9c690d0e').keys()
"testmodule", "master", "20180205135154", "9c690d0e").keys()

expected_queries = [{
"name": "testmodule",
"stream": "master",
"version": "20180205135154",
"context": "9c690d0e",
"verbose": True,
"order_desc_by": "version",
"page": 1,
"per_page": 10,
"state": "ready"
}, {
"name": "platform",
"stream": "f28",
"version": "3",
"context": "00000000",
"verbose": True,
"order_desc_by": "version",
"page": 1,
"per_page": 10,
"state": "ready"
}]
|
||||
expected_queries = [
|
||||
{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready",
|
||||
},
|
||||
{
|
||||
"name": "platform",
|
||||
"stream": "f28",
|
||||
"version": "3",
|
||||
"context": "00000000",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready",
|
||||
},
|
||||
]
|
||||
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_calls = [call(mbs_url, params=expected_queries[0]),
|
||||
call(mbs_url, params=expected_queries[1])]
|
||||
expected_calls = [
|
||||
call(mbs_url, params=expected_queries[0]),
|
||||
call(mbs_url, params=expected_queries[1]),
|
||||
]
|
||||
mock_session().get.mock_calls = expected_calls
|
||||
assert mock_session().get.call_count == 2
|
||||
assert set(result) == expected
|
||||
|
||||
@patch("requests.Session")
|
||||
def test_get_module_build_dependencies_empty_buildrequires(self, mock_session,
|
||||
testmodule_mmd_9c690d0e):
|
||||
def test_get_module_build_dependencies_empty_buildrequires(
|
||||
self, mock_session, testmodule_mmd_9c690d0e
|
||||
):
|
||||
|
||||
mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e)
|
||||
# Wipe out the dependencies
|
||||
mmd.set_dependencies()
|
||||
xmd = glib.from_variant_dict(mmd.get_xmd())
|
||||
xmd['mbs']['buildrequires'] = {}
|
||||
xmd["mbs"]["buildrequires"] = {}
|
||||
mmd.set_xmd(glib.dict_values(xmd))
|
||||
|
||||
mock_res = Mock()
|
||||
@@ -224,10 +233,10 @@ class TestMBSModule:
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"modulemd": mmd.dumps(),
|
||||
"build_deps": []
|
||||
"build_deps": [],
|
||||
}
|
||||
],
|
||||
"meta": {"next": None}
|
||||
"meta": {"next": None},
|
||||
}
|
||||
]
|
||||
|
||||
@@ -235,9 +244,10 @@ class TestMBSModule:
|
||||
|
||||
expected = set()
|
||||
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
|
||||
result = resolver.get_module_build_dependencies(
|
||||
'testmodule', 'master', '20180205135154', '9c690d0e').keys()
|
||||
"testmodule", "master", "20180205135154", "9c690d0e"
|
||||
).keys()
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_query = {
|
||||
"name": "testmodule",
|
||||
@@ -248,7 +258,7 @@ class TestMBSModule:
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
"state": "ready",
|
||||
}
|
||||
mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
|
||||
assert set(result) == expected
|
||||
@@ -265,27 +275,53 @@ class TestMBSModule:
                    "stream": "f28",
                    "version": "3",
                    "context": "00000000",
                    "modulemd": platform_mmd
                    "modulemd": platform_mmd,
                }
            ],
            "meta": {"next": None}
            "meta": {"next": None},
        }

        mock_session().get.return_value = mock_res
        resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
        result = resolver.resolve_profiles(formatted_testmodule_mmd,
                                           ('buildroot', 'srpm-buildroot'))
        resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
        result = resolver.resolve_profiles(
            formatted_testmodule_mmd, ("buildroot", "srpm-buildroot")
        )
        expected = {
            'buildroot':
                set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed',
                     'findutils', 'util-linux', 'bash', 'info', 'bzip2',
                     'grep', 'redhat-rpm-config', 'fedora-release',
                     'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils',
                     'which', 'rpm-build', 'gzip', 'gcc-c++']),
            'srpm-buildroot':
                set(['shadow-utils', 'redhat-rpm-config', 'rpm-build',
                     'fedora-release', 'fedpkg-minimal', 'gnupg2',
                     'bash'])
            "buildroot": set([
                "unzip",
                "tar",
                "cpio",
                "gawk",
                "gcc",
                "xz",
                "sed",
                "findutils",
                "util-linux",
                "bash",
                "info",
                "bzip2",
                "grep",
                "redhat-rpm-config",
                "fedora-release",
                "diffutils",
                "make",
                "patch",
                "shadow-utils",
                "coreutils",
                "which",
                "rpm-build",
                "gzip",
                "gcc-c++",
            ]),
            "srpm-buildroot": set([
                "shadow-utils",
                "redhat-rpm-config",
                "rpm-build",
                "fedora-release",
                "fedpkg-minimal",
                "gnupg2",
                "bash",
            ]),
        }

        mbs_url = tests.conf.mbs_url
@@ -298,92 +334,84 @@ class TestMBSModule:
            "order_desc_by": "version",
            "page": 1,
            "per_page": 10,
            "state": "ready"
            "state": "ready",
        }

        mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
        assert result == expected

@patch("module_build_service.config.Config.system",
|
||||
new_callable=PropertyMock, return_value="test")
|
||||
@patch("module_build_service.config.Config.mock_resultsdir",
|
||||
new_callable=PropertyMock,
|
||||
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
|
||||
def test_resolve_profiles_local_module(self, local_builds, conf_system,
|
||||
formatted_testmodule_mmd):
|
||||
@patch(
|
||||
"module_build_service.config.Config.system", new_callable=PropertyMock, return_value="test"
|
||||
)
|
||||
@patch(
|
||||
"module_build_service.config.Config.mock_resultsdir",
|
||||
new_callable=PropertyMock,
|
||||
return_value=os.path.join(base_dir, "staged_data", "local_builds"),
|
||||
)
|
||||
def test_resolve_profiles_local_module(
|
||||
self, local_builds, conf_system, formatted_testmodule_mmd
|
||||
):
|
||||
tests.clean_database()
|
||||
with app.app_context():
|
||||
module_build_service.utils.load_local_builds(['platform'])
|
||||
module_build_service.utils.load_local_builds(["platform"])
|
||||
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
result = resolver.resolve_profiles(formatted_testmodule_mmd,
|
||||
('buildroot', 'srpm-buildroot'))
|
||||
expected = {
|
||||
'buildroot':
|
||||
set(['foo']),
|
||||
'srpm-buildroot':
|
||||
set(['bar'])
|
||||
}
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
|
||||
result = resolver.resolve_profiles(
|
||||
formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))
|
||||
expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}
|
||||
assert result == expected
|
||||
|
||||
    def test_get_empty_buildrequired_modulemds(self):
        resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
        resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

        with patch.object(resolver, 'session') as session:
        with patch.object(resolver, "session") as session:
            session.get.return_value = Mock(ok=True)
            session.get.return_value.json.return_value = {
                'items': [], 'meta': {'next': None}
            }
            session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

            result = resolver.get_buildrequired_modulemds(
                'nodejs', '10', 'platform:el8:1:00000000')
            result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")
            assert [] == result

    def test_get_buildrequired_modulemds(self):
        resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
        resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")

        with patch.object(resolver, 'session') as session:
        with patch.object(resolver, "session") as session:
            session.get.return_value = Mock(ok=True)
            session.get.return_value.json.return_value = {
                'items': [{
                    'name': 'nodejs', 'stream': '10',
                    'version': 1, 'context': 'c1',
                    'modulemd': tests.make_module(
                        'nodejs:10:1:c1', store_to_db=False).dumps(),
                }, {
                    'name': 'nodejs', 'stream': '10',
                    'version': 2, 'context': 'c1',
                    'modulemd': tests.make_module(
                        'nodejs:10:2:c1', store_to_db=False).dumps(),
                }], 'meta': {'next': None}
                "items": [
                    {
                        "name": "nodejs",
                        "stream": "10",
                        "version": 1,
                        "context": "c1",
                        "modulemd": tests.make_module("nodejs:10:1:c1", store_to_db=False).dumps(),
                    },
                    {
                        "name": "nodejs",
                        "stream": "10",
                        "version": 2,
                        "context": "c1",
                        "modulemd": tests.make_module("nodejs:10:2:c1", store_to_db=False).dumps(),
                    },
                ],
                "meta": {"next": None},
            }

            result = resolver.get_buildrequired_modulemds(
                'nodejs', '10', 'platform:el8:1:00000000')
            result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

            assert 1 == len(result)
            mmd = result[0]
            assert 'nodejs' == mmd.get_name()
            assert '10' == mmd.get_stream()
            assert "nodejs" == mmd.get_name()
            assert "10" == mmd.get_stream()
            assert 1 == mmd.get_version()
            assert 'c1' == mmd.get_context()
            assert "c1" == mmd.get_context()

@patch("requests.Session")
|
||||
def test_get_module_count(self, mock_session):
|
||||
mock_res = Mock()
|
||||
mock_res.ok.return_value = True
|
||||
mock_res.json.return_value = {
|
||||
"items": [
|
||||
{
|
||||
"name": "platform",
|
||||
"stream": "f28",
|
||||
"version": "3",
|
||||
"context": "00000000",
|
||||
}
|
||||
],
|
||||
"meta": {
|
||||
"total": 5
|
||||
}
|
||||
"items": [{"name": "platform", "stream": "f28", "version": "3", "context": "00000000"}],
|
||||
"meta": {"total": 5},
|
||||
}
|
||||
mock_session.return_value.get.return_value = mock_res
|
||||
|
||||
@@ -393,13 +421,7 @@ class TestMBSModule:
|
||||
assert count == 5
|
||||
mock_session.return_value.get.assert_called_once_with(
|
||||
"https://mbs.fedoraproject.org/module-build-service/1/module-builds/",
|
||||
params={
|
||||
"name": "platform",
|
||||
"page": 1,
|
||||
"per_page": 1,
|
||||
"short": True,
|
||||
"stream": "f28",
|
||||
}
|
||||
params={"name": "platform", "page": 1, "per_page": 1, "short": True, "stream": "f28"},
|
||||
)
|
||||
|
||||
@patch("requests.Session")
|
||||
@@ -416,9 +438,7 @@ class TestMBSModule:
|
||||
"version": "3",
|
||||
}
|
||||
],
|
||||
"meta": {
|
||||
"total": 5
|
||||
}
|
||||
"meta": {"total": 5},
|
||||
}
|
||||
mock_session.return_value.get.return_value = mock_res
|
||||
|
||||
@@ -435,5 +455,5 @@ class TestMBSModule:
|
||||
"per_page": 1,
|
||||
"verbose": True,
|
||||
"virtual_stream": "virtualf28",
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
@@ -24,7 +24,6 @@ from module_build_service.messaging import KojiTagChange, KojiRepoChange


class TestConsumer:

    def test_get_abstracted_msg_fedmsg(self):
        """
        Test the output of get_abstracted_msg() when using the
@@ -51,17 +50,17 @@ class TestConsumer:
                "user": "bodhi",
                "version": "15.1.0",
                "owner": "orion",
                "release": "1.el7"
            }
                "release": "1.el7",
            },
        }
        msg_obj = consumer.get_abstracted_msg(msg)
        assert isinstance(msg_obj, KojiTagChange)
        assert msg_obj.msg_id == msg['msg_id']
        assert msg_obj.tag == msg['msg']['tag']
        assert msg_obj.artifact == msg['msg']['name']
        assert msg_obj.msg_id == msg["msg_id"]
        assert msg_obj.tag == msg["msg"]["tag"]
        assert msg_obj.artifact == msg["msg"]["name"]

    @patch('module_build_service.scheduler.consumer.models')
    @patch.object(MBSConsumer, 'process_message')
    @patch("module_build_service.scheduler.consumer.models")
    @patch.object(MBSConsumer, "process_message")
    def test_consume_fedmsg(self, process_message, models):
        """
        Test the MBSConsumer.consume() method when using the
@@ -86,13 +85,13 @@ class TestConsumer:
                    "instance": "primary",
                    "repo_id": 400859,
                    "tag": "f22-build",
                    "tag_id": 278
                }
            }
                    "tag_id": 278,
                },
            },
        }
        consumer.consume(msg)
        assert process_message.call_count == 1
        msg_obj = process_message.call_args[0][1]
        assert isinstance(msg_obj, KojiRepoChange)
        assert msg_obj.msg_id == msg['body']['msg_id']
        assert msg_obj.repo_tag == msg['body']['msg']['tag']
        assert msg_obj.msg_id == msg["body"]["msg_id"]
        assert msg_obj.repo_tag == msg["body"]["msg"]["tag"]

@@ -39,56 +39,50 @@ class TestGetCorrespondingModuleBuild:
    def setup_method(self, method):
        clean_database()

    @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession):
        ClientSession.return_value.getBuild.return_value = None

        assert get_corresponding_module_build('n-v-r') is None
        assert get_corresponding_module_build("n-v-r") is None

    @pytest.mark.parametrize('build_info', [
        # Build info does not have key extra
        {'id': 1000, 'name': 'ed'},
        # Build info contains key extra, but it is not for the module build
        {
            'extra': {'submitter': 'osbs', 'image': {}}
        },
        # Key module_build_service_id is missing
        {
            'extra': {'typeinfo': {'module': {}}}
        }
    ])
    @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
    @pytest.mark.parametrize(
        "build_info",
        [
            # Build info does not have key extra
            {"id": 1000, "name": "ed"},
            # Build info contains key extra, but it is not for the module build
            {"extra": {"submitter": "osbs", "image": {}}},
            # Key module_build_service_id is missing
            {"extra": {"typeinfo": {"module": {}}}},
        ],
    )
    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    def test_cannot_find_module_build_id_from_build_info(self, ClientSession, build_info):
        ClientSession.return_value.getBuild.return_value = build_info

        assert get_corresponding_module_build('n-v-r') is None
        assert get_corresponding_module_build("n-v-r") is None

    @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession):
        fake_module_build_id, = db.session.query(func.max(ModuleBuild.id)).first()

        ClientSession.return_value.getBuild.return_value = {
            'extra': {'typeinfo': {'module': {
                'module_build_service_id': fake_module_build_id + 1
            }}}
            "extra": {"typeinfo": {"module": {"module_build_service_id": fake_module_build_id + 1}}}
        }

        assert get_corresponding_module_build('n-v-r') is None
        assert get_corresponding_module_build("n-v-r") is None

    @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    def test_find_the_module_build(self, ClientSession):
        expected_module_build = (
            db.session.query(ModuleBuild)
            .filter(ModuleBuild.name == 'platform').first()
            db.session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()
        )

        ClientSession.return_value.getBuild.return_value = {
            'extra': {'typeinfo': {'module': {
                'module_build_service_id': expected_module_build.id
            }}}
            "extra": {"typeinfo": {"module": {"module_build_service_id": expected_module_build.id}}}
        }

        build = get_corresponding_module_build('n-v-r')
        build = get_corresponding_module_build("n-v-r")

        assert expected_module_build.id == build.id
        assert expected_module_build.name == build.name
@@ -97,82 +91,80 @@ class TestGetCorrespondingModuleBuild:
class TestDecisionUpdateHandler:
    """Test handler decision_update"""

    @patch('module_build_service.scheduler.handlers.greenwave.log')
    @patch("module_build_service.scheduler.handlers.greenwave.log")
    def test_decision_context_is_not_match(self, log):
        msg = Mock(msg_id='msg-id-1',
                   decision_context='bodhi_update_push_testing')
        msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing")
        decision_update(conf, db.session, msg)
        log.debug.assert_called_once_with(
            'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"',
            'msg-id-1', 'osci_compose_gate_modules'
            "msg-id-1",
            "osci_compose_gate_modules",
        )

    @patch('module_build_service.scheduler.handlers.greenwave.log')
    @patch("module_build_service.scheduler.handlers.greenwave.log")
    def test_not_satisfy_policies(self, log):
        msg = Mock(msg_id='msg-id-1',
                   decision_context='osci_compose_gate_modules',
                   policies_satisfied=False,
                   subject_identifier='pkg-0.1-1.c1')
        msg = Mock(
            msg_id="msg-id-1",
            decision_context="osci_compose_gate_modules",
            policies_satisfied=False,
            subject_identifier="pkg-0.1-1.c1",
        )
        decision_update(conf, db.session, msg)
        log.debug.assert_called_once_with(
            'Skip to handle module build %s because it has not satisfied '
            'Greenwave policies.',
            msg.subject_identifier
            "Skip to handle module build %s because it has not satisfied Greenwave policies.",
            msg.subject_identifier,
        )

    @patch('module_build_service.messaging.publish')
    @patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
    @patch("module_build_service.messaging.publish")
    @patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
    def test_transform_from_done_to_ready(self, ClientSession, publish):
        clean_database()

        # This build should be queried and transformed to ready state
        module_build = make_module('pkg:0.1:1:c1', requires_list={'platform': 'el8'})
        module_build = make_module("pkg:0.1:1:c1", requires_list={"platform": "el8"})
        module_build.transition(
            conf, BUILD_STATES['done'], 'Move to done directly for running test.')
            conf, BUILD_STATES["done"], "Move to done directly for running test."
        )

        # Assert this call below
        first_publish_call = call(
            service='mbs',
            topic='module.state.change',
            service="mbs",
            topic="module.state.change",
            msg=module_build.json(show_tasks=False),
            conf=conf
            conf=conf,
        )

        db.session.refresh(module_build)

        ClientSession.return_value.getBuild.return_value = {
            'extra': {'typeinfo': {'module': {
                'module_build_service_id': module_build.id
            }}}
            "extra": {"typeinfo": {"module": {"module_build_service_id": module_build.id}}}
        }

        msg = {
            'msg_id': 'msg-id-1',
            'topic': 'org.fedoraproject.prod.greenwave.decision.update',
            'msg': {
                'decision_context': 'osci_compose_gate_modules',
                'policies_satisfied': True,
                'subject_identifier': 'pkg-0.1-1.c1'
            }
            "msg_id": "msg-id-1",
            "topic": "org.fedoraproject.prod.greenwave.decision.update",
            "msg": {
                "decision_context": "osci_compose_gate_modules",
                "policies_satisfied": True,
                "subject_identifier": "pkg-0.1-1.c1",
            },
        }
        hub = Mock(config={
            'validate_signatures': False
        })
        hub = Mock(config={"validate_signatures": False})
        consumer = MBSConsumer(hub)
        consumer.consume(msg)

        # Load module build again to check its state is moved correctly
        module_build = (
            db.session.query(ModuleBuild)
            .filter(ModuleBuild.id == module_build.id).first()
        )
            db.session.query(ModuleBuild).filter(ModuleBuild.id == module_build.id).first())

        assert BUILD_STATES['ready'] == module_build.state
        assert BUILD_STATES["ready"] == module_build.state

        publish.assert_has_calls([
            first_publish_call,
            call(service='mbs',
                 topic='module.state.change',
                 msg=module_build.json(show_tasks=False),
                 conf=conf),
            call(
                service="mbs",
                topic="module.state.change",
                msg=module_build.json(show_tasks=False),
                conf=conf,
            ),
        ])

@@ -35,20 +35,16 @@ from module_build_service.models import make_session, ModuleBuild, ComponentBuil


class TestModuleInit:

    def setup_method(self, test_method):
        self.fn = module_build_service.scheduler.handlers.modules.init
        self.staged_data_dir = os.path.join(
            os.path.dirname(__file__), '../', 'staged_data')
        testmodule_yml_path = os.path.join(
            self.staged_data_dir, 'testmodule_init.yaml')
        with open(testmodule_yml_path, 'r') as f:
        self.staged_data_dir = os.path.join(os.path.dirname(__file__), "../", "staged_data")
        testmodule_yml_path = os.path.join(self.staged_data_dir, "testmodule_init.yaml")
        with open(testmodule_yml_path, "r") as f:
            yaml = to_text_type(f.read())
        scmurl = 'git://pkgs.domain.local/modules/testmodule?#620ec77'
        scmurl = "git://pkgs.domain.local/modules/testmodule?#620ec77"
        clean_database()
        with make_session(conf) as session:
            ModuleBuild.create(
                session, conf, 'testmodule', '1', 3, yaml, scmurl, 'mprahl')
            ModuleBuild.create(session, conf, "testmodule", "1", 3, yaml, scmurl, "mprahl")

    def teardown_method(self, test_method):
        try:
@@ -57,13 +53,19 @@ class TestModuleInit:
        except Exception:
            pass

    @patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder."
           "get_built_rpms_in_module_build")
    @patch('module_build_service.scm.SCM')
    @patch('module_build_service.scheduler.handlers.modules.handle_stream_collision_modules')
    @patch(
        "module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder."
        "get_built_rpms_in_module_build"
    )
    @patch("module_build_service.scm.SCM")
    @patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules")
    def test_init_basic(self, rscm, mocked_scm, built_rpms):
        FakeSCM(mocked_scm, 'testmodule', 'testmodule_init.yaml',
                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
        FakeSCM(
            mocked_scm,
            "testmodule",
            "testmodule_init.yaml",
            "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
        )

        built_rpms.return_value = [
            "foo-0:2.4.48-3.el8+1308+551bfa71",
@@ -71,7 +73,8 @@ class TestModuleInit:
            "bar-0:2.5.48-3.el8+1308+551bfa71",
            "bar-debuginfo-0:2.5.48-3.el8+1308+551bfa71",
            "x-0:2.5.48-3.el8+1308+551bfa71",
            "x-debuginfo-0:2.5.48-3.el8+1308+551bfa71"]
            "x-debuginfo-0:2.5.48-3.el8+1308+551bfa71",
        ]

        platform_build = ModuleBuild.query.get(1)
        mmd = platform_build.mmd()
@@ -83,7 +86,8 @@ class TestModuleInit:
        db.session.commit()

        msg = module_build_service.messaging.MBSModule(
            msg_id=None, module_build_id=2, module_build_state='init')
            msg_id=None, module_build_id=2, module_build_state="init"
        )

        with make_session(conf) as session:
            self.fn(config=conf, session=session, msg=msg)
@@ -91,10 +95,12 @@ class TestModuleInit:
        # Make sure the module entered the wait state
        assert build.state == 1, build.state
        # Make sure format_mmd was run properly
        xmd_mbs = build.mmd().get_xmd()['mbs']
        xmd_mbs = build.mmd().get_xmd()["mbs"]
        assert type(xmd_mbs) is GLib.Variant
        assert xmd_mbs["buildrequires"]["platform"]["filtered_rpms"] == [
            'foo-0:2.4.48-3.el8+1308+551bfa71', 'bar-0:2.5.48-3.el8+1308+551bfa71']
            "foo-0:2.4.48-3.el8+1308+551bfa71",
            "bar-0:2.5.48-3.el8+1308+551bfa71",
        ]
        return build

    def test_init_called_twice(self):
@@ -113,16 +119,16 @@ class TestModuleInit:
        new_mmd = yaml.safe_load(build.modulemd)
        assert old_mmd == new_mmd

    @patch('module_build_service.scm.SCM')
    @patch("module_build_service.scm.SCM")
    def test_init_scm_not_available(self, mocked_scm):
        def mocked_scm_get_latest():
            raise RuntimeError("Failed in mocked_scm_get_latest")

        FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
                '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
        FakeSCM(
            mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
        mocked_scm.return_value.get_latest = mocked_scm_get_latest
        msg = module_build_service.messaging.MBSModule(
            msg_id=None, module_build_id=2, module_build_state='init')
            msg_id=None, module_build_id=2, module_build_state="init")
        with make_session(conf) as session:
            self.fn(config=conf, session=session, msg=msg)
        build = ModuleBuild.query.filter_by(id=2).one()
@@ -130,51 +136,56 @@ class TestModuleInit:
        # since the git server is not available
        assert build.state == 4, build.state

    @patch("module_build_service.config.Config.modules_allow_repository",
           new_callable=PropertyMock, return_value=True)
    @patch('module_build_service.scm.SCM')
    @patch(
        "module_build_service.config.Config.modules_allow_repository",
        new_callable=PropertyMock,
        return_value=True,
    )
    @patch("module_build_service.scm.SCM")
    def test_init_includedmodule(self, mocked_scm, mocked_mod_allow_repo):
        FakeSCM(mocked_scm, "includedmodules", ['testmodule_init.yaml'])
        includedmodules_yml_path = os.path.join(
            self.staged_data_dir, 'includedmodules.yaml')
        with open(includedmodules_yml_path, 'r') as f:
        FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])
        includedmodules_yml_path = os.path.join(self.staged_data_dir, "includedmodules.yaml")
        with open(includedmodules_yml_path, "r") as f:
            yaml = to_text_type(f.read())
        scmurl = 'git://pkgs.domain.local/modules/includedmodule?#da95886'
        scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886"
        with make_session(conf) as session:
            ModuleBuild.create(
                session, conf, 'includemodule', '1', 3, yaml, scmurl, 'mprahl')
            ModuleBuild.create(session, conf, "includemodule", "1", 3, yaml, scmurl, "mprahl")
            msg = module_build_service.messaging.MBSModule(
                msg_id=None, module_build_id=3, module_build_state='init')
                msg_id=None, module_build_id=3, module_build_state="init")
            self.fn(config=conf, session=session, msg=msg)
        build = ModuleBuild.query.filter_by(id=3).one()
        assert build.state == 1
        assert build.name == 'includemodule'
        assert build.name == "includemodule"
        batches = {}
        for comp_build in ComponentBuild.query.filter_by(module_id=3).all():
            batches[comp_build.package] = comp_build.batch
        assert batches['perl-List-Compare'] == 2
        assert batches['perl-Tangerine'] == 2
        assert batches['foo'] == 2
        assert batches['tangerine'] == 3
        assert batches['file'] == 4
        assert batches["perl-List-Compare"] == 2
        assert batches["perl-Tangerine"] == 2
        assert batches["foo"] == 2
        assert batches["tangerine"] == 3
        assert batches["file"] == 4
        # Test that the RPMs are properly merged in xmd
        xmd_rpms = {
            'perl-List-Compare': {'ref': '4f26aeafdb'},
            'perl-Tangerine': {'ref': '4f26aeafdb'},
            'tangerine': {'ref': '4f26aeafdb'},
            'foo': {'ref': '93dea37599'},
            'file': {'ref': 'a2740663f8'},
            "perl-List-Compare": {"ref": "4f26aeafdb"},
            "perl-Tangerine": {"ref": "4f26aeafdb"},
            "tangerine": {"ref": "4f26aeafdb"},
            "foo": {"ref": "93dea37599"},
            "file": {"ref": "a2740663f8"},
        }
        assert build.mmd().get_xmd()['mbs']['rpms'] == xmd_rpms
        assert build.mmd().get_xmd()["mbs"]["rpms"] == xmd_rpms

    @patch('module_build_service.models.ModuleBuild.from_module_event')
    @patch('module_build_service.scm.SCM')
    @patch("module_build_service.models.ModuleBuild.from_module_event")
    @patch("module_build_service.scm.SCM")
    def test_init_when_get_latest_raises(self, mocked_scm, mocked_from_module_event):
        FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
                '7035bd33614972ac66559ac1fdd019ff6027ad22',
                get_latest_raise=True)
        FakeSCM(
            mocked_scm,
            "testmodule",
            "testmodule.yaml",
            "7035bd33614972ac66559ac1fdd019ff6027ad22",
            get_latest_raise=True,
        )
        msg = module_build_service.messaging.MBSModule(
            msg_id=None, module_build_id=2, module_build_state='init')
            msg_id=None, module_build_id=2, module_build_state="init")
        with make_session(conf) as session:
            build = session.query(ModuleBuild).filter_by(id=2).one()
            mocked_from_module_event.return_value = build
@@ -183,4 +194,4 @@ class TestModuleInit:
            session.refresh(build)
            # Make sure the module entered the failed state
            assert build.state == 4, build.state
            assert 'Failed to get the latest commit for' in build.state_reason
            assert "Failed to get the latest commit for" in build.state_reason

@@ -48,31 +48,31 @@ class TestModuleWait:
        except Exception:
            pass

    @patch('module_build_service.builder.GenericBuilder.create_from_module')
    @patch('module_build_service.models.ModuleBuild.from_module_event')
    @patch("module_build_service.builder.GenericBuilder.create_from_module")
    @patch("module_build_service.models.ModuleBuild.from_module_event")
    def test_init_basic(self, from_module_event, create_builder):
        builder = mock.Mock()
        builder.get_disttag_srpm.return_value = 'some srpm disttag'
        builder.get_disttag_srpm.return_value = "some srpm disttag"
        builder.build.return_value = 1234, 1, "", None
        builder.module_build_tag = {'name': 'some-tag-build'}
        builder.module_build_tag = {"name": "some-tag-build"}
        create_builder.return_value = builder
        mocked_module_build = mock.Mock()
        mocked_module_build.name = 'foo'
        mocked_module_build.stream = 'stream'
        mocked_module_build.version = '1'
        mocked_module_build.context = '1234567'
        mocked_module_build.name = "foo"
        mocked_module_build.stream = "stream"
        mocked_module_build.version = "1"
        mocked_module_build.context = "1234567"
        mocked_module_build.state = 1
        mocked_module_build.id = 1
        mocked_module_build.json.return_value = {
            'name': 'foo',
            'stream': '1',
            'version': 1,
            'state': 'some state',
            'id': 1
            "name": "foo",
            "stream": "1",
            "version": 1,
            "state": "some state",
            "id": 1,
        }

        formatted_testmodule_yml_path = os.path.join(
            base_dir, 'staged_data', 'formatted_testmodule.yaml')
            base_dir, "staged_data", "formatted_testmodule.yaml")
        mmd = Modulemd.Module().new_from_file(formatted_testmodule_yml_path)
        mmd.upgrade()
        mocked_module_build.id = 1
@@ -81,18 +81,21 @@ class TestModuleWait:

        from_module_event.return_value = mocked_module_build

        msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=1,
                                                       module_build_state='some state')
        with patch.object(module_build_service.resolver, 'system_resolver'):
        msg = module_build_service.messaging.MBSModule(
            msg_id=None, module_build_id=1, module_build_state="some state")
        with patch.object(module_build_service.resolver, "system_resolver"):
            self.fn(config=self.config, session=self.session, msg=msg)

@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch(
|
||||
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={"build": [], "srpm-build": []},
|
||||
)
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
@patch("module_build_service.resolver.DBResolver")
|
||||
@patch("module_build_service.resolver.GenericResolver")
|
||||
def test_new_repo_called_when_macros_reused(
|
||||
self, generic_resolver, resolver, create_builder, dbg):
|
||||
self, generic_resolver, resolver, create_builder, dbg
|
||||
):
|
||||
"""
|
||||
Test that newRepo is called when module-build-macros build is reused.
|
||||
"""
|
||||
@@ -104,35 +107,45 @@ class TestModuleWait:
|
||||
builder = mock.MagicMock()
|
||||
builder.koji_session = koji_session
|
||||
builder.module_build_tag = {"name": "module-123-build"}
|
||||
builder.get_disttag_srpm.return_value = 'some srpm disttag'
|
||||
builder.build.return_value = (1234, koji.BUILD_STATES['COMPLETE'], "",
|
||||
"module-build-macros-1-1")
|
||||
builder.get_disttag_srpm.return_value = "some srpm disttag"
|
||||
builder.build.return_value = (
|
||||
1234,
|
||||
koji.BUILD_STATES["COMPLETE"],
|
||||
"",
|
||||
"module-build-macros-1-1",
|
||||
)
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'db'
|
||||
resolver.backend = "db"
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
|
||||
with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
|
||||
msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
|
||||
module_build_state='some state')
|
||||
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
|
||||
msg = module_build_service.messaging.MBSModule(
|
||||
msg_id=None, module_build_id=2, module_build_state="some state")
|
||||
module_build_service.scheduler.handlers.modules.wait(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
koji_session.newRepo.assert_called_once_with("module-123-build")
|
||||
|
||||
# When module-build-macros is reused, it still has to appear only
|
||||
# once in database.
|
||||
builds_count = db.session.query(ComponentBuild).filter_by(
|
||||
package="module-build-macros", module_id=2).count()
|
||||
builds_count = (
|
||||
db.session.query(ComponentBuild)
|
||||
.filter_by(package="module-build-macros", module_id=2)
|
||||
.count()
|
||||
)
|
||||
assert builds_count == 1
|
||||
|
||||
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch(
|
||||
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={"build": [], "srpm-build": []},
|
||||
)
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
@patch("module_build_service.resolver.DBResolver")
|
||||
@patch("module_build_service.resolver.GenericResolver")
|
||||
def test_new_repo_not_called_when_macros_not_reused(
|
||||
self, generic_resolver, resolver, create_builder, dbg):
|
||||
self, generic_resolver, resolver, create_builder, dbg
|
||||
):
|
||||
"""
|
||||
Test that newRepo is called everytime for module-build-macros
|
||||
"""
|
||||
@@ -144,29 +157,36 @@ class TestModuleWait:
|
||||
builder = mock.MagicMock()
|
||||
builder.koji_session = koji_session
|
||||
builder.module_build_tag = {"name": "module-123-build"}
|
||||
builder.get_disttag_srpm.return_value = 'some srpm disttag'
|
||||
builder.build.return_value = (1234, koji.BUILD_STATES['BUILDING'], "",
|
||||
"module-build-macros-1-1")
|
||||
builder.get_disttag_srpm.return_value = "some srpm disttag"
|
||||
builder.build.return_value = (
|
||||
1234,
|
||||
koji.BUILD_STATES["BUILDING"],
|
||||
"",
|
||||
"module-build-macros-1-1",
|
||||
)
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'db'
|
||||
resolver.backend = "db"
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
|
||||
with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
|
||||
msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
|
||||
module_build_state='some state')
|
||||
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
|
||||
msg = module_build_service.messaging.MBSModule(
|
||||
msg_id=None, module_build_id=2, module_build_state="some state")
|
||||
module_build_service.scheduler.handlers.modules.wait(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
assert koji_session.newRepo.called
|
||||
|
||||
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch(
|
||||
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={"build": [], "srpm-build": []},
|
||||
)
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
@patch("module_build_service.resolver.DBResolver")
|
||||
@patch("module_build_service.resolver.GenericResolver")
|
||||
def test_set_cg_build_koji_tag_fallback_to_default(
|
||||
self, generic_resolver, resolver, create_builder, dbg):
|
||||
self, generic_resolver, resolver, create_builder, dbg
|
||||
):
|
||||
"""
|
||||
Test that build.cg_build_koji_tag fallbacks to default tag.
|
||||
"""
|
||||
@@ -182,39 +202,59 @@ class TestModuleWait:
|
||||
builder = mock.MagicMock()
|
||||
builder.koji_session = koji_session
|
||||
builder.module_build_tag = {"name": "module-123-build"}
|
||||
builder.get_disttag_srpm.return_value = 'some srpm disttag'
|
||||
builder.build.return_value = (1234, koji.BUILD_STATES['BUILDING'], "",
|
||||
"module-build-macros-1-1")
|
||||
builder.get_disttag_srpm.return_value = "some srpm disttag"
|
||||
builder.build.return_value = (
|
||||
1234,
|
||||
koji.BUILD_STATES["BUILDING"],
|
||||
"",
|
||||
"module-build-macros-1-1",
|
||||
)
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'db'
|
||||
resolver.backend = "db"
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
resolver.get_module_build_dependencies.return_value = {
|
||||
"module-bootstrap-tag": [base_mmd]}
|
||||
"module-bootstrap-tag": [base_mmd]
|
||||
}
|
||||
|
||||
with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
|
||||
msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
|
||||
module_build_state='some state')
|
||||
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
|
||||
msg = module_build_service.messaging.MBSModule(
|
||||
msg_id=None, module_build_id=2, module_build_state="some state")
|
||||
module_build_service.scheduler.handlers.modules.wait(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
module_build = ModuleBuild.query.filter_by(id=2).one()
|
||||
assert module_build.cg_build_koji_tag == "modular-updates-candidate"
|
||||
|
||||
    @pytest.mark.parametrize('koji_cg_tag_build,expected_cg_koji_build_tag', [
        [True, 'f27-modular-updates-candidate'],
        [False, None],
    ])
    @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
           return_value={'build': [], 'srpm-build': []})
    @pytest.mark.parametrize(
        "koji_cg_tag_build,expected_cg_koji_build_tag",
        [
            [True, "f27-modular-updates-candidate"],
            [False, None]
        ],
    )
    @patch(
        "module_build_service.builder.GenericBuilder.default_buildroot_groups",
        return_value={"build": [], "srpm-build": []},
    )
    @patch("module_build_service.builder.GenericBuilder.create_from_module")
    @patch('module_build_service.resolver.DBResolver')
    @patch('module_build_service.resolver.GenericResolver')
    @patch("module_build_service.config.Config.base_module_names",
           new_callable=mock.PropertyMock, return_value=["base-runtime", "platform"])
    @patch("module_build_service.resolver.DBResolver")
    @patch("module_build_service.resolver.GenericResolver")
    @patch(
        "module_build_service.config.Config.base_module_names",
        new_callable=mock.PropertyMock,
        return_value=["base-runtime", "platform"],
    )
    def test_set_cg_build_koji_tag(
            self, cfg, generic_resolver, resolver, create_builder, dbg,
            koji_cg_tag_build, expected_cg_koji_build_tag):
        self,
        cfg,
        generic_resolver,
        resolver,
        create_builder,
        dbg,
        koji_cg_tag_build,
        expected_cg_koji_build_tag,
    ):
        """
        Test that build.cg_build_koji_tag is set.
        """
@@ -230,23 +270,33 @@ class TestModuleWait:
        builder = mock.MagicMock()
        builder.koji_session = koji_session
        builder.module_build_tag = {"name": "module-123-build"}
        builder.get_disttag_srpm.return_value = 'some srpm disttag'
        builder.build.return_value = (1234, koji.BUILD_STATES['BUILDING'], "",
                                      "module-build-macros-1-1")
        builder.get_disttag_srpm.return_value = "some srpm disttag"
        builder.build.return_value = (
            1234,
            koji.BUILD_STATES["BUILDING"],
            "",
            "module-build-macros-1-1",
        )
        create_builder.return_value = builder

        resolver = mock.MagicMock()
        resolver.backend = 'db'
        resolver.backend = "db"
        resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
        resolver.get_module_build_dependencies.return_value = {
            "module-bootstrap-tag": [base_mmd]}
            "module-bootstrap-tag": [base_mmd]
        }

        with patch.object(module_build_service.scheduler.handlers.modules.conf,
                          'koji_cg_tag_build', new=koji_cg_tag_build):
            with patch.object(module_build_service.resolver, 'system_resolver', new=resolver):
                msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
                                                               module_build_state='some state')
        with patch.object(
            module_build_service.scheduler.handlers.modules.conf,
            "koji_cg_tag_build",
            new=koji_cg_tag_build,
        ):
            with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
                msg = module_build_service.messaging.MBSModule(
                    msg_id=None, module_build_id=2, module_build_state="some state"
                )
                module_build_service.scheduler.handlers.modules.wait(
                    config=conf, session=db.session, msg=msg)
                    config=conf, session=db.session, msg=msg
                )
                module_build = ModuleBuild.query.filter_by(id=2).one()
                assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

@@ -30,30 +30,35 @@ import six.moves.queue as queue
from datetime import datetime, timedelta


@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
       return_value={'build': [], 'srpm-build': []})
@patch(
    "module_build_service.builder.GenericBuilder.default_buildroot_groups",
    return_value={"build": [], "srpm-build": []},
)
@patch("module_build_service.scheduler.consumer.get_global_consumer")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
class TestPoller:

    def setup_method(self, test_method):
        reuse_component_init_data()

        self.p_read_config = patch('koji.read_config', return_value={
            'authtype': 'kerberos',
            'timeout': 60,
            'server': 'http://koji.example.com/'
        })
        self.p_read_config = patch(
            "koji.read_config",
            return_value={
                "authtype": "kerberos",
                "timeout": 60,
                "server": "http://koji.example.com/",
            },
        )
        self.mock_read_config = self.p_read_config.start()

    def teardown_method(self, test_method):
        self.p_read_config.stop()
        clean_database()

    @pytest.mark.parametrize('fresh', [True, False])
    @patch('module_build_service.utils.batches.start_build_component')
    def test_process_paused_module_builds(self, start_build_component, create_builder,
                                          global_consumer, dbg, fresh):
    @pytest.mark.parametrize("fresh", [True, False])
    @patch("module_build_service.utils.batches.start_build_component")
    def test_process_paused_module_builds(
        self, start_build_component, create_builder, global_consumer, dbg, fresh
    ):
        """
        Tests general use-case of process_paused_module_builds.
        """
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_trigger_new_repo_when_failed(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
"""
|
||||
Tests that we call koji_sesion.newRepo when newRepo task failed.
|
||||
"""
|
||||
@@ -111,8 +117,8 @@ class TestPoller:
|
||||
global_consumer.return_value = consumer
|
||||
|
||||
koji_session = ClientSession.return_value
|
||||
koji_session.getTag = lambda tag_name: {'name': tag_name}
|
||||
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['FAILED']}
|
||||
koji_session.getTag = lambda tag_name: {"name": tag_name}
|
||||
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["FAILED"]}
|
||||
koji_session.newRepo.return_value = 123456
|
||||
|
||||
builder = mock.MagicMock()
|
||||
@@ -133,10 +139,11 @@ class TestPoller:
|
||||
koji_session.newRepo.assert_called_once_with(
|
||||
"module-testmodule-master-20170219191323-c40c156c-build")
|
||||
|
||||
@patch.dict('sys.modules', krbV=mock.MagicMock())
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_trigger_new_repo_when_succeeded(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
"""
|
||||
Tests that we do not call koji_sesion.newRepo when newRepo task
|
||||
succeeded.
|
||||
@@ -146,8 +153,8 @@ class TestPoller:
|
||||
global_consumer.return_value = consumer
|
||||
|
||||
koji_session = ClientSession.return_value
|
||||
koji_session.getTag = lambda tag_name: {'name': tag_name}
|
||||
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
|
||||
koji_session.getTag = lambda tag_name: {"name": tag_name}
|
||||
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
|
||||
koji_session.newRepo.return_value = 123456
|
||||
|
||||
builder = mock.MagicMock()
|
||||
@@ -173,7 +180,8 @@ class TestPoller:
|
||||
assert module_build.new_repo_task_id == 0
|
||||
|
||||
def test_process_paused_module_builds_waiting_for_repo(
|
||||
self, create_builder, global_consumer, dbg):
|
||||
self, create_builder, global_consumer, dbg
|
||||
):
|
||||
"""
|
||||
Tests that process_paused_module_builds does not start new batch
|
||||
when we are waiting for repo.
|
||||
@@ -206,10 +214,11 @@ class TestPoller:
|
||||
for component in components:
|
||||
assert component.state is None
|
||||
|
||||
@patch.dict('sys.modules', krbV=mock.MagicMock())
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_old_build_targets_are_not_associated_with_any_module_builds(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
consumer = mock.MagicMock()
|
||||
consumer.incoming = queue.Queue()
|
||||
global_consumer.return_value = consumer
|
||||
@@ -217,8 +226,8 @@ class TestPoller:
|
||||
koji_session = ClientSession.return_value
|
||||
# No created module build has any of these tags.
|
||||
koji_session.getBuildTargets.return_value = [
|
||||
{'dest_tag_name': 'module-xxx-1'},
|
||||
{'dest_tag_name': 'module-yyy-2'},
|
||||
{"dest_tag_name": "module-xxx-1"},
|
||||
{"dest_tag_name": "module-yyy-2"},
|
||||
]
|
||||
|
||||
hub = mock.MagicMock()
|
||||
@@ -227,17 +236,16 @@ class TestPoller:
|
||||
|
||||
koji_session.deleteBuildTarget.assert_not_called()
|
||||
|
||||
@patch.dict('sys.modules', krbV=mock.MagicMock())
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_dont_delete_base_module_build_target(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
module_build = models.ModuleBuild.query.filter_by(id=3).one()
|
||||
|
||||
koji_session = ClientSession.return_value
|
||||
# No created module build has any of these tags.
|
||||
koji_session.getBuildTargets.return_value = [
|
||||
{'dest_tag_name': module_build.koji_tag},
|
||||
]
|
||||
koji_session.getBuildTargets.return_value = [{"dest_tag_name": module_build.koji_tag}]
|
||||
|
||||
consumer = mock.MagicMock()
|
||||
consumer.incoming = queue.Queue()
|
||||
@@ -245,7 +253,7 @@ class TestPoller:
|
||||
|
||||
# If module build's name is one of base module names, build target
|
||||
# should not be deleted.
|
||||
with patch.object(conf, 'base_module_names', new=[module_build.name]):
|
||||
with patch.object(conf, "base_module_names", new=[module_build.name]):
|
||||
|
||||
hub = mock.MagicMock()
|
||||
poller = MBSProducer(hub)
|
||||
@@ -253,17 +261,16 @@ class TestPoller:
|
||||
|
||||
koji_session.deleteBuildTarget.assert_not_called()
|
||||
|
||||
@patch.dict('sys.modules', krbV=mock.MagicMock())
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_dont_delete_build_target_for_unfinished_module_builds(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
module_build = models.ModuleBuild.query.filter_by(id=3).one()
|
||||
|
||||
koji_session = ClientSession.return_value
|
||||
# No created module build has any of these tags.
|
||||
koji_session.getBuildTargets.return_value = [
|
||||
{'dest_tag_name': module_build.koji_tag},
|
||||
]
|
||||
koji_session.getBuildTargets.return_value = [{"dest_tag_name": module_build.koji_tag}]
|
||||
|
||||
consumer = mock.MagicMock()
|
||||
consumer.incoming = queue.Queue()
|
||||
@@ -271,7 +278,7 @@ class TestPoller:
|
||||
|
||||
# Each time when a module build is in one of these state, build target
|
||||
# should not be deleted.
|
||||
for state in ['init', 'wait', 'build']:
|
||||
for state in ["init", "wait", "build"]:
|
||||
module_build.state = state
|
||||
db.session.commit()
|
||||
|
||||
@@ -281,19 +288,20 @@ class TestPoller:
|
||||
|
||||
koji_session.deleteBuildTarget.assert_not_called()
|
||||
|
||||
@patch.dict('sys.modules', krbV=mock.MagicMock())
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_only_delete_build_target_with_allowed_koji_tag_prefix(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()
|
||||
module_build_3 = models.ModuleBuild.query.filter_by(id=3).one()
|
||||
|
||||
# Only module build 1's build target should be deleted.
|
||||
module_build_2.koji_tag = 'module-tag1'
|
||||
module_build_2.state = models.BUILD_STATES['done']
|
||||
module_build_2.koji_tag = "module-tag1"
|
||||
module_build_2.state = models.BUILD_STATES["done"]
|
||||
# Ensure to exceed the koji_target_delete_time easily later for deletion
|
||||
module_build_2.time_completed = datetime.utcnow() - timedelta(hours=24)
|
||||
module_build_3.koji_tag = 'f28'
|
||||
module_build_3.koji_tag = "f28"
|
||||
db.session.commit()
|
||||
db.session.refresh(module_build_2)
|
||||
db.session.refresh(module_build_3)
|
||||
@@ -301,25 +309,16 @@ class TestPoller:
|
||||
koji_session = ClientSession.return_value
|
||||
# No created module build has any of these tags.
|
||||
koji_session.getBuildTargets.return_value = [
|
||||
{
|
||||
'id': 1,
|
||||
'dest_tag_name': module_build_2.koji_tag,
|
||||
'name': module_build_2.koji_tag
|
||||
},
|
||||
{
|
||||
'id': 2,
|
||||
'dest_tag_name': module_build_3.koji_tag,
|
||||
'name': module_build_3.koji_tag
|
||||
},
|
||||
{"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag},
|
||||
{"id": 2, "dest_tag_name": module_build_3.koji_tag, "name": module_build_3.koji_tag},
|
||||
]
|
||||
|
||||
consumer = mock.MagicMock()
|
||||
consumer.incoming = queue.Queue()
|
||||
global_consumer.return_value = consumer
|
||||
|
||||
with patch.object(conf, 'koji_tag_prefixes',
|
||||
new=['module', 'another-prefix']):
|
||||
with patch.object(conf, 'koji_target_delete_time', new=60):
|
||||
with patch.object(conf, "koji_tag_prefixes", new=["module", "another-prefix"]):
|
||||
with patch.object(conf, "koji_target_delete_time", new=60):
|
||||
hub = mock.MagicMock()
|
||||
poller = MBSProducer(hub)
|
||||
poller.delete_old_koji_targets(conf, db.session)
|
||||
@@ -327,15 +326,16 @@ class TestPoller:
|
||||
koji_session.deleteBuildTarget.assert_called_once_with(1)
|
||||
koji_session.krb_login.assert_called_once()
|
||||
|
||||
@patch.dict('sys.modules', krbV=mock.MagicMock())
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch.dict("sys.modules", krbV=mock.MagicMock())
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_cant_delete_build_target_if_not_reach_delete_time(
|
||||
self, ClientSession, create_builder, global_consumer, dbg):
|
||||
self, ClientSession, create_builder, global_consumer, dbg
|
||||
):
|
||||
module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()
|
||||
|
||||
# Only module build 1's build target should be deleted.
|
||||
module_build_2.koji_tag = 'module-tag1'
|
||||
module_build_2.state = models.BUILD_STATES['done']
|
||||
module_build_2.koji_tag = "module-tag1"
|
||||
module_build_2.state = models.BUILD_STATES["done"]
|
||||
# Ensure to exceed the koji_target_delete_time easily later for deletion
|
||||
module_build_2.time_completed = datetime.utcnow() - timedelta(minutes=5)
|
||||
db.session.commit()
|
||||
@@ -344,18 +344,14 @@ class TestPoller:
|
||||
koji_session = ClientSession.return_value
|
||||
# No created module build has any of these tags.
|
||||
koji_session.getBuildTargets.return_value = [
|
||||
{
|
||||
'id': 1,
|
||||
'dest_tag_name': module_build_2.koji_tag,
|
||||
'name': module_build_2.koji_tag
|
||||
},
|
||||
{"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag}
|
||||
]
|
||||
|
||||
consumer = mock.MagicMock()
|
||||
consumer.incoming = queue.Queue()
|
||||
global_consumer.return_value = consumer
|
||||
|
||||
with patch.object(conf, 'koji_tag_prefixes', new=['module']):
|
||||
with patch.object(conf, "koji_tag_prefixes", new=["module"]):
|
||||
# Use default koji_target_delete_time in config. That time is long
|
||||
# enough for test.
|
||||
hub = mock.MagicMock()
|
||||
@@ -364,9 +360,8 @@ class TestPoller:
|
||||
|
||||
koji_session.deleteBuildTarget.assert_not_called()
|
||||
|
||||
@pytest.mark.parametrize('state', ['init', 'wait'])
|
||||
def test_process_waiting_module_build(
|
||||
self, create_builder, global_consumer, dbg, state):
|
||||
@pytest.mark.parametrize("state", ["init", "wait"])
|
||||
def test_process_waiting_module_build(self, create_builder, global_consumer, dbg, state):
|
||||
""" Test that processing old waiting module builds works. """
|
||||
|
||||
consumer = mock.MagicMock()
|
||||
@@ -396,9 +391,10 @@ class TestPoller:
|
||||
# ensure the time_modified was changed.
|
||||
assert module_build.time_modified > original
|
||||
|
||||
@pytest.mark.parametrize('state', ['init', 'wait'])
|
||||
@pytest.mark.parametrize("state", ["init", "wait"])
|
||||
def test_process_waiting_module_build_not_old_enough(
|
||||
self, create_builder, global_consumer, dbg, state):
|
||||
self, create_builder, global_consumer, dbg, state
):
""" Test that we do not process young waiting builds. """

consumer = mock.MagicMock()
@@ -426,8 +422,7 @@ class TestPoller:
# Ensure we did *not* process the 9 minute-old build.
assert consumer.incoming.qsize() == 0

def test_process_waiting_module_build_none_found(
self, create_builder, global_consumer, dbg):
def test_process_waiting_module_build_none_found(self, create_builder, global_consumer, dbg):
""" Test nothing happens when no module builds are waiting. """

consumer = mock.MagicMock()
@@ -446,8 +441,7 @@ class TestPoller:
# Ensure we did *not* process any of the non-waiting builds.
assert consumer.incoming.qsize() == 0

def test_cleanup_stale_failed_builds(
self, create_builder, global_consumer, dbg):
def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg):
""" Test that one of the two module builds gets to the garbage state when running
cleanup_stale_failed_builds.
"""
@@ -455,14 +449,14 @@ class TestPoller:
create_builder.return_value = builder
module_build_one = models.ModuleBuild.query.get(2)
module_build_two = models.ModuleBuild.query.get(3)
module_build_one.state = models.BUILD_STATES['failed']
module_build_one.state = models.BUILD_STATES["failed"]
module_build_one.time_modified = datetime.utcnow() - timedelta(
days=conf.cleanup_failed_builds_time + 1)
module_build_two.time_modified = datetime.utcnow()
module_build_two.state = models.BUILD_STATES['failed']
module_build_two.state = models.BUILD_STATES["failed"]
failed_component = models.ComponentBuild.query.filter_by(
package='tangerine', module_id=3).one()
failed_component.state = koji.BUILD_STATES['FAILED']
package="tangerine", module_id=3).one()
failed_component.state = koji.BUILD_STATES["FAILED"]
failed_component.tagged = False
failed_component.tagged_in_final = False
db.session.add(failed_component)
@@ -481,34 +475,35 @@ class TestPoller:
poller.cleanup_stale_failed_builds(conf, db.session)
db.session.refresh(module_build_two)
# Make sure module_build_one was transitioned to garbage
assert module_build_one.state == models.BUILD_STATES['garbage']
state_reason = ('The module was garbage collected since it has failed over {0} day(s) ago'
.format(conf.cleanup_failed_builds_time))
assert module_build_one.state == models.BUILD_STATES["garbage"]
state_reason = (
"The module was garbage collected since it has failed over {0} day(s) ago"
.format(conf.cleanup_failed_builds_time)
)
assert module_build_one.state_reason == state_reason
# Make sure all the components are marked as untagged in the database
for component in module_build_one.component_builds:
assert not component.tagged
assert not component.tagged_in_final
# Make sure module_build_two stayed the same
assert module_build_two.state == models.BUILD_STATES['failed']
assert module_build_two.state == models.BUILD_STATES["failed"]
# Make sure the builds were untagged
builder.untag_artifacts.assert_called_once_with([
'perl-Tangerine-0.23-1.module+0+d027b723',
'perl-List-Compare-0.53-5.module+0+d027b723',
'tangerine-0.22-3.module+0+d027b723',
'module-build-macros-0.1-1.module+0+d027b723'
"perl-Tangerine-0.23-1.module+0+d027b723",
"perl-List-Compare-0.53-5.module+0+d027b723",
"tangerine-0.22-3.module+0+d027b723",
"module-build-macros-0.1-1.module+0+d027b723",
])

def test_cleanup_stale_failed_builds_no_components(
self, create_builder, global_consumer, dbg):
def test_cleanup_stale_failed_builds_no_components(self, create_builder, global_consumer, dbg):
""" Test that a module build without any components built gets to the garbage state when
running cleanup_stale_failed_builds.
"""
module_build_one = models.ModuleBuild.query.get(1)
module_build_two = models.ModuleBuild.query.get(2)
module_build_one.state = models.BUILD_STATES['failed']
module_build_one.state = models.BUILD_STATES["failed"]
module_build_one.time_modified = datetime.utcnow()
module_build_two.state = models.BUILD_STATES['failed']
module_build_two.state = models.BUILD_STATES["failed"]
module_build_two.time_modified = datetime.utcnow() - timedelta(
days=conf.cleanup_failed_builds_time + 1)
module_build_two.koji_tag = None
@@ -531,19 +526,21 @@ class TestPoller:
poller.cleanup_stale_failed_builds(conf, db.session)
db.session.refresh(module_build_two)
# Make sure module_build_two was transitioned to garbage
assert module_build_two.state == models.BUILD_STATES['garbage']
state_reason = ('The module was garbage collected since it has failed over {0} day(s) ago'
.format(conf.cleanup_failed_builds_time))
assert module_build_two.state == models.BUILD_STATES["garbage"]
state_reason = (
"The module was garbage collected since it has failed over {0} day(s) ago"
.format(conf.cleanup_failed_builds_time)
)
assert module_build_two.state_reason == state_reason
# Make sure module_build_one stayed the same
assert module_build_one.state == models.BUILD_STATES['failed']
assert module_build_one.state == models.BUILD_STATES["failed"]
# Make sure that the builder was never instantiated
create_builder.assert_not_called()

@pytest.mark.parametrize('test_state', [models.BUILD_STATES[state]
for state in conf.cleanup_stuck_builds_states])
def test_cancel_stuck_module_builds(
self, create_builder, global_consumer, dbg, test_state):
@pytest.mark.parametrize(
"test_state", [models.BUILD_STATES[state] for state in conf.cleanup_stuck_builds_states]
)
def test_cancel_stuck_module_builds(self, create_builder, global_consumer, dbg, test_state):

module_build1 = models.ModuleBuild.query.get(1)
module_build1.state = test_state
@@ -576,16 +573,16 @@ class TestPoller:
assert len(module) == 1
assert module[0].id == 2

@pytest.mark.parametrize('tagged, tagged_in_final', ([True, False], [True, False]))
@pytest.mark.parametrize("tagged, tagged_in_final", ([True, False], [True, False]))
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_sync_koji_build_tags(
self, ClientSession, create_builder, global_consumer, dbg,
tagged, tagged_in_final):
self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final
):
module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()

# Only module build 1's build target should be deleted.
module_build_2.koji_tag = 'module-tag1'
module_build_2.state = models.BUILD_STATES['build']
module_build_2.koji_tag = "module-tag1"
module_build_2.state = models.BUILD_STATES["build"]
c = module_build_2.current_batch()[0]
c.state = koji.BUILD_STATES["COMPLETE"]
c.tagged_in_final = False
@@ -598,19 +595,9 @@ class TestPoller:
ret = []

if tagged:
ret.append(
{
'id': 1,
'name': module_build_2.koji_tag + "-build"
},
)
ret.append({"id": 1, "name": module_build_2.koji_tag + "-build"})
if tagged_in_final:
ret.append(
{
'id': 2,
'name': module_build_2.koji_tag
},
)
ret.append({"id": 2, "name": module_build_2.koji_tag})
koji_session.listTags.return_value = ret

consumer = mock.MagicMock()

@@ -29,8 +29,7 @@ from tests import conf, db, app, scheduler_init_data


class TestRepoDone:

@mock.patch('module_build_service.models.ModuleBuild.from_repo_done_event')
@mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event")
def test_no_match(self, from_repo_done_event):
""" Test that when a repo msg hits us and we have no match,
that we do nothing gracefully.
@@ -38,68 +37,86 @@ class TestRepoDone:
scheduler_init_data()
from_repo_done_event.return_value = None
msg = module_build_service.messaging.KojiRepoChange(
'no matches for this...', '2016-some-nonexistent-build')
module_build_service.scheduler.handlers.repos.done(
config=conf, session=db.session, msg=msg)
"no matches for this...", "2016-some-nonexistent-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.recover_orphaned_artifact', return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_average_build_time',
return_value=0.0)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.list_tasks_for_components',
return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_ready', return_value=True)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_session')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.build')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_connect')
def test_a_single_match(self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt,
mock_uea):
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.recover_orphaned_artifact",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.get_average_build_time",
return_value=0.0,
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.list_tasks_for_components",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready",
return_value=True,
)
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build")
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
def test_a_single_match(
self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea
):
""" Test that when a repo msg hits us and we have a single match.
"""
scheduler_init_data()
get_session.return_value = mock.Mock(), 'development'
build_fn.return_value = 1234, 1, '', None
get_session.return_value = mock.Mock(), "development"
build_fn.return_value = 1234, 1, "", None

msg = module_build_service.messaging.KojiRepoChange(
'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build')
module_build_service.scheduler.handlers.repos.done(
config=conf, session=db.session, msg=msg)
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)
build_fn.assert_called_once_with(
artifact_name='tangerine',
source=('https://src.fedoraproject.org/rpms/tangerine?'
'#fbed359411a1baa08d4a88e0d12d426fbf8f602c'))
artifact_name="tangerine",
source=(
"https://src.fedoraproject.org/rpms/tangerine?"
"#fbed359411a1baa08d4a88e0d12d426fbf8f602c"
),
)

@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.finalize')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.recover_orphaned_artifact', return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_average_build_time',
return_value=0.0)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.list_tasks_for_components',
return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_ready', return_value=True)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_session')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.build')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_connect')
def test_a_single_match_finalize(self, connect, build_fn, get_session, ready, list_tasks_fn,
mock_gabt, mock_uea, finalizer):
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.finalize")
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.recover_orphaned_artifact",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.get_average_build_time",
return_value=0.0,
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.list_tasks_for_components",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready",
return_value=True,
)
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build")
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
def test_a_single_match_finalize(
self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer
):
""" Test that when a repo msg hits us and we have a single match.
"""
scheduler_init_data(tangerine_state=1)
get_session.return_value = mock.Mock(), 'development'
build_fn.return_value = 1234, 1, '', None
get_session.return_value = mock.Mock(), "development"
build_fn.return_value = 1234, 1, "", None

# Ensure the time_completed is None, so we can test it is set to
# some date once the build is finalized.
@@ -117,94 +134,109 @@ class TestRepoDone:
finalizer.side_effect = mocked_finalizer

msg = module_build_service.messaging.KojiRepoChange(
'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build')
module_build_service.scheduler.handlers.repos.done(
config=conf, session=db.session, msg=msg)
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)

finalizer.assert_called_once()

@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.recover_orphaned_artifact', return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_average_build_time',
return_value=0.0)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.list_tasks_for_components',
return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_ready', return_value=True)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_session')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.build')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_connect')
def test_a_single_match_build_fail(self, connect, build_fn, config, ready, list_tasks_fn,
mock_gabt, mock_uea):
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.recover_orphaned_artifact",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.get_average_build_time",
return_value=0.0,
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.list_tasks_for_components",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready",
return_value=True,
)
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build")
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
def test_a_single_match_build_fail(
self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea
):
""" Test that when a KojiModuleBuilder.build fails, the build is
marked as failed with proper state_reason.
"""
scheduler_init_data()
config.return_value = mock.Mock(), 'development'
build_fn.return_value = None, 4, 'Failed to submit artifact tangerine to Koji', None
config.return_value = mock.Mock(), "development"
build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None

msg = module_build_service.messaging.KojiRepoChange(
'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build')
module_build_service.scheduler.handlers.repos.done(
config=conf, session=db.session, msg=msg)
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)
build_fn.assert_called_once_with(
artifact_name='tangerine',
source=('https://src.fedoraproject.org/rpms/tangerine?'
'#fbed359411a1baa08d4a88e0d12d426fbf8f602c'))
component_build = module_build_service.models.ComponentBuild.query\
.filter_by(package='tangerine').one()
assert component_build.state_reason == 'Failed to submit artifact tangerine to Koji'
artifact_name="tangerine",
source=(
"https://src.fedoraproject.org/rpms/tangerine?"
"#fbed359411a1baa08d4a88e0d12d426fbf8f602c"
),
)
component_build = (
module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one())
assert component_build.state_reason == "Failed to submit artifact tangerine to Koji"

@mock.patch('module_build_service.scheduler.handlers.repos.log.info')
@mock.patch("module_build_service.scheduler.handlers.repos.log.info")
def test_erroneous_regen_repo_received(self, mock_log_info):
""" Test that when an unexpected KojiRepoRegen message is received, the module doesn't
complete or go to the next build batch.
"""
scheduler_init_data(1)
msg = module_build_service.messaging.KojiRepoChange(
'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build')
component_build = module_build_service.models.ComponentBuild.query\
.filter_by(package='tangerine').one()
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
component_build = (
module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one())
component_build.tagged = False
db.session.add(component_build)
db.session.commit()
module_build_service.scheduler.handlers.repos.done(
config=conf, session=db.session, msg=msg)
module_build_service.scheduler.handlers.repos.done(config=conf, session=db.session, msg=msg)
mock_log_info.assert_called_with(
'Ignoring repo regen, because not all components are tagged.')
"Ignoring repo regen, because not all components are tagged."
)
module_build = module_build_service.models.ModuleBuild.query.get(2)
# Make sure the module build didn't transition since all the components weren't tagged
assert module_build.state == module_build_service.models.BUILD_STATES['build']
assert module_build.state == module_build_service.models.BUILD_STATES["build"]

@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.list_tasks_for_components',
return_value=[])
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_ready', return_value=True)
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.get_session')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.build')
@mock.patch('module_build_service.builder.KojiModuleBuilder.'
'KojiModuleBuilder.buildroot_connect')
@mock.patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={'build': [], 'srpm-build': []})
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
"KojiModuleBuilder.list_tasks_for_components",
return_value=[],
)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_ready",
return_value=True,
)
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.build")
@mock.patch(
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
@mock.patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={"build": [], "srpm-build": []},
)
def test_failed_component_build(self, dbg, connect, build_fn, config, ready, list_tasks_fn):
""" Test that when a KojiModuleBuilder.build fails, the build is
marked as failed with proper state_reason.
"""
with app.app_context():
scheduler_init_data(3)
config.return_value = mock.Mock(), 'development'
build_fn.return_value = None, 4, 'Failed to submit artifact x to Koji', None
config.return_value = mock.Mock(), "development"
build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None

msg = module_build_service.messaging.KojiRepoChange(
'some_msg_id', 'module-testmodule-master-20170109091357-7c29193d-build')
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, session=db.session, msg=msg)
module_build = module_build_service.models.ModuleBuild.query.get(2)

@@ -34,19 +34,17 @@ import koji


class TestTagTagged:

def setup_method(self, test_method):
reuse_component_init_data()

@mock.patch('module_build_service.models.ModuleBuild.from_tag_change_event')
@mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event")
def test_no_matching_module(self, from_tag_change_event):
""" Test that when a tag msg hits us and we have no match,
that we do nothing gracefully.
"""
from_tag_change_event.return_value = None
msg = module_build_service.messaging.KojiTagChange(
'no matches for this...', '2016-some-nonexistent-build', 'artifact',
'artifact-1.2-1')
"no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1")
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@@ -55,13 +53,18 @@ class TestTagTagged:
that we do nothing gracefully.
"""
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'artifact', 'artifact-1.2-1')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"artifact",
"artifact-1.2-1",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={'build': [], 'srpm-build': []})
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={"build": [], "srpm-build": []},
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo(self, create_builder, koji_get_session, dbg):
@@ -69,8 +72,8 @@ class TestTagTagged:
Test that newRepo is called in the expected times.
"""
koji_session = mock.MagicMock()
koji_session.getTag = lambda tag_name: {'name': tag_name}
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
koji_session.getTag = lambda tag_name: {"name": tag_name}
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
koji_session.newRepo.return_value = 123456
koji_get_session.return_value = koji_session

@@ -78,7 +81,8 @@ class TestTagTagged:
builder.koji_session = koji_session
builder.buildroot_ready.return_value = False
builder.module_build_tag = {
"name": "module-testmodule-master-20170219191323-c40c156c-build"}
"name": "module-testmodule-master-20170219191323-c40c156c-build"
}
create_builder.return_value = builder

module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
@@ -92,25 +96,33 @@ class TestTagTagged:

module_build.batch = 2
for c in module_build.current_batch():
if c.package == 'perl-Tangerine':
c.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723'
elif c.package == 'perl-List-Compare':
c.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723'
if c.package == "perl-Tangerine":
c.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
elif c.package == "perl-List-Compare":
c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
c.state = koji.BUILD_STATES["COMPLETE"]
db.session.commit()

# Tag the first component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"perl-Tangerine",
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, session=db.session, msg=msg
)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c',
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c",
"perl-Tangerine",
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, session=db.session, msg=msg
)

# newRepo should not be called, because there are still components
# to tag.
@@ -118,10 +130,14 @@ class TestTagTagged:

# Tag the second component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"perl-List-Compare",
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, session=db.session, msg=msg
)

# newRepo should not be called, because the component has not been
# tagged to final tag so far.
@@ -129,8 +145,11 @@ class TestTagTagged:

# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c',
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c",
"perl-List-Compare",
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@@ -146,8 +165,10 @@ class TestTagTagged:
# status later in poller.
assert module_build.new_repo_task_id == 123456

@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={'build': [], 'srpm-build': []})
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={"build": [], "srpm-build": []},
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_still_building_components(self, create_builder, koji_get_session, dbg):
@@ -155,8 +176,8 @@ class TestTagTagged:
Test that newRepo is called in the expected times.
"""
koji_session = mock.MagicMock()
koji_session.getTag = lambda tag_name: {'name': tag_name}
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
koji_session.getTag = lambda tag_name: {"name": tag_name}
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
koji_session.newRepo.return_value = 123456
koji_get_session.return_value = koji_session

@@ -164,27 +185,34 @@ class TestTagTagged:
builder.koji_session = koji_session
builder.buildroot_ready.return_value = False
builder.module_build_tag = {
"name": "module-testmodule-master-20170219191323-c40c156c-build"}
"name": "module-testmodule-master-20170219191323-c40c156c-build"
}
create_builder.return_value = builder

module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build.batch = 2
component = module_build_service.models.ComponentBuild.query\
.filter_by(package='perl-Tangerine', module_id=module_build.id).one()
component = module_build_service.models.ComponentBuild.query.filter_by(
package="perl-Tangerine", module_id=module_build.id).one()
component.state = koji.BUILD_STATES["BUILDING"]
component.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723'
component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
db.session.commit()

# Tag the perl-List-Compare component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"perl-Tangerine",
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c',
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c",
"perl-Tangerine",
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@@ -192,8 +220,10 @@ class TestTagTagged:
# built yet.
assert not koji_session.newRepo.called

@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch(
|
||||
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={"build": [], "srpm-build": []},
|
||||
)
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg):
|
||||
@@ -201,8 +231,8 @@ class TestTagTagged:
|
||||
Test that newRepo is called in the expected times.
|
||||
"""
|
||||
koji_session = mock.MagicMock()
|
||||
koji_session.getTag = lambda tag_name: {'name': tag_name}
|
||||
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
|
||||
koji_session.getTag = lambda tag_name: {"name": tag_name}
|
||||
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
|
||||
koji_session.newRepo.return_value = 123456
|
||||
koji_get_session.return_value = koji_session
|
||||
|
||||
@@ -210,7 +240,8 @@ class TestTagTagged:
|
||||
builder.koji_session = koji_session
|
||||
builder.buildroot_ready.return_value = False
|
||||
builder.module_build_tag = {
|
||||
"name": "module-testmodule-master-20170219191323-c40c156c-build"}
|
||||
"name": "module-testmodule-master-20170219191323-c40c156c-build"
|
||||
}
|
||||
create_builder.return_value = builder
|
||||
|
||||
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
|
||||
@@ -223,26 +254,33 @@ class TestTagTagged:
|
||||
c.tagged_in_final = True
|
||||
|
||||
module_build.batch = 2
|
||||
component = module_build_service.models.ComponentBuild.query\
|
||||
.filter_by(package='perl-Tangerine', module_id=module_build.id).one()
|
||||
component = module_build_service.models.ComponentBuild.query.filter_by(
|
||||
package="perl-Tangerine", module_id=module_build.id).one()
|
||||
component.state = koji.BUILD_STATES["FAILED"]
|
||||
component.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723'
|
||||
component = module_build_service.models.ComponentBuild.query\
|
||||
.filter_by(package='perl-List-Compare', module_id=module_build.id).one()
|
||||
component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
|
||||
component = module_build_service.models.ComponentBuild.query.filter_by(
|
||||
package="perl-List-Compare", module_id=module_build.id).one()
|
||||
component.state = koji.BUILD_STATES["COMPLETE"]
|
||||
component.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723'
|
||||
component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
|
||||
db.session.commit()
|
||||
|
||||
# Tag the perl-List-Compare component to the buildroot.
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
|
||||
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
|
||||
"id",
|
||||
"module-testmodule-master-20170219191323-c40c156c-build",
|
||||
"perl-List-Compare",
|
||||
"perl-List-Compare-0.53-5.module+0+d027b723",
|
||||
)
|
||||
module_build_service.scheduler.handlers.tags.tagged(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
config=conf, session=db.session, msg=msg
|
||||
)
|
||||
# Tag the perl-List-Compare component to final tag.
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'id', 'module-testmodule-master-20170219191323-c40c156c',
|
||||
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
|
||||
"id",
|
||||
"module-testmodule-master-20170219191323-c40c156c",
|
||||
"perl-List-Compare",
|
||||
"perl-List-Compare-0.53-5.module+0+d027b723",
|
||||
)
|
||||
module_build_service.scheduler.handlers.tags.tagged(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
|
||||
@@ -259,20 +297,21 @@ class TestTagTagged:
|
||||
# status later in poller.
|
||||
assert module_build.new_repo_task_id == 123456
|
||||
|
||||
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch(
|
||||
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={"build": [], "srpm-build": []},
|
||||
)
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
def test_newrepo_multiple_batches_tagged(
|
||||
self, create_builder, koji_get_session, dbg):
|
||||
def test_newrepo_multiple_batches_tagged(self, create_builder, koji_get_session, dbg):
|
||||
"""
|
||||
Test that newRepo is called just once and only when all components
|
||||
are tagged even if we tag components from the multiple batches in the
|
||||
same time.
|
||||
"""
|
||||
koji_session = mock.MagicMock()
|
||||
koji_session.getTag = lambda tag_name: {'name': tag_name}
|
||||
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
|
||||
koji_session.getTag = lambda tag_name: {"name": tag_name}
|
||||
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
|
||||
koji_session.newRepo.return_value = 123456
|
||||
koji_get_session.return_value = koji_session
|
||||
|
||||
@@ -280,33 +319,40 @@ class TestTagTagged:
builder.koji_session = koji_session
builder.buildroot_ready.return_value = False
builder.module_build_tag = {
"name": "module-testmodule-master-20170219191323-c40c156c-build"}
"name": "module-testmodule-master-20170219191323-c40c156c-build"
}
create_builder.return_value = builder

module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build.batch = 2
mbm = module_build_service.models.ComponentBuild.query.filter_by(
module_id=3, package='module-build-macros').one()
module_id=3, package="module-build-macros").one()
mbm.tagged = False
db.session.add(mbm)
for c in module_build.current_batch():
if c.package == 'perl-Tangerine':
c.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723'
elif c.package == 'perl-List-Compare':
c.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723'
if c.package == "perl-Tangerine":
c.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
elif c.package == "perl-List-Compare":
c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
c.state = koji.BUILD_STATES["COMPLETE"]
db.session.commit()

# Tag the first component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"perl-Tangerine",
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c',
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c",
"perl-Tangerine",
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@@ -316,14 +362,20 @@ class TestTagTagged:

# Tag the second component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"perl-List-Compare",
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# Tag the second component to final tag.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c',
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
"id",
"module-testmodule-master-20170219191323-c40c156c",
"perl-List-Compare",
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@@ -333,14 +385,20 @@ class TestTagTagged:

# Tag the component from first batch to final tag.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c',
'module-build-macros', 'module-build-macros-0.1-1.module+0+b0a1d1f7')
"id",
"module-testmodule-master-20170219191323-c40c156c",
"module-build-macros",
"module-build-macros-0.1-1.module+0+b0a1d1f7",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# Tag the component from first batch to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
'module-build-macros', 'module-build-macros-0.1-1.module+0+b0a1d1f7')
"id",
"module-testmodule-master-20170219191323-c40c156c-build",
"module-build-macros",
"module-build-macros-0.1-1.module+0+b0a1d1f7",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)

@@ -356,18 +414,19 @@ class TestTagTagged:
# status later in poller.
assert module_build.new_repo_task_id == 123456

@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch(
|
||||
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
|
||||
return_value={"build": [], "srpm-build": []},
|
||||
)
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
def test_newrepo_build_time_only(
|
||||
self, create_builder, koji_get_session, dbg):
|
||||
def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg):
|
||||
"""
|
||||
Test the component.build_time_only is respected in tag handler.
|
||||
"""
|
||||
koji_session = mock.MagicMock()
|
||||
koji_session.getTag = lambda tag_name: {'name': tag_name}
|
||||
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
|
||||
koji_session.getTag = lambda tag_name: {"name": tag_name}
|
||||
koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
|
||||
koji_session.newRepo.return_value = 123456
|
||||
koji_get_session.return_value = koji_session
|
||||
|
||||
@@ -375,7 +434,8 @@ class TestTagTagged:
|
||||
builder.koji_session = koji_session
|
||||
builder.buildroot_ready.return_value = False
|
||||
builder.module_build_tag = {
|
||||
"name": "module-testmodule-master-20170219191323-c40c156c-build"}
|
||||
"name": "module-testmodule-master-20170219191323-c40c156c-build"
|
||||
}
|
||||
create_builder.return_value = builder
|
||||
|
||||
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
|
||||
@@ -383,43 +443,52 @@ class TestTagTagged:
|
||||
# Set previous components as COMPLETE and tagged.
|
||||
module_build.batch = 1
|
||||
for c in module_build.up_to_current_batch():
|
||||
if c.package == 'module-build-macros':
|
||||
c.nvr = 'module-build-macros-0.1-1.module+0+b0a1d1f7'
|
||||
if c.package == "module-build-macros":
|
||||
c.nvr = "module-build-macros-0.1-1.module+0+b0a1d1f7"
|
||||
c.state = koji.BUILD_STATES["COMPLETE"]
|
||||
c.tagged = True
|
||||
c.tagged_in_final = True
|
||||
|
||||
module_build.batch = 2
|
||||
component = module_build_service.models.ComponentBuild.query\
|
||||
.filter_by(package='perl-Tangerine', module_id=module_build.id).one()
|
||||
component = module_build_service.models.ComponentBuild.query.filter_by(
|
||||
package="perl-Tangerine", module_id=module_build.id).one()
|
||||
component.state = koji.BUILD_STATES["COMPLETE"]
|
||||
component.build_time_only = True
|
||||
component.tagged = False
|
||||
component.tagged_in_final = False
|
||||
component.nvr = 'perl-Tangerine-0.23-1.module+0+d027b723'
|
||||
component = module_build_service.models.ComponentBuild.query\
|
||||
.filter_by(package='perl-List-Compare', module_id=module_build.id).one()
|
||||
component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
|
||||
component = module_build_service.models.ComponentBuild.query.filter_by(
|
||||
package="perl-List-Compare", module_id=module_build.id).one()
|
||||
component.state = koji.BUILD_STATES["COMPLETE"]
|
||||
component.nvr = 'perl-List-Compare-0.53-5.module+0+d027b723'
|
||||
component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
|
||||
db.session.commit()
|
||||
|
||||
# Tag the perl-Tangerine component to the buildroot.
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
|
||||
'perl-Tangerine', 'perl-Tangerine-0.23-1.module+0+d027b723')
|
||||
"id",
|
||||
"module-testmodule-master-20170219191323-c40c156c-build",
|
||||
"perl-Tangerine",
|
||||
"perl-Tangerine-0.23-1.module+0+d027b723",
|
||||
)
|
||||
module_build_service.scheduler.handlers.tags.tagged(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
assert not koji_session.newRepo.called
|
||||
# Tag the perl-List-Compare component to the buildroot.
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'id', 'module-testmodule-master-20170219191323-c40c156c-build',
|
||||
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
|
||||
"id",
|
||||
"module-testmodule-master-20170219191323-c40c156c-build",
|
||||
"perl-List-Compare",
|
||||
"perl-List-Compare-0.53-5.module+0+d027b723",
|
||||
)
|
||||
module_build_service.scheduler.handlers.tags.tagged(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
# Tag the perl-List-Compare component to final tag.
|
||||
msg = module_build_service.messaging.KojiTagChange(
|
||||
'id', 'module-testmodule-master-20170219191323-c40c156c',
|
||||
'perl-List-Compare', 'perl-List-Compare-0.53-5.module+0+d027b723')
|
||||
"id",
|
||||
"module-testmodule-master-20170219191323-c40c156c",
|
||||
"perl-List-Compare",
|
||||
"perl-List-Compare-0.53-5.module+0+d027b723",
|
||||
)
|
||||
module_build_service.scheduler.handlers.tags.tagged(
|
||||
config=conf, session=db.session, msg=msg)
|
||||
|
||||
|
||||
@@ -29,15 +29,14 @@ import pytest
import module_build_service.scm
from module_build_service.errors import ValidationError, UnprocessableEntity

base_dir = os.path.join(os.path.dirname(__file__), 'scm_data')
repo_url = 'file://' + base_dir + '/testrepo'
base_dir = os.path.join(os.path.dirname(__file__), "scm_data")
repo_url = "file://" + base_dir + "/testrepo"


class TestSCMModule:

def setup_method(self, test_method):
self.tempdir = tempfile.mkdtemp()
self.repodir = self.tempdir + '/testrepo'
self.repodir = self.tempdir + "/testrepo"

def teardown_method(self, test_method):
if os.path.exists(self.tempdir):
@@ -48,20 +47,20 @@ class TestSCMModule:
scm = module_build_service.scm.SCM(repo_url)
scm.checkout(self.tempdir)
files = os.listdir(self.repodir)
assert 'foo' in files, "foo not in %r" % files
assert "foo" in files, "foo not in %r" % files

def test_local_get_latest_is_sane(self):
""" See that a hash is returned by scm.get_latest. """
scm = module_build_service.scm.SCM(repo_url)
latest = scm.get_latest('master')
target = '5481faa232d66589e660cc301179867fb00842c9'
latest = scm.get_latest("master")
target = "5481faa232d66589e660cc301179867fb00842c9"
assert latest == target, "%r != %r" % (latest, target)

def test_local_get_latest_commit_hash_is_sane(self):
""" See that a hash is returned by scm.get_latest. """
scm = module_build_service.scm.SCM(repo_url)
latest = scm.get_latest('5481f')
target = '5481faa232d66589e660cc301179867fb00842c9'
latest = scm.get_latest("5481f")
target = "5481faa232d66589e660cc301179867fb00842c9"
assert latest == target, "%r != %r" % (latest, target)

def test_local_get_latest_unclean_input(self):
@@ -70,22 +69,22 @@ class TestSCMModule:
https://pagure.io/fm-orchestrator/issue/329
"""
scm = module_build_service.scm.SCM(repo_url)
assert scm.scheme == 'git', scm.scheme
fname = tempfile.mktemp(suffix='mbs-scm-test')
assert scm.scheme == "git", scm.scheme
fname = tempfile.mktemp(suffix="mbs-scm-test")
try:
scm.get_latest('master; touch %s' % fname)
scm.get_latest("master; touch %s" % fname)
except UnprocessableEntity:
assert not os.path.exists(fname), "%r exists! Vulnerable." % fname

def test_local_extract_name(self):
scm = module_build_service.scm.SCM(repo_url)
target = 'testrepo'
assert scm.name == target, '%r != %r' % (scm.name, target)
target = "testrepo"
assert scm.name == target, "%r != %r" % (scm.name, target)

def test_local_extract_name_trailing_slash(self):
scm = module_build_service.scm.SCM(repo_url + '/')
target = 'testrepo'
assert scm.name == target, '%r != %r' % (scm.name, target)
scm = module_build_service.scm.SCM(repo_url + "/")
target = "testrepo"
assert scm.name == target, "%r != %r" % (scm.name, target)

def test_verify(self):
scm = module_build_service.scm.SCM(repo_url)
@@ -97,20 +96,20 @@ class TestSCMModule:
module_build_service.scm.SCM(repo_url, "unknown")

def test_verify_commit_in_branch(self):
target = '7035bd33614972ac66559ac1fdd019ff6027ad21'
target = "7035bd33614972ac66559ac1fdd019ff6027ad21"
scm = module_build_service.scm.SCM(repo_url + "?#" + target, "dev")
scm.checkout(self.tempdir)
scm.verify()

def test_verify_commit_not_in_branch(self):
target = '7035bd33614972ac66559ac1fdd019ff6027ad21'
target = "7035bd33614972ac66559ac1fdd019ff6027ad21"
scm = module_build_service.scm.SCM(repo_url + "?#" + target, "master")
scm.checkout(self.tempdir)
with pytest.raises(ValidationError):
scm.verify()

def test_verify_unknown_hash(self):
target = '7035bd33614972ac66559ac1fdd019ff6027ad22'
target = "7035bd33614972ac66559ac1fdd019ff6027ad22"
scm = module_build_service.scm.SCM(repo_url + "?#" + target, "master")
with pytest.raises(UnprocessableEntity):
scm.checkout(self.tempdir)
@@ -125,7 +124,7 @@ class TestSCMModule:
def test_get_latest_incorrect_component_branch(self):
scm = module_build_service.scm.SCM(repo_url)
with pytest.raises(UnprocessableEntity):
scm.get_latest('foobar')
scm.get_latest("foobar")

def test_get_latest_component_branch(self):
ref = "5481faa232d66589e660cc301179867fb00842c9"
@@ -143,4 +142,4 @@ class TestSCMModule:
def test_get_latest_incorrect_component_ref(self):
scm = module_build_service.scm.SCM(repo_url)
with pytest.raises(UnprocessableEntity):
scm.get_latest('15481faa232d66589e660cc301179867fb00842c9')
scm.get_latest("15481faa232d66589e660cc301179867fb00842c9")

@@ -30,28 +30,26 @@ from tests import make_module, clean_database
class TestFindModuleKojiTags:
"""Test ursine.find_module_koji_tags"""

@patch.object(conf, 'koji_tag_prefixes', new=['module'])
@patch.object(conf, "koji_tag_prefixes", new=["module"])
def test_find_out_all_module_koji_tags(self):
session = Mock()
session.getFullInheritance.return_value = [
{'name': 'module-tag1-s-v-c'},
{'name': 'module-tag2-s-v-c'},
{'name': 'tag-1'},
{"name": "module-tag1-s-v-c"},
{"name": "module-tag2-s-v-c"},
{"name": "tag-1"},
]

expected_tags = ['module-tag1-s-v-c', 'module-tag2-s-v-c']
expected_tags = ["module-tag1-s-v-c", "module-tag2-s-v-c"]

tags = ursine.find_module_koji_tags(session, 'tag-a-build')
tags = ursine.find_module_koji_tags(session, "tag-a-build")
assert expected_tags == tags

@patch.object(conf, 'koji_tag_prefixes', new=['module'])
@patch.object(conf, "koji_tag_prefixes", new=["module"])
def test_return_empty_if_no_module_koji_tags(self):
session = Mock()
session.getFullInheritance.return_value = [
{'name': 'tag-1'}, {'name': 'tag-2'},
]
session.getFullInheritance.return_value = [{"name": "tag-1"}, {"name": "tag-2"}]

tags = ursine.find_module_koji_tags(session, 'tag-a-build')
tags = ursine.find_module_koji_tags(session, "tag-a-build")
assert [] == tags


@@ -60,56 +58,68 @@ class TestFindUrsineRootTags:

def setup_method(self):
self.koji_session = Mock()
self.koji_session.getTag.side_effect = lambda name: \
None if name == 'X-build' else {'name': name}
self.koji_session.getTag.side_effect = \
lambda name: None if name == "X-build" else {"name": name}

def test_find_build_tags(self):
with patch.object(conf, 'koji_external_repo_url_prefix',
new='http://example.com/brewroot/'):
tags = ursine.find_build_tags_from_external_repos(self.koji_session, [
{
'external_repo_name': 'tag-1-external-repo',
'url': 'http://example.com/brewroot/repos/tag-1-build/latest/$arch/'
},
{
'external_repo_name': 'tag-2-external-repo',
'url': 'http://example.com/brewroot/repos/tag-2-build/latest/$arch/'
},
])
with patch.object(
conf, "koji_external_repo_url_prefix", new="http://example.com/brewroot/"
):
tags = ursine.find_build_tags_from_external_repos(
self.koji_session,
[
{
"external_repo_name": "tag-1-external-repo",
"url": "http://example.com/brewroot/repos/tag-1-build/latest/$arch/",
},
{
"external_repo_name": "tag-2-external-repo",
"url": "http://example.com/brewroot/repos/tag-2-build/latest/$arch/",
},
],
)

assert ['tag-1-build', 'tag-2-build'] == tags
assert ["tag-1-build", "tag-2-build"] == tags

def test_return_emtpy_if_no_match_external_repo_url(self):
with patch.object(conf, 'koji_external_repo_url_prefix',
new='http://example.com/brewroot/'):
tags = ursine.find_build_tags_from_external_repos(self.koji_session, [
{
'external_repo_name': 'tag-1-external-repo',
'url': 'https://another-site.org/repos/tag-1-build/latest/$arch/'
},
{
'external_repo_name': 'tag-2-external-repo',
'url': 'https://another-site.org/repos/tag-2-build/latest/$arch/'
},
])
with patch.object(
conf, "koji_external_repo_url_prefix", new="http://example.com/brewroot/"
):
tags = ursine.find_build_tags_from_external_repos(
self.koji_session,
[
{
"external_repo_name": "tag-1-external-repo",
"url": "https://another-site.org/repos/tag-1-build/latest/$arch/",
},
{
"external_repo_name": "tag-2-external-repo",
"url": "https://another-site.org/repos/tag-2-build/latest/$arch/",
},
],
)

assert [] == tags

def test_some_tag_is_not_koji_tag(self):
with patch.object(conf, 'koji_external_repo_url_prefix',
new='http://example.com/brewroot/'):
tags = ursine.find_build_tags_from_external_repos(self.koji_session, [
{
'external_repo_name': 'tag-1-external-repo',
'url': 'http://example.com/brewroot/repos/tag-1-build/latest/$arch/'
},
{
'external_repo_name': 'tag-2-external-repo',
'url': 'http://example.com/brewroot/repos/X-build/latest/$arch/'
},
])
with patch.object(
conf, "koji_external_repo_url_prefix", new="http://example.com/brewroot/"
):
tags = ursine.find_build_tags_from_external_repos(
self.koji_session,
[
{
"external_repo_name": "tag-1-external-repo",
"url": "http://example.com/brewroot/repos/tag-1-build/latest/$arch/",
},
{
"external_repo_name": "tag-2-external-repo",
"url": "http://example.com/brewroot/repos/X-build/latest/$arch/",
},
],
)

assert ['tag-1-build'] == tags
assert ["tag-1-build"] == tags


class TestGetModulemdsFromUrsineContent:
|
||||
@@ -121,71 +131,63 @@ class TestGetModulemdsFromUrsineContent:
|
||||
def teardown_method(self, test_method):
|
||||
clean_database()
|
||||
|
||||
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
|
||||
def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):
|
||||
session = ClientSession.return_value
|
||||
|
||||
# No module koji_tag in ursine content yet. This will result in empty
|
||||
# ursine modulemds is returned.
session.getFullInheritance.return_value = [
{'name': 'tag-1.0-build'},
]
session.getExternalRepoList.return_value = [
{
'external_repo_name': 'tag-1.0-external-repo',
'url': 'http://example.com/repos/tag-4-build/latest/$arch/'
}
]
session.getFullInheritance.return_value = [{"name": "tag-1.0-build"}]
session.getExternalRepoList.return_value = [{
"external_repo_name": "tag-1.0-external-repo",
"url": "http://example.com/repos/tag-4-build/latest/$arch/",
}]

modulemds = ursine.get_modulemds_from_ursine_content('tag')
modulemds = ursine.get_modulemds_from_ursine_content("tag")
assert [] == modulemds

@patch.object(conf, 'koji_tag_prefixes', new=['module'])
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
@patch.object(conf, "koji_tag_prefixes", new=["module"])
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_modulemds(self, ClientSession):
session = ClientSession.return_value

# Ensure we get the build tag for the further query of ursine content.
# For this test, the build tag is tag-4-build
session.getExternalRepoList.return_value = [
{
'external_repo_name': 'tag-1.0-external-repo',
'url': 'http://example.com/repos/tag-4-build/latest/$arch/'
}
]
session.getExternalRepoList.return_value = [{
"external_repo_name": "tag-1.0-external-repo",
"url": "http://example.com/repos/tag-4-build/latest/$arch/",
}]

# Ensure module tags are returned from the ursine content of the fake
# build tag specified in the above external repo's URL.
def mock_getFullInheritance(tag):
if tag == 'tag-4-build':
if tag == "tag-4-build":
return [
{'name': 'tag-1.0-build'},
{"name": "tag-1.0-build"},
# The two modules below should be returned, and their modulemds
# should also be queried from the database.
{'name': 'module-name1-s-2020-c'},
{'name': 'module-name2-s-2021-c'},
{"name": "module-name1-s-2020-c"},
{"name": "module-name2-s-2021-c"},
]
raise ValueError('{} is not handled by test.'.format(tag))
raise ValueError("{} is not handled by test.".format(tag))

session.getFullInheritance.side_effect = mock_getFullInheritance

# Defaults to DB resolver, so create fake module builds and store them
# into database to ensure they can be queried.
mmd_name1s2020c = make_module(
'name1:s:2020:c',
xmd={'mbs': {'koji_tag': 'module-name1-s-2020-c'}})
"name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})
mmd_name2s2021c = make_module(
'name2:s:2021:c',
xmd={'mbs': {'koji_tag': 'module-name2-s-2021-c'}})
"name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})

koji_tag = 'tag'  # It's ok to use arbitrary tag name.
with patch.object(conf, 'koji_external_repo_url_prefix', new='http://example.com/'):
koji_tag = "tag"  # It's ok to use arbitrary tag name.
with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):
modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)

test_nsvcs = [item.dup_nsvc() for item in modulemds]
test_nsvcs.sort()

expected_nsvcs = [mmd_name1s2020c.mmd().dup_nsvc(),
mmd_name2s2021c.mmd().dup_nsvc()]
expected_nsvcs = [mmd_name1s2020c.mmd().dup_nsvc(), mmd_name2s2021c.mmd().dup_nsvc()]
expected_nsvcs.sort()

session.getExternalRepoList.assert_called_once_with(koji_tag)
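
Taken together, the mocks above outline the whole flow of `get_modulemds_from_ursine_content`. A rough sketch distilled from them (the `tag_prefixes` and `resolve_mmd` parameters are hypothetical stand-ins for `conf.koji_tag_prefixes` and the DB resolver; the real function in `ursine.py` may be structured differently):

```python
# Assumed flow, distilled from the mocks in this test rather than from ursine.py.
def get_modulemds_from_ursine_content(session, koji_tag, tag_prefixes, resolve_mmd):
    mmds = []
    for repo in session.getExternalRepoList(koji_tag):
        # ".../repos/tag-4-build/latest/$arch/" points at the ursine build tag.
        build_tag = repo["url"].rstrip("/").split("/")[-3]
        for tag_info in session.getFullInheritance(build_tag):
            name = tag_info["name"]
            # Only tags like "module-name1-s-2020-c" match a configured prefix;
            # plain build tags like "tag-1.0-build" are skipped.
            if any(name.startswith(prefix + "-") for prefix in tag_prefixes):
                mmd = resolve_mmd(name)  # DB lookup by koji_tag in this test
                if mmd is not None:
                    mmds.append(mmd)
    return mmds
```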
@@ -195,92 +197,85 @@ class TestGetModulemdsFromUrsineContent:
class TestRecordStreamCollisionModules:
"""Test ursine.record_stream_collision_modules"""

@patch.object(conf, 'base_module_names', new=['platform'])
@patch.object(ursine, 'find_stream_collision_modules')
@patch.object(conf, "base_module_names", new=["platform"])
@patch.object(ursine, "find_stream_collision_modules")
def test_nothing_changed_if_no_base_module_is_in_buildrequires(
self, find_stream_collision_modules):
xmd = {
'mbs': {
'buildrequires': {
'modulea': {'stream': 'master'}
}
}
}
fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False)
self, find_stream_collision_modules
):
xmd = {"mbs": {"buildrequires": {"modulea": {"stream": "master"}}}}
fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)
original_xmd = glib.from_variant_dict(fake_mmd.get_xmd())

with patch.object(ursine, 'log') as log:
with patch.object(ursine, "log") as log:
ursine.handle_stream_collision_modules(fake_mmd)
assert 2 == log.info.call_count
find_stream_collision_modules.assert_not_called()

assert original_xmd == glib.from_variant_dict(fake_mmd.get_xmd())

@patch.object(conf, 'base_module_names', new=['platform'])
@patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')
@patch.object(conf, "base_module_names", new=["platform"])
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_mark_handled_even_if_no_modules_in_ursine_content(
self, get_modulemds_from_ursine_content):
self, get_modulemds_from_ursine_content
):
xmd = {
'mbs': {
'buildrequires': {
'modulea': {'stream': 'master'},
'platform': {'stream': 'master', 'koji_tag': 'module-rhel-8.0-build'},
"mbs": {
"buildrequires": {
"modulea": {"stream": "master"},
"platform": {"stream": "master", "koji_tag": "module-rhel-8.0-build"},
}
}
}
fake_mmd = make_module('name1:s:2020:c', xmd=xmd, store_to_db=False)
fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)
original_xmd = glib.from_variant_dict(fake_mmd.get_xmd())

get_modulemds_from_ursine_content.return_value = []

with patch.object(ursine, 'log') as log:
with patch.object(ursine, "log") as log:
ursine.handle_stream_collision_modules(fake_mmd)
assert 2 == log.info.call_count

expected_xmd = copy.deepcopy(original_xmd)
# Ensure stream_collision_modules is set.
expected_xmd['mbs']['buildrequires']['platform']['stream_collision_modules'] = ''
expected_xmd['mbs']['buildrequires']['platform']['ursine_rpms'] = ''
expected_xmd["mbs"]["buildrequires"]["platform"]["stream_collision_modules"] = ""
expected_xmd["mbs"]["buildrequires"]["platform"]["ursine_rpms"] = ""
assert expected_xmd == glib.from_variant_dict(fake_mmd.get_xmd())

@patch.object(conf, 'base_module_names', new=['platform', 'project-platform'])
@patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')
@patch('module_build_service.resolver.GenericResolver.create')
@patch('module_build_service.builder.KojiModuleBuilder.KojiClientSession')
def test_add_collision_modules(self, ClientSession, resolver_create,
get_modulemds_from_ursine_content):
@patch.object(conf, "base_module_names", new=["platform", "project-platform"])
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
@patch("module_build_service.resolver.GenericResolver.create")
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_add_collision_modules(
self, ClientSession, resolver_create, get_modulemds_from_ursine_content
):
xmd = {
'mbs': {
'buildrequires': {
'modulea': {'stream': 'master'},
'foo': {'stream': '1'},
'bar': {'stream': '2'},
'platform': {'stream': 'master', 'koji_tag': 'module-rhel-8.0-build'},
'project-platform': {
'stream': 'master', 'koji_tag': 'module-project-1.0-build'
"mbs": {
"buildrequires": {
"modulea": {"stream": "master"},
"foo": {"stream": "1"},
"bar": {"stream": "2"},
"platform": {"stream": "master", "koji_tag": "module-rhel-8.0-build"},
"project-platform": {
"stream": "master",
"koji_tag": "module-project-1.0-build",
},
}
}
}
fake_mmd = make_module('name1:s:2020:c',
xmd=xmd, store_to_db=False)
fake_mmd = make_module("name1:s:2020:c", xmd=xmd, store_to_db=False)

def mock_get_ursine_modulemds(koji_tag):
if koji_tag == 'module-rhel-8.0-build':
if koji_tag == "module-rhel-8.0-build":
return [
# This is the one
make_module('modulea:10:20180813041838:5ea3b708',
store_to_db=False),
make_module('moduleb:1.0:20180113042038:6ea3b105',
store_to_db=False),
make_module("modulea:10:20180813041838:5ea3b708", store_to_db=False),
make_module("moduleb:1.0:20180113042038:6ea3b105", store_to_db=False),
]
if koji_tag == 'module-project-1.0-build':
if koji_tag == "module-project-1.0-build":
return [
# Both of these are the colliding modules
make_module('bar:6:20181013041838:817fa3a8',
store_to_db=False),
make_module('foo:2:20180113041838:95f078a1',
store_to_db=False),
make_module("bar:6:20181013041838:817fa3a8", store_to_db=False),
make_module("foo:2:20180113041838:95f078a1", store_to_db=False),
]

get_modulemds_from_ursine_content.side_effect = mock_get_ursine_modulemds
@@ -288,34 +283,31 @@ class TestRecordStreamCollisionModules:
# Mock for finding out built rpms
def mock_get_module(name, stream, version, context, strict=True):
return {
'modulea:10:20180813041838:5ea3b708': {
'koji_tag': 'module-modulea-10-20180813041838-5ea3b708',
"modulea:10:20180813041838:5ea3b708": {
"koji_tag": "module-modulea-10-20180813041838-5ea3b708"
},
'bar:6:20181013041838:817fa3a8': {
'koji_tag': 'module-bar-6-20181013041838-817fa3a8',
"bar:6:20181013041838:817fa3a8": {
"koji_tag": "module-bar-6-20181013041838-817fa3a8"
},
'foo:2:20180113041838:95f078a1': {
'koji_tag': 'module-foo-2-20180113041838-95f078a1',
"foo:2:20180113041838:95f078a1": {
"koji_tag": "module-foo-2-20180113041838-95f078a1"
},
}['{}:{}:{}:{}'.format(name, stream, version, context)]
}["{}:{}:{}:{}".format(name, stream, version, context)]

resolver = resolver_create.return_value
resolver._get_module.side_effect = mock_get_module

def mock_listTaggedRPMS(tag, latest):
return {
'module-modulea-10-20180813041838-5ea3b708': [[
{'name': 'pkg1', 'version': '1.0', 'release': '1.fc28',
'epoch': None},
]],
'module-bar-6-20181013041838-817fa3a8': [[
{'name': 'pkg2', 'version': '2.0', 'release': '1.fc28',
'epoch': None},
]],
'module-foo-2-20180113041838-95f078a1': [[
{'name': 'pkg3', 'version': '3.0', 'release': '1.fc28',
'epoch': None},
]],
"module-modulea-10-20180813041838-5ea3b708": [
[{"name": "pkg1", "version": "1.0", "release": "1.fc28", "epoch": None}]
],
"module-bar-6-20181013041838-817fa3a8": [
[{"name": "pkg2", "version": "2.0", "release": "1.fc28", "epoch": None}]
],
"module-foo-2-20180113041838-95f078a1": [
[{"name": "pkg3", "version": "3.0", "release": "1.fc28", "epoch": None}]
],
}[tag]

koji_session = ClientSession.return_value
@@ -324,59 +316,47 @@ class TestRecordStreamCollisionModules:
ursine.handle_stream_collision_modules(fake_mmd)

xmd = glib.from_variant_dict(fake_mmd.get_xmd())
buildrequires = xmd['mbs']['buildrequires']
buildrequires = xmd["mbs"]["buildrequires"]

assert (['modulea:10:20180813041838:5ea3b708'] ==
buildrequires['platform']['stream_collision_modules'])
assert (['pkg1-0:1.0-1.fc28'] ==
buildrequires['platform']['ursine_rpms'])
modules = buildrequires["platform"]["stream_collision_modules"]
assert ["modulea:10:20180813041838:5ea3b708"] == modules
assert ["pkg1-0:1.0-1.fc28"] == buildrequires["platform"]["ursine_rpms"]

modules = sorted(
buildrequires['project-platform']['stream_collision_modules'])
expected_modules = ['bar:6:20181013041838:817fa3a8',
'foo:2:20180113041838:95f078a1']
modules = sorted(buildrequires["project-platform"]["stream_collision_modules"])
expected_modules = ["bar:6:20181013041838:817fa3a8", "foo:2:20180113041838:95f078a1"]
assert expected_modules == modules

assert (['pkg2-0:2.0-1.fc28', 'pkg3-0:3.0-1.fc28'] ==
sorted(buildrequires['project-platform']['ursine_rpms']))
rpms = sorted(buildrequires["project-platform"]["ursine_rpms"])
assert ["pkg2-0:2.0-1.fc28", "pkg3-0:3.0-1.fc28"] == rpms


class TestFindStreamCollisionModules:
"""Test ursine.find_stream_collision_modules"""

@patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')
def test_no_modulemds_found_from_ursine_content(
self, get_modulemds_from_ursine_content):
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_no_modulemds_found_from_ursine_content(self, get_modulemds_from_ursine_content):
get_modulemds_from_ursine_content.return_value = []
assert not ursine.find_stream_collision_modules({}, 'koji_tag')
assert not ursine.find_stream_collision_modules({}, "koji_tag")

@patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_no_collisions_found(self, get_modulemds_from_ursine_content):
xmd_mbs_buildrequires = {
'modulea': {'stream': 'master'},
'moduleb': {'stream': '10'},
}
xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}
get_modulemds_from_ursine_content.return_value = [
make_module('moduler:1:1:c1', store_to_db=False),
make_module('modules:2:1:c2', store_to_db=False),
make_module('modulet:3:1:c3', store_to_db=False),
make_module("moduler:1:1:c1", store_to_db=False),
make_module("modules:2:1:c2", store_to_db=False),
make_module("modulet:3:1:c3", store_to_db=False),
]
assert [] == ursine.find_stream_collision_modules(
xmd_mbs_buildrequires, 'koji_tag')
assert [] == ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")

@patch('module_build_service.utils.ursine.get_modulemds_from_ursine_content')
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_collision_modules_are_found(self, get_modulemds_from_ursine_content):
xmd_mbs_buildrequires = {
'modulea': {'stream': 'master'},
'moduleb': {'stream': '10'},
}
xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}
fake_modules = [
make_module('moduler:1:1:c1', store_to_db=False),
make_module('moduleb:6:1:c2', store_to_db=False),
make_module('modulet:3:1:c3', store_to_db=False),
make_module("moduler:1:1:c1", store_to_db=False),
make_module("moduleb:6:1:c2", store_to_db=False),
make_module("modulet:3:1:c3", store_to_db=False),
]
get_modulemds_from_ursine_content.return_value = fake_modules

assert [fake_modules[1].dup_nsvc()] == \
ursine.find_stream_collision_modules(
xmd_mbs_buildrequires, 'koji_tag')
modules = ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")
assert [fake_modules[1].dup_nsvc()] == modules
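
These three tests pin down the collision rule itself: an ursine module collides when its name is buildrequired with a different stream. A minimal approximation (the real function takes a koji_tag and fetches the ursine modulemds itself; here they are passed in for brevity):

```python
# Approximation of the collision rule, not the real implementation.
def find_stream_collision_modules(xmd_mbs_buildrequires, ursine_mmds):
    collisions = []
    for mmd in ursine_mmds:
        required = xmd_mbs_buildrequires.get(mmd.get_name())
        if required and required["stream"] != mmd.get_stream():
            collisions.append(mmd.dup_nsvc())  # e.g. "moduleb:6:1:c2"
    return collisions
```

With the fixtures above, only moduleb collides: it is buildrequired with stream "10" but appears in the ursine content with stream "6".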

File diff suppressed because it is too large

@@ -29,7 +29,6 @@ from tests import db, clean_database, make_module, init_data, base_dir


class TestUtilsModuleStreamExpansion:

def setup_method(self, test_method):
clean_database(False)

@@ -45,8 +44,10 @@ class TestUtilsModuleStreamExpansion:
mmd = module_build.mmd()
module_build_service.utils.expand_mse_streams(db.session, mmd)
modules = module_build_service.utils.get_mmds_required_by_module_recursively(mmd)
nsvcs = [":".join([m.get_name(), m.get_stream(), str(m.get_version()), m.get_context()])
for m in modules]
nsvcs = [
":".join([m.get_name(), m.get_stream(), str(m.get_version()), m.get_context()])
for m in modules
]
return nsvcs

def _generate_default_modules(self):
@@ -70,99 +71,96 @@ class TestUtilsModuleStreamExpansion:
self._generate_default_modules()
module_build = make_module(
"app:1:0:c1", {"gtk": ["1", "2"]}, {"platform": ["f28"], "gtk": ["1", "2"]})
mmds = module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd())
mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())
contexts = set([mmd.get_context() for mmd in mmds])
assert set(['e1e005fb', 'ce132a1e']) == contexts
assert set(["e1e005fb", "ce132a1e"]) == contexts

@pytest.mark.parametrize(
'requires,build_requires,stream_ambigous,expected_xmd,expected_buildrequires', [
({"gtk": ["1", "2"]},
{"platform": ["f28"], "gtk": ["1", "2"]}, True,
set([
frozenset(['platform:f28:0:c10', 'gtk:2:0:c4']),
frozenset(['platform:f28:0:c10', 'gtk:1:0:c2'])
]),
set([
frozenset(['gtk:1', 'platform:f28']),
frozenset(['gtk:2', 'platform:f28']),
])),

({"foo": ["1"]},
{"platform": ["f28"], "foo": ["1"], "gtk": ["1", "2"]}, True,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10']),
frozenset(['foo:1:0:c2', 'gtk:2:0:c4', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1', 'platform:f28']),
frozenset(['foo:1', 'gtk:2', 'platform:f28'])
])),

({"gtk": ["1"], "foo": ["1"]},
{"platform": ["f28"], "gtk": ["1"], "foo": ["1"]}, False,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1', 'platform:f28'])
])),

({"gtk": ["1"], "foo": ["1"]},
{"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, False,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1', 'platform:f28'])
])),

({"gtk": ["-2"], "foo": ["-2"]},
{"platform": ["f28"], "gtk": ["-2"], "foo": ["-2"]}, True,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1', 'platform:f28'])
])),

({"gtk": ["1"], "foo": ["1"]},
{"platform": ["f28"], "gtk": ["-1", "1"], "foo": ["-2", "1"]}, False,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1', 'platform:f28'])
])),

({"gtk": ["1"], "foo": ["1"]},
{"platform": ["f28"], "gtk": ["1"]}, False,
set([
frozenset(['gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['gtk:1', 'platform:f28'])
])),

({"gtk": []}, {"platform": ["f28"], "gtk": ["1"]}, True,
set([
frozenset(['gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['gtk:1', 'platform:f28'])
])),

({}, {"platform": ["f29"], "app": ["1"]}, False,
set([
frozenset(['app:1:0:c6', 'platform:f29:0:c11'])
]),
set([
frozenset(['app:1', 'platform:f29'])
])),
])
"requires,build_requires,stream_ambigous,expected_xmd,expected_buildrequires",
[
(
{"gtk": ["1", "2"]},
{"platform": ["f28"], "gtk": ["1", "2"]},
True,
set(
[
frozenset(["platform:f28:0:c10", "gtk:2:0:c4"]),
frozenset(["platform:f28:0:c10", "gtk:1:0:c2"]),
]
),
set([frozenset(["gtk:1", "platform:f28"]), frozenset(["gtk:2", "platform:f28"])]),
),
(
{"foo": ["1"]},
{"platform": ["f28"], "foo": ["1"], "gtk": ["1", "2"]},
True,
set(
[
frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"]),
frozenset(["foo:1:0:c2", "gtk:2:0:c4", "platform:f28:0:c10"]),
]
),
set(
[
frozenset(["foo:1", "gtk:1", "platform:f28"]),
frozenset(["foo:1", "gtk:2", "platform:f28"]),
]
),
),
(
{"gtk": ["1"], "foo": ["1"]},
{"platform": ["f28"], "gtk": ["1"], "foo": ["1"]},
False,
set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]),
set([frozenset(["foo:1", "gtk:1", "platform:f28"])]),
),
(
{"gtk": ["1"], "foo": ["1"]},
{"gtk": ["1"], "foo": ["1"], "platform": ["f28"]},
False,
set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]),
set([frozenset(["foo:1", "gtk:1", "platform:f28"])]),
),
(
{"gtk": ["-2"], "foo": ["-2"]},
{"platform": ["f28"], "gtk": ["-2"], "foo": ["-2"]},
True,
set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]),
set([frozenset(["foo:1", "gtk:1", "platform:f28"])]),
),
(
{"gtk": ["1"], "foo": ["1"]},
{"platform": ["f28"], "gtk": ["-1", "1"], "foo": ["-2", "1"]},
False,
set([frozenset(["foo:1:0:c2", "gtk:1:0:c2", "platform:f28:0:c10"])]),
set([frozenset(["foo:1", "gtk:1", "platform:f28"])]),
),
(
{"gtk": ["1"], "foo": ["1"]},
{"platform": ["f28"], "gtk": ["1"]},
False,
set([frozenset(["gtk:1:0:c2", "platform:f28:0:c10"])]),
set([frozenset(["gtk:1", "platform:f28"])]),
),
(
{"gtk": []},
{"platform": ["f28"], "gtk": ["1"]},
True,
set([frozenset(["gtk:1:0:c2", "platform:f28:0:c10"])]),
set([frozenset(["gtk:1", "platform:f28"])]),
),
(
{},
{"platform": ["f29"], "app": ["1"]},
False,
set([frozenset(["app:1:0:c6", "platform:f29:0:c11"])]),
set([frozenset(["app:1", "platform:f29"])]),
),
],
)
def test_generate_expanded_mmds_buildrequires(
self, requires, build_requires, stream_ambigous, expected_xmd,
expected_buildrequires):
self, requires, build_requires, stream_ambigous, expected_xmd, expected_buildrequires
):
self._generate_default_modules()
module_build = make_module("app:1:0:c1", requires, build_requires)

@@ -184,20 +182,22 @@ class TestUtilsModuleStreamExpansion:
name, stream = ns.split(":")
default_streams[name] = stream
module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd(), raise_if_stream_ambigous=True,
default_streams=default_streams)
db.session,
module_build.mmd(),
raise_if_stream_ambigous=True,
default_streams=default_streams,
)

mmds = module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd())
mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())

buildrequires_per_mmd_xmd = set()
buildrequires_per_mmd_buildrequires = set()
for mmd in mmds:
xmd = glib.from_variant_dict(mmd.get_xmd())
br_nsvcs = []
for name, detail in xmd['mbs']['buildrequires'].items():
br_nsvcs.append(":".join([
name, detail["stream"], detail["version"], detail["context"]]))
for name, detail in xmd["mbs"]["buildrequires"].items():
br_nsvcs.append(
":".join([name, detail["stream"], detail["version"], detail["context"]]))
buildrequires_per_mmd_xmd.add(frozenset(br_nsvcs))

assert len(mmd.get_dependencies()) == 1
@@ -212,47 +212,45 @@ class TestUtilsModuleStreamExpansion:
assert buildrequires_per_mmd_xmd == expected_xmd
assert buildrequires_per_mmd_buildrequires == expected_buildrequires

@pytest.mark.parametrize('requires,build_requires,expected', [
({"gtk": ["1", "2"]}, {"platform": [], "gtk": ["1", "2"]},
set([
frozenset(['gtk:1']),
frozenset(['gtk:2']),
])),

({"gtk": ["1", "2"]}, {"platform": [], "gtk": ["1"]},
set([
frozenset(['gtk:1', 'gtk:2']),
])),

({"gtk": ["1"], "foo": ["1"]},
{"platform": [], "gtk": ["1"], "foo": ["1"]},
set([
frozenset(['foo:1', 'gtk:1']),
])),

({"gtk": ["-2"], "foo": ["-2"]},
{"platform": [], "gtk": ["-2"], "foo": ["-2"]},
set([
frozenset(['foo:1', 'gtk:1']),
])),

({"gtk": ["-1", "1"], "foo": ["-2", "1"]},
{"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]},
set([
frozenset(['foo:1', 'gtk:1']),
])),

({"gtk": [], "foo": []}, {"platform": [], "gtk": ["1"], "foo": ["1"]},
set([
frozenset([]),
])),

])
@pytest.mark.parametrize(
"requires,build_requires,expected",
[
(
{"gtk": ["1", "2"]},
{"platform": [], "gtk": ["1", "2"]},
set([frozenset(["gtk:1"]), frozenset(["gtk:2"])]),
),
(
{"gtk": ["1", "2"]},
{"platform": [], "gtk": ["1"]},
set([frozenset(["gtk:1", "gtk:2"])]),
),
(
{"gtk": ["1"], "foo": ["1"]},
{"platform": [], "gtk": ["1"], "foo": ["1"]},
set([frozenset(["foo:1", "gtk:1"])]),
),
(
{"gtk": ["-2"], "foo": ["-2"]},
{"platform": [], "gtk": ["-2"], "foo": ["-2"]},
set([frozenset(["foo:1", "gtk:1"])]),
),
(
{"gtk": ["-1", "1"], "foo": ["-2", "1"]},
{"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]},
set([frozenset(["foo:1", "gtk:1"])]),
),
(
{"gtk": [], "foo": []},
{"platform": [], "gtk": ["1"], "foo": ["1"]},
set([frozenset([])]),
),
],
)
def test_generate_expanded_mmds_requires(self, requires, build_requires, expected):
self._generate_default_modules()
module_build = make_module("app:1:0:c1", requires, build_requires)
mmds = module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd())
mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())

requires_per_mmd = set()
for mmd in mmds:
@@ -266,33 +264,83 @@ class TestUtilsModuleStreamExpansion:

assert requires_per_mmd == expected

@pytest.mark.parametrize('requires,build_requires,expected', [
({}, {"platform": [], "gtk": ["1", "2"]},
['platform:f29:0:c11', 'gtk:2:0:c4', 'gtk:2:0:c5',
'platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3']),

({}, {"platform": [], "gtk": ["1"], "foo": ["1"]},
['platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3',
'foo:1:0:c2', 'foo:1:0:c3', 'platform:f29:0:c11']),

({}, {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]},
['platform:f28:0:c10', 'gtk:1:0:c2',
'foo:1:0:c2']),

([{}, {}], [{"platform": [], "gtk": ["1"], "foo": ["1"]},
{"platform": [], "gtk": ["2"], "foo": ["2"]}],
['foo:1:0:c2', 'foo:1:0:c3', 'foo:2:0:c4', 'foo:2:0:c5',
'platform:f28:0:c10', 'platform:f29:0:c11', 'gtk:1:0:c2',
'gtk:1:0:c3', 'gtk:2:0:c4', 'gtk:2:0:c5']),

({}, {"platform": [], "gtk": ["-2"], "foo": ["-2"]},
['foo:1:0:c2', 'foo:1:0:c3', 'platform:f29:0:c11',
'platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3']),

({}, {"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]},
['foo:1:0:c2', 'foo:1:0:c3', 'platform:f29:0:c11',
'platform:f28:0:c10', 'gtk:1:0:c2', 'gtk:1:0:c3']),
])
@pytest.mark.parametrize(
"requires,build_requires,expected",
[
(
{},
{"platform": [], "gtk": ["1", "2"]},
[
"platform:f29:0:c11",
"gtk:2:0:c4",
"gtk:2:0:c5",
"platform:f28:0:c10",
"gtk:1:0:c2",
"gtk:1:0:c3",
],
),
(
{},
{"platform": [], "gtk": ["1"], "foo": ["1"]},
[
"platform:f28:0:c10",
"gtk:1:0:c2",
"gtk:1:0:c3",
"foo:1:0:c2",
"foo:1:0:c3",
"platform:f29:0:c11",
],
),
(
{},
{"gtk": ["1"], "foo": ["1"], "platform": ["f28"]},
["platform:f28:0:c10", "gtk:1:0:c2", "foo:1:0:c2"],
),
(
[{}, {}],
[
{"platform": [], "gtk": ["1"], "foo": ["1"]},
{"platform": [], "gtk": ["2"], "foo": ["2"]},
],
[
"foo:1:0:c2",
"foo:1:0:c3",
"foo:2:0:c4",
"foo:2:0:c5",
"platform:f28:0:c10",
"platform:f29:0:c11",
"gtk:1:0:c2",
"gtk:1:0:c3",
"gtk:2:0:c4",
"gtk:2:0:c5",
],
),
(
{},
{"platform": [], "gtk": ["-2"], "foo": ["-2"]},
[
"foo:1:0:c2",
"foo:1:0:c3",
"platform:f29:0:c11",
"platform:f28:0:c10",
"gtk:1:0:c2",
"gtk:1:0:c3",
],
),
(
{},
{"platform": [], "gtk": ["-1", "1"], "foo": ["-2", "1"]},
[
"foo:1:0:c2",
"foo:1:0:c3",
"platform:f29:0:c11",
"platform:f28:0:c10",
"gtk:1:0:c2",
"gtk:1:0:c3",
],
),
],
)
def test_get_required_modules_simple(self, requires, build_requires, expected):
module_build = make_module("app:1:0:c1", requires, build_requires)
self._generate_default_modules()
@@ -316,15 +364,28 @@ class TestUtilsModuleStreamExpansion:
make_module("lorem:1:1:c2", {"base": ["f29"]}, {}, base_module)
make_module("base:f29:0:c3", {"platform": ["f29"]}, {}, base_module)

@pytest.mark.parametrize('requires,build_requires,expected', [
({}, {"platform": [], "gtk": ["1"]},
['foo:1:1:c2', 'base:f29:0:c3', 'platform:f29:0:c11',
'bar:1:1:c2', 'gtk:1:1:c2', 'lorem:1:1:c2']),

({}, {"platform": [], "foo": ["1"]},
['foo:1:1:c2', 'base:f29:0:c3', 'platform:f29:0:c11',
'bar:1:1:c2', 'lorem:1:1:c2']),
])
@pytest.mark.parametrize(
"requires,build_requires,expected",
[
(
{},
{"platform": [], "gtk": ["1"]},
[
"foo:1:1:c2",
"base:f29:0:c3",
"platform:f29:0:c11",
"bar:1:1:c2",
"gtk:1:1:c2",
"lorem:1:1:c2",
],
),
(
{},
{"platform": [], "foo": ["1"]},
["foo:1:1:c2", "base:f29:0:c3", "platform:f29:0:c11", "bar:1:1:c2", "lorem:1:1:c2"],
),
],
)
def test_get_required_modules_recursion(self, requires, build_requires, expected):
module_build = make_module("app:1:0:c1", requires, build_requires)
self._generate_default_modules_recursion()
@@ -345,10 +406,16 @@ class TestUtilsModuleStreamExpansion:
make_module("gtk:1:2:c2", {"platform": ["f29"]}, {}, f290100)
make_module("gtk:1:3:c2", {"platform": ["f29"]}, {}, f290200)

@pytest.mark.parametrize('requires,build_requires,expected', [
({}, {"platform": ["f29.1.0"], "gtk": ["1"]},
['platform:f29.0.0:0:c11', 'gtk:1:0:c2', 'gtk:1:2:c2', 'platform:f29.1.0:0:c11']),
])
@pytest.mark.parametrize(
"requires,build_requires,expected",
[
(
{},
{"platform": ["f29.1.0"], "gtk": ["1"]},
["platform:f29.0.0:0:c11", "gtk:1:0:c2", "gtk:1:2:c2", "platform:f29.1.0:0:c11"],
)
],
)
def test_get_required_modules_stream_versions(self, requires, build_requires, expected):
module_build = make_module("app:1:0:c1", requires, build_requires)
self._generate_default_modules_modules_multiple_stream_versions()
@@ -359,32 +426,32 @@ class TestUtilsModuleStreamExpansion:
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=True)
mmd = module_build_service.utils.load_mmd_file(
os.path.join(base_dir, 'staged_data', 'testmodule_v2.yaml'))
os.path.join(base_dir, "staged_data", "testmodule_v2.yaml"))
deps = mmd.get_dependencies()
brs = deps[0].get_buildrequires()
brs['platform'].set(['f29.1.0', 'f29.2.0'])
brs["platform"].set(["f29.1.0", "f29.2.0"])
deps[0].set_buildrequires(brs)
mmd.set_dependencies(deps)

mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)
expected = set(['platform:f29.0.0', 'platform:f29.1.0', 'platform:f29.2.0'])
expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"])
# Verify no duplicates were returned before doing set operations
assert len(mmds) == len(expected)
# Verify the expected ones were returned
actual = set()
for mmd_ in mmds:
actual.add('{}:{}'.format(mmd_.get_name(), mmd_.get_stream()))
actual.add("{}:{}".format(mmd_.get_name(), mmd_.get_stream()))
assert actual == expected

@pytest.mark.parametrize('virtual_streams', (None, ["f29"], ["lp29"]))
@pytest.mark.parametrize("virtual_streams", (None, ["f29"], ["lp29"]))
def test__get_base_module_mmds_virtual_streams(self, virtual_streams):
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=True)
mmd = module_build_service.utils.load_mmd_file(
os.path.join(base_dir, 'staged_data', 'testmodule_v2.yaml'))
os.path.join(base_dir, "staged_data", "testmodule_v2.yaml"))
deps = mmd.get_dependencies()
brs = deps[0].get_buildrequires()
brs['platform'].set(['f29.2.0'])
brs["platform"].set(["f29.2.0"])
deps[0].set_buildrequires(brs)
mmd.set_dependencies(deps)

@@ -392,14 +459,14 @@ class TestUtilsModuleStreamExpansion:

mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)
if virtual_streams == ["f29"]:
expected = set(['platform:f29.0.0', 'platform:f29.1.0', 'platform:f29.2.0',
'platform:lp29.1.1'])
expected = set(
["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0", "platform:lp29.1.1"])
else:
expected = set(['platform:f29.0.0', 'platform:f29.1.0', 'platform:f29.2.0'])
expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"])
# Verify no duplicates were returned before doing set operations
assert len(mmds) == len(expected)
# Verify the expected ones were returned
actual = set()
for mmd_ in mmds:
actual.add('{}:{}'.format(mmd_.get_name(), mmd_.get_stream()))
actual.add("{}:{}".format(mmd_.get_name(), mmd_.get_stream()))
assert actual == expected
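
The virtual-streams cases reduce to a simple matching rule: a platform stream from another family, such as lp29.1.1, is included only when it advertises a virtual stream shared with the buildrequired platform. A sketch of that predicate (assumed from the expectations above, not taken from mse.py):

```python
# Assumed rule distilled from the parametrized expectations above.
def shares_virtual_stream(candidate_streams, requested_streams):
    return bool(set(candidate_streams or []) & set(requested_streams or []))

# shares_virtual_stream(["f29"], ["f29"]) -> True: platform:lp29.1.1 rides along.
# shares_virtual_stream(["lp29"], ["f29"]) -> False: it is filtered out.
```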

File diff suppressed because it is too large