Merge branch 'master' of ssh://pagure.io/fm-orchestrator

This commit is contained in:
Jan Kaluza
2017-05-02 14:48:17 +02:00
38 changed files with 18372 additions and 230 deletions

View File

@@ -30,13 +30,13 @@ class BaseConfiguration(object):
KOJI_PROFILE = 'koji'
KOJI_ARCHES = ['i686', 'armv7hl', 'x86_64']
KOJI_PROXYUSER = True
KOJI_REPOSITORY_URL = 'https://kojipkgs.stg.fedoraproject.org/repos'
KOJI_REPOSITORY_URL = 'https://kojipkgs.fedoraproject.org/repos'
KOJI_TAG_PREFIXES = ['module']
COPR_CONFIG = '/etc/module-build-service/copr.conf'
PDC_URL = 'http://modularity.fedorainfracloud.org:8080/rest_api/v1'
PDC_URL = 'http://pdc.fedoraproject.org/rest_api/v1'
PDC_INSECURE = True
PDC_DEVELOP = True
SCMURLS = ["git://pkgs.stg.fedoraproject.org/modules/"]
SCMURLS = ["git://pkgs.fedoraproject.org/modules/"]
YAML_SUBMIT_ALLOWED = False
# How often should we resort to polling, in seconds
@@ -60,7 +60,7 @@ class BaseConfiguration(object):
SSL_CERTIFICATE_KEY_FILE = '/etc/module-build-service/server.key'
SSL_CA_CERTIFICATE_FILE = '/etc/module-build-service/cacert.pem'
PKGDB_API_URL = 'https://admin.stg.fedoraproject.org/pkgdb/api'
PKGDB_API_URL = 'https://admin.fedoraproject.org/pkgdb/api'
ALLOWED_GROUPS = set([
'packager',
@@ -94,6 +94,8 @@ class BaseConfiguration(object):
# Disable Client Authorization
NO_AUTH = False
CACHE_DIR = '~/modulebuild/cache'
class DevConfiguration(BaseConfiguration):
DEBUG = True
@@ -158,6 +160,10 @@ class TestConfiguration(BaseConfiguration):
KOJI_PROFILE = 'staging'
SERVER_NAME = 'localhost'
KOJI_REPOSITORY_URL = 'https://kojipkgs.stg.fedoraproject.org/repos'
SCMURLS = ["git://pkgs.stg.fedoraproject.org/modules/"]
PKGDB_API_URL = 'https://admin.stg.fedoraproject.org/pkgdb/api'
class ProdConfiguration(BaseConfiguration):
pass

View File

@@ -9,6 +9,9 @@ import subprocess
import requests
import koji
import time
import operator
from tabulate import tabulate
from multiprocessing.dummy import Pool as ThreadPool
from copy import copy
DEFAULT_ID_PROVIDER = "https://id.fedoraproject.org/openidc/"
@@ -16,6 +19,17 @@ DEFAULT_MBS_SERVER = "https://mbs.fedoraproject.org"
openidc_client.WEB_PORTS = [13747]
BUILD_STATES = {
"init": 0,
"wait": 1,
"build": 2,
"done": 3,
"failed": 4,
"ready": 5,
}
INVERSE_BUILD_STATES = {v: k for k, v in BUILD_STATES.items()}
def watch_build(server, build_id):
"""
Watches the MBS build in a loop, updates every 30 seconds.
@@ -240,6 +254,68 @@ def cancel_module_build(server, id_provider, build_id):
{'state': 'failed'})
logging.info(resp.text)
def show_overview(server):
    """
    Prints a table with all module builds that are currently in the
    init, wait or build state on the given MBS server.
    """
    if not server:
        server = DEFAULT_MBS_SERVER

    # Base URL to query.
    baseurl = server + '/module-build-service/1/module-builds/'

    # This logging would break our formatting.
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)

    def get_module_builds(page=1, state=0):
        """
        Yields all module builds with state `state`, walking the paginated
        listing page by page.
        """
        while True:
            response = requests.get(baseurl, params=dict(page=page, state=state))
            data = response.json()
            for item in data['items']:
                yield item
            if page >= data['meta']['pages']:
                return
            page += 1

    def get_module_info(module):
        """
        Returns one table row describing `module`.
        """
        response = requests.get(baseurl + '/%i?verbose=true' % module['id'])
        details = response.json()
        rpm_tasks = details['tasks'].get('rpms', {})
        n_components = len(rpm_tasks)
        # A component counts as "built" once its state left the
        # None/0/BUILDING group.
        n_built_components = len(
            [c for c in rpm_tasks.values()
             if c['state'] not in [None, 0, koji.BUILD_STATES["BUILDING"]]])
        return [details["id"], details["state_name"], details["time_submitted"],
                "%s/%s" % (n_built_components, n_components), details["owner"],
                "%s-%s-%s" % (details["name"], details["stream"], details["version"])]

    # We are interested only in init, wait and build states.
    states = [BUILD_STATES["init"], BUILD_STATES["wait"],
              BUILD_STATES["build"]]

    # Fetch the listings for all three states in parallel (3 threads) and
    # flatten the per-state lists into one list of module builds.
    module_builds = [
        build
        for state_builds in ThreadPool(3).map(
            lambda state: list(get_module_builds(state=state)), states)
        for build in state_builds
    ]

    # Get the table rows with information about each module using 20 threads.
    table = ThreadPool(20).map(get_module_info, module_builds)
    # Sort according to 'id' (first element in list), newest first.
    table.sort(key=operator.itemgetter(0), reverse=True)

    # Headers for table we will show to user.
    headers = ["ID", "State", "Submitted", "Components", "Owner", "Module"]
    print(tabulate(table, headers=headers))
def main():
# Parse command line arguments
parser = argparse.ArgumentParser(description="Submits and manages module builds.")
@@ -276,6 +352,7 @@ def main():
'cancel', help="cancel module build",
description="Cancels the build submitted by 'submit' subcommand.")
parser_cancel.add_argument("build_id")
parser_local = subparsers.add_parser(
'local', help="do local build of module",
description="Starts local build of a module using the Mock backend. "
@@ -285,6 +362,10 @@ def main():
parser_local.add_argument("scm_url", nargs='?')
parser_local.add_argument("branch", nargs='?')
parser_overview = subparsers.add_parser(
'overview', help="show overview of module builds",
description="Shows overview of module builds.")
args = parser.parse_args()
# Initialize the logging.
@@ -316,6 +397,8 @@ def main():
elif args.cmd_name == "cancel":
# Cancel the module build
cancel_module_build(args.server, args.idprovider, args.build_id)
elif args.cmd_name == "overview":
show_overview(args.server)
if __name__ == "__main__":
main()

View File

@@ -60,13 +60,16 @@ class CoprModuleBuilder(GenericBuilder):
self.owner = owner
self.config = config
self.tag_name = tag_name
self.module_str = module
self.module = module
self.module_str = module.name
self.copr = None
self.client = CoprModuleBuilder._get_client(config)
self.client.username = self.owner
self.chroot = "custom-1-x86_64"
self.__prep = False
@classmethod
def _get_client(cls, config):
return CoprClient.create_from_file_config(config.copr_config)
@@ -81,17 +84,19 @@ class CoprModuleBuilder(GenericBuilder):
"""
self.copr = self._get_copr_safe()
self._create_module_safe()
# @FIXME Not able to use gcc-c++ in chroot (RhBug: 1440889)
packages = groups["build"] - {"gcc-c++"}
self._update_chroot(packages=list(packages))
if self.copr and self.copr.projectname and self.copr.username:
self.__prep = True
log.info("%r buildroot sucessfully connected." % self)
def _get_copr_safe(self):
# @TODO it would be nice if the module build object was passed to Builder __init__
module = ModuleBuild.query.filter(ModuleBuild.name == self.module_str).one()
kwargs = {
"ownername": module.copr_owner or self.owner,
"projectname": module.copr_project or CoprModuleBuilder._tag_to_copr_name(self.tag_name)
"ownername": self.module.copr_owner or self.owner,
"projectname": self.module.copr_project or CoprModuleBuilder._tag_to_copr_name(self.tag_name)
}
try:
@@ -104,20 +109,17 @@ class CoprModuleBuilder(GenericBuilder):
return self.client.get_project_details(projectname, username=ownername).handle
def _create_copr(self, ownername, projectname):
# @TODO fix issues with custom-1-x86_64 and custom-1-i386 chroot and use it
return self.client.create_project(ownername, projectname, ["fedora-24-x86_64"])
return self.client.create_project(ownername, projectname, [self.chroot])
def _create_module_safe(self):
from copr.exceptions import CoprRequestException
# @TODO it would be nice if the module build object was passed to Builder __init__
module = ModuleBuild.query.filter(ModuleBuild.name == self.module_str).one()
modulemd = tempfile.mktemp()
module.mmd().dump(modulemd)
self.module.mmd().dump(modulemd)
kwargs = {
"username": module.copr_owner or self.owner,
"projectname": module.copr_project or CoprModuleBuilder._tag_to_copr_name(self.tag_name),
"username": self.module.copr_owner or self.owner,
"projectname": self.module.copr_project or CoprModuleBuilder._tag_to_copr_name(self.tag_name),
"modulemd": modulemd,
"create": True,
"build": False,
@@ -159,6 +161,13 @@ class CoprModuleBuilder(GenericBuilder):
koji add-group-pkg $module-build-tag srpm-build bash
"""
# Install the module-build-macros into the buildroot
# We are using same hack as mock builder does
for artifact in artifacts:
if artifact and artifact.startswith("module-build-macros"):
self._update_chroot(packages=["module-build-macros"])
break
# Start of a new batch of builds is triggered by buildsys.repo.done message.
# However in Copr there is no such thing. Therefore we are going to fake
# the message when builds are finished
@@ -168,7 +177,22 @@ class CoprModuleBuilder(GenericBuilder):
log.info("%r adding deps on %r" % (self, dependencies))
# @TODO get architecture from some builder variable
repos = [self._dependency_repo(d, "x86_64") for d in dependencies]
self.client.modify_project(self.copr.projectname, username=self.copr.username, repos=repos)
self._update_chroot(repos=repos)
def _update_chroot(self, packages=None, repos=None):
request = self.client.get_chroot(self.copr.projectname, self.copr.username, self.chroot)
chroot = request.data["chroot"]
current_packages = (chroot["buildroot_pkgs"] or "").split()
current_repos = (chroot["repos"] or "").split()
def merge(current, new):
current, new = current or [], new or []
return " ".join(set(current + new))
self.client.edit_chroot(self.copr.projectname, self.chroot,
ownername=self.copr.username,
packages=merge(current_packages, packages),
repos=merge(current_repos, repos))
def _dependency_repo(self, module, arch, backend="copr"):
try:
@@ -226,8 +250,7 @@ class CoprModuleBuilder(GenericBuilder):
def finalize(self):
modulemd = tempfile.mktemp()
m1 = ModuleBuild.query.filter(ModuleBuild.name == self.module_str).one()
m1.mmd().dump(modulemd)
self.module.mmd().dump(modulemd)
# Create a module from previous project
result = self.client.make_module(username=self.copr.username, projectname=self.copr.projectname,

View File

@@ -61,12 +61,12 @@ class KojiModuleBuilder(GenericBuilder):
def __init__(self, owner, module, config, tag_name, components):
"""
:param owner: a string representing who kicked off the builds
:param module: string representing module
:param module: module_build_service.models.ModuleBuild instance.
:param config: module_build_service.config.Config instance
:param tag_name: name of tag for given module
"""
self.owner = owner
self.module_str = module
self.module_str = module.name
self.config = config
self.tag_name = tag_name
self.__prep = False

View File

@@ -37,7 +37,8 @@ import module_build_service.scheduler
import module_build_service.scheduler.consumer
from base import GenericBuilder
from utils import execute_cmd, build_from_scm, fake_repo_done_message
from utils import (build_from_scm, fake_repo_done_message,
create_local_repo_from_koji_tag, execute_cmd)
from KojiModuleBuilder import KojiModuleBuilder
from module_build_service.models import ModuleBuild
@@ -94,7 +95,7 @@ mdpolicy=group:primary
@module_build_service.utils.validate_koji_tag('tag_name')
def __init__(self, owner, module, config, tag_name, components):
self.module_str = module
self.module_str = module.name
self.tag_name = tag_name
self.config = config
self.groups = []
@@ -171,7 +172,7 @@ mdpolicy=group:primary
execute_cmd(['/usr/bin/createrepo_c', path])
execute_cmd(['/usr/bin/modifyrepo_c', '--mdtype=modules', mmd_path, repodata_path])
def _add_repo(self, name, baseurl, extra = ""):
def _add_repo(self, name, baseurl, extra=""):
"""
Adds repository to Mock config file. Call _write_mock_config() to
actually write the config file to filesystem.
@@ -274,7 +275,10 @@ mdpolicy=group:primary
# extended to Copr in the future.
self._load_mock_config()
for tag in dependencies:
baseurl = KojiModuleBuilder.repo_from_tag(self.config, tag, self.arch)
repo_dir = os.path.join(self.config.cache_dir, "koji_tags", tag)
create_local_repo_from_koji_tag(self.config, tag, repo_dir,
[self.arch, "noarch"])
baseurl = "file://" + repo_dir
self._add_repo(tag, baseurl)
self._write_mock_config()

View File

@@ -9,9 +9,8 @@ __all__ = [
GenericBuilder.register_backend_class(KojiModuleBuilder)
if conf.system == "mock":
from MockModuleBuilder import MockModuleBuilder
GenericBuilder.register_backend_class(MockModuleBuilder)
from MockModuleBuilder import MockModuleBuilder
GenericBuilder.register_backend_class(MockModuleBuilder)
if conf.system == "copr":
from CoprModuleBuilder import CoprModuleBuilder

View File

@@ -100,7 +100,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
def create(cls, owner, module, backend, config, **extra):
"""
:param owner: a string representing who kicked off the builds
:param module: a module string e.g. 'testmodule-1.0'
:param module: module_build_service.models.ModuleBuild instance.
:param backend: a string representing backend e.g. 'koji'
:param config: instance of module_build_service.config.Config
@@ -125,7 +125,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
"""
components = [c.package for c in module.component_builds]
builder = GenericBuilder.create(
module.owner, module.name, config.system, config,
module.owner, module, config.system, config,
tag_name=module.koji_tag, components=components)
groups = GenericBuilder.default_buildroot_groups(session, module)
builder.buildroot_connect(groups)

View File

@@ -3,7 +3,11 @@ import koji
import tempfile
import shutil
import subprocess
import munch
import errno
import logging
import urlgrabber.grabber as grabber
import urlgrabber.progress as progress
import module_build_service
import module_build_service.scheduler
from module_build_service import log, scm, messaging
@@ -13,7 +17,7 @@ logging.basicConfig(level=logging.DEBUG)
def build_from_scm(artifact_name, source, config, build_srpm,
data = None, stdout=None, stderr=None):
data=None, stdout=None, stderr=None):
"""
Builds the artifact from the SCM based source.
@@ -65,7 +69,7 @@ def build_from_scm(artifact_name, source, config, build_srpm,
return ret
def execute_cmd(args, stdout = None, stderr = None, cwd = None):
def execute_cmd(args, stdout=None, stderr=None, cwd=None):
"""
Executes command defined by `args`. If `stdout` or `stderr` is set to
Python file object, the stderr/stdout output is redirecter to that file.
@@ -99,3 +103,89 @@ def fake_repo_done_message(tag_name):
repo_tag=tag_name + "-build",
)
module_build_service.scheduler.consumer.work_queue_put(msg)
def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
    """
    Downloads the packages built for one of `archs` (defaults to ['x86_64',
    'noarch']) in Koji tag `tag` to `repo_dir` and creates repository in that
    directory. Needs config.koji_profile and config.koji_config to be set.

    :raises koji.GenericError: when the RPMs in `tag` cannot be listed.
    """
    # Placed here to avoid py2/py3 conflicts...
    import koji

    if not archs:
        archs = ["x86_64", "noarch"]

    # Load koji config and create Koji session.
    koji_config = munch.Munch(koji.read_config(
        profile_name=config.koji_profile,
        user_config=config.koji_config,
    ))
    address = koji_config.server
    log.info("Connecting to koji %r" % address)
    session = koji.ClientSession(address, opts=koji_config)

    # Get the list of all RPMs and builds in a tag.
    try:
        rpms, builds = session.listTaggedRPMS(tag, latest=True)
    except koji.GenericError:
        log.exception("Failed to list rpms in tag %r" % tag)
        # Without the RPM list there is nothing to mirror; previously the
        # code fell through and crashed with NameError on `rpms`.
        raise

    # Reformat builds so they are dict with build_id as a key.
    builds = {build['build_id']: build for build in builds}

    # Prepare pathinfo we will use to generate the URL.
    pathinfo = koji.PathInfo(topdir=session.opts["topurl"])

    # Prepare the list of URLs to download.
    urls = []
    for rpm in rpms:
        build_info = builds[rpm['build_id']]
        # We do not download debuginfo packages or packages built for archs
        # we are not interested in.
        if koji.is_debuginfo(rpm['name']) or rpm['arch'] not in archs:
            continue
        fname = pathinfo.rpm(rpm)
        url = pathinfo.build(build_info) + '/' + fname
        urls.append((url, os.path.basename(fname), rpm['size']))

    log.info("Downloading %d packages from Koji tag %s to %s" % (len(urls), tag, repo_dir))

    # Create the output directory; it is fine when it already exists.
    try:
        os.makedirs(repo_dir)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise

    # When True, we want to run the createrepo_c.
    repo_changed = False

    # Download the RPMs.
    pg = progress.TextMeter()
    for url, relpath, size in urls:
        local_fn = os.path.join(repo_dir, relpath)
        # Download only when RPM is missing or the size does not match.
        if not os.path.exists(local_fn) or os.path.getsize(local_fn) != size:
            if os.path.exists(local_fn):
                os.remove(local_fn)
            repo_changed = True
            # "async" is a reserved word on modern Python, so pass the
            # urlgrabber keyword through dict expansion to keep this module
            # parseable there while behaving identically on py2.
            grabber.urlgrab(url, filename=local_fn, progress_obj=pg,
                            text=relpath, **{"async": (tag, 5)})
    grabber.parallel_wait()

    # If we downloaded something, run the createrepo_c.
    if repo_changed:
        repodata_path = os.path.join(repo_dir, "repodata")
        if os.path.exists(repodata_path):
            shutil.rmtree(repodata_path)
        log.info("Creating local repository in %s" % repo_dir)
        execute_cmd(['/usr/bin/createrepo_c', repo_dir])

View File

@@ -103,6 +103,12 @@ def init_config(app):
app.config.from_object(config_section_obj)
return conf
class Path:
    """
    Marker type for configuration items holding filesystem paths; such
    values have the user's home directory expanded when they are set.
    """
class Config(object):
"""Class representing the orchestrator configuration."""
@@ -127,6 +133,10 @@ class Config(object):
'type': int,
'default': 0,
'desc': 'Polling interval, in seconds.'},
'cache_dir': {
'type': Path,
'default': '~/modulebuild/cache',
'desc': 'Cache directory'},
'pdc_url': {
'type': str,
'default': '',
@@ -276,8 +286,8 @@ class Config(object):
'default': 'fedpkg --release f26 srpm',
'desc': ''},
'mock_resultsdir': {
'type': str,
'default': '/tmp',
'type': Path,
'default': '~/modulebuild/builds',
'desc': 'Directory for Mock build results.'},
'scmurls': {
'type': list,
@@ -317,7 +327,7 @@ class Config(object):
# set defaults
for name, values in self._defaults.items():
self.set_item(name, values['default'])
self.set_item(name, values['default'], values['type'])
# override defaults
for key in dir(conf_section_obj):
@@ -327,7 +337,7 @@ class Config(object):
# set item (lower key)
self.set_item(key.lower(), getattr(conf_section_obj, key))
def set_item(self, key, value):
def set_item(self, key, value, value_type=None):
"""
Set value for configuration item. Creates the self._key = value
attribute and self.key property to set/get/del the attribute.
@@ -343,6 +353,10 @@ class Config(object):
setifok_func = '_setifok_{}'.format(key)
if hasattr(self, setifok_func):
setx = lambda self, val: getattr(self, setifok_func)(val)
elif value_type == Path:
# For paths, expanduser.
setx = lambda self, val: setattr(
self, "_" + key, os.path.expanduser(val))
else:
setx = lambda self, val: setattr(self, "_" + key, val)
getx = lambda self: getattr(self, "_" + key)
@@ -360,8 +374,8 @@ class Config(object):
value = convert(value)
except:
raise TypeError("Configuration value conversion failed for name: %s" % key)
# unknown type/unsupported conversion
elif convert is not None:
# unknown type/unsupported conversion, or conversion not needed
elif convert is not None and convert not in [Path]:
raise TypeError("Unsupported type %s for configuration item name: %s" % (convert, key))
# Set the attribute to the correct value

View File

@@ -73,11 +73,11 @@ def init_logging(conf):
log_backend = conf.log_backend
if not log_backend or len(log_backend) == 0 or log_backend == "console":
logging.basicConfig(level = conf.log_level, format = log_format)
logging.basicConfig(level=conf.log_level, format=log_format)
log = logging.getLogger()
log.setLevel(conf.log_level)
elif log_backend == "journal":
logging.basicConfig(level = conf.log_level, format = log_format)
logging.basicConfig(level=conf.log_level, format=log_format)
try:
from systemd import journal
except:
@@ -87,6 +87,6 @@ def init_logging(conf):
log.propagate = False
log.addHandler(journal.JournalHandler())
else:
logging.basicConfig(filename = conf.log_file, level = conf.log_level,
format = log_format)
logging.basicConfig(filename=conf.log_file, level=conf.log_level,
format=log_format)
log = logging.getLogger()

View File

@@ -27,7 +27,6 @@
import json
import os
import re
import kobo.rpmlib
try:
from inspect import signature
except ImportError:
@@ -225,8 +224,9 @@ class BaseMessage(object):
build = msg_inner_msg.get('build')
status = msg_inner_msg.get('status')
pkg = msg_inner_msg.get('pkg')
version = msg_inner_msg.get('version')
what = msg_inner_msg.get('what')
msg_obj = CoprBuildEnd(msg_id, build, status, copr, pkg, what)
msg_obj = CoprBuildEnd(msg_id, build, status, copr, pkg, version, what)
# If the message matched the regex and is important to the app,
# it will be returned
@@ -305,16 +305,16 @@ class CoprBuildEnd(KojiBuildChange):
(e.g. mutt-kz-1.5.23.1-1.20150203.git.c8504a8a.fc21)
:param state_reason: the optional reason as to why the state changed
"""
def __init__(self, msg_id, build_id, status, copr, pkg, what=None):
nvr = kobo.rpmlib.parse_nvra(pkg)
def __init__(self, msg_id, build_id, status, copr, pkg, version, what=None):
ver, rel = version.split("-", 1)
super(CoprBuildEnd, self).__init__(
msg_id=msg_id,
build_id=build_id,
task_id=build_id,
build_new_state=status,
build_name=nvr["name"],
build_version=nvr["version"],
build_release=".".join(s for s in [nvr["release"], nvr["epoch"], nvr["arch"]] if s),
build_name=pkg,
build_version=ver,
build_release=rel,
state_reason=what,
)
self.copr = copr

View File

@@ -133,6 +133,26 @@ class ModuleBuild(MBSBase):
if component.batch == self.batch
]
def up_to_current_batch(self, state=None):
    """
    Returns all components of this module in the current batch and
    in the previous batches.

    :param state: when not None, only components whose ``state`` equals
        this value are returned (0 is a valid state and is honored).
    :raises ValueError: when no batch is in progress (``self.batch`` is
        falsy).
    """
    if not self.batch:
        raise ValueError("No batch is in progress: %r" % self.batch)
    components = [component for component in self.component_builds
                  if component.batch <= self.batch]
    # `is not None` instead of `!= None`: identity check is the correct
    # idiom and still lets state=0 act as a real filter.
    if state is not None:
        components = [c for c in components if c.state == state]
    return components
def mmd(self):
mmd = _modulemd.ModuleMetadata()
try:
@@ -269,6 +289,7 @@ class ModuleBuild(MBSBase):
# TODO, show their entire .json() ?
'component_builds': [build.id for build in self.component_builds],
'modulemd': self.modulemd,
'koji_tag': self.koji_tag,
'state_trace': [{'time': record.state_time,
'state': record.state,
'state_name': INVERSE_BUILD_STATES[record.state],
@@ -301,6 +322,7 @@ class ModuleBuild(MBSBase):
"time_submitted": self._utc_datetime_to_iso(self.time_submitted),
"time_modified": self._utc_datetime_to_iso(self.time_modified),
"time_completed": self._utc_datetime_to_iso(self.time_completed),
"koji_tag": self.koji_tag,
"tasks": self.tasks()
}
@@ -331,8 +353,8 @@ class ModuleBuild(MBSBase):
module_id=module_id).order_by(ModuleBuildTrace.state_time).all()
def __repr__(self):
return "<ModuleBuild %s, stream=%s, version=%s, state %r, batch %r, state_reason %r>" % (
self.name, self.stream, self.version,
return "<ModuleBuild %s, id=%d, stream=%s, version=%s, state %r, batch %r, state_reason %r>" % (
self.name, self.id, self.stream, self.version,
INVERSE_BUILD_STATES[self.state], self.batch, self.state_reason)

View File

@@ -207,7 +207,7 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer):
if handler is self.NO_OP:
log.debug("Handler is NO_OP: %s" % idx)
else:
log.debug("Calling %s" % idx)
log.info("Calling %s" % idx)
further_work = []
try:
further_work = handler(conf, session, msg) or []

View File

@@ -28,6 +28,8 @@ import module_build_service.builder
import module_build_service.pdc
import module_build_service.utils
import module_build_service.messaging
from module_build_service.utils import (
start_next_batch_build, attempt_to_reuse_all_components)
from requests.exceptions import ConnectionError
@@ -81,7 +83,7 @@ def failed(config, session, msg):
if build.koji_tag:
builder = module_build_service.builder.GenericBuilder.create(
build.owner, build.name, config.system, config, tag_name=build.koji_tag,
build.owner, build, config.system, config, tag_name=build.koji_tag,
components=[c.package for c in build.component_builds])
builder.buildroot_connect(groups)
@@ -234,32 +236,42 @@ def wait(config, session, msg):
log.debug("Adding dependencies %s into buildroot for module %s" % (dependencies, module_info))
builder.buildroot_add_repos(dependencies)
# inject dist-tag into buildroot
srpm = builder.get_disttag_srpm(
disttag=".%s" % get_rpm_release_from_mmd(build.mmd()),
module_build=build)
# If all components in module build will be reused, we don't have to build
# module-build-macros, because there won't be any build done.
if attempt_to_reuse_all_components(builder, session, build):
log.info("All components have ben reused for module %r, "
"skipping build" % build)
session.commit()
return []
else:
# Build the module-build-macros
# inject dist-tag into buildroot
srpm = builder.get_disttag_srpm(
disttag=".%s" % get_rpm_release_from_mmd(build.mmd()),
module_build=build)
log.debug("Starting build batch 1")
build.batch = 1
log.debug("Starting build batch 1")
build.batch = 1
session.commit()
artifact_name = "module-build-macros"
task_id, state, reason, nvr = builder.build(artifact_name=artifact_name, source=srpm)
artifact_name = "module-build-macros"
task_id, state, reason, nvr = builder.build(artifact_name=artifact_name, source=srpm)
component_build = models.ComponentBuild(
module_id=build.id,
package=artifact_name,
format="rpms",
scmurl=srpm,
task_id=task_id,
state=state,
state_reason=reason,
nvr=nvr,
batch=1,
)
session.add(component_build)
build.transition(config, state="build")
session.add(build)
session.commit()
component_build = models.ComponentBuild(
module_id=build.id,
package=artifact_name,
format="rpms",
scmurl=srpm,
task_id=task_id,
state=state,
state_reason=reason,
nvr=nvr,
batch=1,
)
session.add(component_build)
build.transition(config, state="build")
session.add(build)
session.commit()
# If this build already exists and is done, then fake the repo change event
# back to the scheduler

View File

@@ -85,7 +85,7 @@ def done(config, session, msg):
session, module_build)
builder = module_build_service.builder.GenericBuilder.create(
module_build.owner, module_build.name, config.system, config,
module_build.owner, module_build, config.system, config,
tag_name=tag, components=[c.package for c in module_build.component_builds])
builder.buildroot_connect(groups)

View File

@@ -72,10 +72,10 @@ def tagged(config, session, msg):
"building components in a batch", tag)
return []
# Get the list of untagged components in current batch which
# Get the list of untagged components in current/previous batches which
# have been built successfully.
untagged_components = [
c for c in module_build.current_batch()
c for c in module_build.up_to_current_batch()
if not c.tagged and c.state == koji.BUILD_STATES['COMPLETE']
]

View File

@@ -191,8 +191,11 @@ class MBSProducer(PollingProducer):
for module_build in session.query(models.ModuleBuild).filter_by(
state=models.BUILD_STATES['build']).all():
# If there are no components in the build state on the module build,
# then no possible event will start off new component builds
if not module_build.current_batch(koji.BUILD_STATES['BUILDING']):
# then no possible event will start off new component builds.
# But do not try to start new builds when we are waiting for the
# repo-regen.
if (not module_build.current_batch(koji.BUILD_STATES['BUILDING'])
and not module_build.new_repo_task_id):
# Initialize the builder...
builder = GenericBuilder.create_from_module(
session, module_build, config)
@@ -232,5 +235,7 @@ class MBSProducer(PollingProducer):
str(module_build.new_repo_task_id), module_build)
taginfo = koji_session.getTag(module_build.koji_tag + "-build")
module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])
else:
module_build.new_repo_task_id = 0
session.commit()

View File

@@ -37,7 +37,7 @@ import shutil
import datetime
from module_build_service import log
from module_build_service.errors import Forbidden, ValidationError
from module_build_service.errors import Forbidden, ValidationError, UnprocessableEntity
import module_build_service.utils
@@ -47,7 +47,7 @@ class SCM(object):
# Assuming git for HTTP schemas
types = module_build_service.utils.scm_url_schemes()
def __init__(self, url, branch = None, allowed_scm=None, allow_local = False):
def __init__(self, url, branch=None, allowed_scm=None, allow_local=False):
"""Initialize the SCM object using the specified scmurl.
If url is not in the list of allowed_scm, an error will be raised.
@@ -124,7 +124,7 @@ class SCM(object):
@staticmethod
@module_build_service.utils.retry(wait_on=RuntimeError)
def _run(cmd, chdir=None, log_stdout = False):
def _run(cmd, chdir=None, log_stdout=False):
proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE, cwd=chdir)
stdout, stderr = proc.communicate()
if log_stdout and stdout:
@@ -157,7 +157,17 @@ class SCM(object):
# perform checkouts
SCM._run(module_clone_cmd, chdir=scmdir)
if self.commit:
SCM._run(module_checkout_cmd, chdir=sourcedir)
try:
SCM._run(module_checkout_cmd, chdir=sourcedir)
except RuntimeError as e:
if (e.message.endswith(
" did not match any file(s) known to git.\\n\"") or
"fatal: reference is not a tree: " in e.message):
raise UnprocessableEntity(
"checkout: The requested commit hash was not found "
"within the repository. Perhaps you forgot to push. "
"The original message was: %s" % e.message)
raise
timestamp = SCM._run(["git", "show" , "-s", "--format=%ct"], chdir=sourcedir)[1]
dt = datetime.datetime.utcfromtimestamp(int(timestamp))

View File

@@ -152,12 +152,10 @@ def continue_batch_build(config, module, session, builder, components=None):
further_work = []
components_to_build = []
for c in unbuilt_components:
previous_component_build = None
# Check to see if we can reuse a previous component build
# instead of rebuilding it if the builder is Koji
if conf.system == 'koji':
previous_component_build = get_reusable_component(
session, module, c.package)
# instead of rebuilding it
previous_component_build = get_reusable_component(
session, module, c.package)
# If a component build can't be reused, we need to check
# the concurrent threshold.
if (not previous_component_build
@@ -166,37 +164,7 @@ def continue_batch_build(config, module, session, builder, components=None):
break
if previous_component_build:
log.info(
'Reusing component "{0}" from a previous module '
'build with the nvr "{1}"'.format(
c.package, previous_component_build.nvr))
c.reused_component_id = previous_component_build.id
c.task_id = previous_component_build.task_id
# Use BUILDING state here, because we want the state to change to
# COMPLETE by the fake KojiBuildChange message we are generating
# few lines below. If we would set it to the right state right
# here, we would miss the code path handling the KojiBuildChange
# which works only when switching from BUILDING to COMPLETE.
c.state = koji.BUILD_STATES['BUILDING']
c.state_reason = \
'Reused component from previous module build'
c.nvr = previous_component_build.nvr
nvr_dict = kobo.rpmlib.parse_nvr(c.nvr)
# Add this message to further_work so that the reused
# component will be tagged properly
further_work.append(
module_build_service.messaging.KojiBuildChange(
msg_id='start_build_batch: fake msg',
build_id=None,
task_id=c.task_id,
build_new_state=previous_component_build.state,
build_name=c.package,
build_version=nvr_dict['version'],
build_release=nvr_dict['release'],
module_build_id=c.module_id,
state_reason=c.state_reason
)
)
further_work += reuse_component(c, previous_component_build)
continue
# We set state to BUILDING here, because we are going to build the
@@ -227,7 +195,7 @@ def continue_batch_build(config, module, session, builder, components=None):
def start_next_batch_build(config, module, session, builder, components=None):
"""
Tries to start the build of next batch. In case there are still unbuilt
components in a batch, tries to submit more components until it hits
components in a batch, tries to submit more components until it hits
concurrent builds limit. Otherwise Increments module.batch and submits component
builds from the next batch.
@@ -370,7 +338,7 @@ def filter_module_builds(flask_request):
else:
raise ValidationError('An invalid state was supplied')
for key in ['name', 'owner']:
for key in ['name', 'owner', 'koji_tag']:
if flask_request.args.get(key, None):
search_query[key] = flask_request.args[key]
@@ -488,11 +456,19 @@ def _scm_get_latest(pkg):
# we want to pull from, we need to resolve that f25 branch
# to the specific commit available at the time of
# submission (now).
pkg.ref = module_build_service.scm.SCM(
pkgref = module_build_service.scm.SCM(
pkg.repository).get_latest(branch=pkg.ref)
except Exception as e:
return "Failed to get the latest commit for %s#%s" % (pkg.repository, pkg.ref)
return None
log.exception(e)
return {
'error': "Failed to get the latest commit for %s#%s" % (pkg.repository, pkg.ref)
}
return {
'pkg_name': pkg.name,
'pkg_ref': pkgref,
'error': None
}
def format_mmd(mmd, scmurl):
"""
@@ -552,6 +528,8 @@ def format_mmd(mmd, scmurl):
mmd.xmd['mbs']['buildrequires'] = {}
if mmd.components:
if 'rpms' not in mmd.xmd['mbs']:
mmd.xmd['mbs']['rpms'] = {}
# Add missing data in RPM components
for pkgname, pkg in mmd.components.rpms.items():
if pkg.repository and not conf.rpms_allow_repository:
@@ -577,17 +555,34 @@ def format_mmd(mmd, scmurl):
mod.ref = 'master'
# Check that SCM URL is valid and replace potential branches in
# pkg.ref by real SCM hash.
# pkg.ref by real SCM hash and store the result to our private xmd
# place in modulemd.
pool = ThreadPool(20)
err_msgs = pool.map(_scm_get_latest, mmd.components.rpms.values())
# TODO: only the first error message is raised, perhaps concatenate
# the messages together?
for err_msg in err_msgs:
if err_msg:
raise UnprocessableEntity(err_msg)
pkg_dicts = pool.map(_scm_get_latest, mmd.components.rpms.values())
err_msg = ""
for pkg_dict in pkg_dicts:
if pkg_dict["error"]:
err_msg += pkg_dict["error"] + "\n"
else:
pkg_name = pkg_dict["pkg_name"]
pkg_ref = pkg_dict["pkg_ref"]
mmd.xmd['mbs']['rpms'][pkg_name] = {'ref': pkg_ref}
if err_msg:
raise UnprocessableEntity(err_msg)
def record_component_builds(mmd, module, initial_batch = 1,
previous_buildorder = None):
def merge_included_mmd(mmd, included_mmd):
    """Merge metadata from an included module into the main modulemd.

    Only the metadata the `main` modulemd needs from a module it includes
    is merged -- currently just the xmd['mbs']['rpms'] mapping (component
    name -> {'ref': <resolved SCM hash>}).

    :param mmd: the top-level modulemd object being built (mutated in place).
    :param included_mmd: modulemd of the included module to merge from.
    """
    main_mbs = mmd.xmd['mbs']
    included_mbs = included_mmd.xmd['mbs']
    if 'rpms' not in included_mbs:
        # Nothing to merge -- the included module resolved no RPM refs.
        return
    if 'rpms' in main_mbs:
        main_mbs['rpms'].update(included_mbs['rpms'])
    else:
        main_mbs['rpms'] = included_mbs['rpms']
def record_component_builds(mmd, module, initial_batch=1,
previous_buildorder=None, main_mmd=None):
import koji # Placed here to avoid py2/py3 conflicts...
# Format the modulemd by putting in defaults and replacing streams that
@@ -600,16 +595,29 @@ def record_component_builds(mmd, module, initial_batch = 1,
db.session.commit()
raise
# List of (pkg_name, git_url) tuples to be used to check
# the availability of git URLs in parallel later.
full_urls = []
# When main_mmd is set, merge the metadata from this mmd to main_mmd,
# otherwise our current mmd is main_mmd.
if main_mmd:
# Check for components that are in both MMDs before merging since MBS
# currently can't handle that situation.
duplicate_components = [rpm for rpm in main_mmd.components.rpms.keys()
if rpm in mmd.components.rpms.keys()]
if duplicate_components:
error_msg = (
'The included module "{0}" in "{1}" have the following '
'conflicting components: {2}'
.format(mmd.name, main_mmd.name,
', '.join(duplicate_components)))
module.transition(conf, models.BUILD_STATES["failed"], error_msg)
db.session.add(module)
db.session.commit()
raise RuntimeError(error_msg)
merge_included_mmd(main_mmd, mmd)
else:
main_mmd = mmd
# If the modulemd yaml specifies components, then submit them for build
if mmd.components:
for pkgname, pkg in mmd.components.rpms.items():
full_url = "%s?#%s" % (pkg.repository, pkg.ref)
full_urls.append((pkgname, full_url))
components = mmd.components.all
components.sort(key=lambda x: x.buildorder)
@@ -632,12 +640,13 @@ def record_component_builds(mmd, module, initial_batch = 1,
full_url = pkg.repository + "?#" + pkg.ref
# It is OK to whitelist all URLs here, because the validity
# of every URL have been already checked in format_mmd(...).
mmd = _fetch_mmd(full_url, whitelist_url=True)[0]
batch = record_component_builds(mmd, module, batch,
previous_buildorder)
included_mmd = _fetch_mmd(full_url, whitelist_url=True)[0]
batch = record_component_builds(included_mmd, module, batch,
previous_buildorder, main_mmd)
continue
full_url = pkg.repository + "?#" + pkg.ref
pkgref = mmd.xmd['mbs']['rpms'][pkg.name]['ref']
full_url = pkg.repository + "?#" + pkgref
existing_build = models.ComponentBuild.query.filter_by(
module_id=module.id, package=pkg.name).first()
@@ -654,20 +663,41 @@ def record_component_builds(mmd, module, initial_batch = 1,
format="rpms",
scmurl=full_url,
batch=batch,
ref=pkg.ref
ref=pkgref
)
db.session.add(build)
return batch
def submit_module_build_from_yaml(username, yaml, optional_params=None):
def submit_module_build_from_yaml(username, handle, optional_params=None):
    """
    Submit a module build from a raw modulemd YAML upload.

    :param username: name of the user submitting the build.
    :param handle: uploaded-file object with a ``read()`` method and a
        ``filename`` attribute (presumably a werkzeug FileStorage from the
        API handler -- confirm against the caller).
    :param optional_params: optional dict of extra build parameters passed
        through to ``submit_module_build``.
    :return: whatever ``submit_module_build`` returns for the new build.
    """
    yaml = handle.read()
    mmd = load_mmd(yaml)

    # Mimic the way how default values are generated for modules that are stored in SCM
    # We can take filename as the module name as opposed to repo name,
    # and also we can take numeric representation of current datetime
    # as opposed to datetime of the last commit
    dt = datetime.utcfromtimestamp(int(time.time()))
    # Module name defaults to the uploaded file's basename (without extension).
    def_name = str(handle.filename.split(".")[0])
    # Version defaults to a sortable YYYYMMDDHHMMSS timestamp of "now".
    def_version = int(dt.strftime("%Y%m%d%H%M%S"))
    mmd.name = mmd.name or def_name
    mmd.stream = mmd.stream or "master"
    mmd.version = mmd.version or def_version
    # No SCM URL / SCM object is available for a direct YAML submission.
    return submit_module_build(username, None, mmd, None, yaml, optional_params)
# Matches strings that start with a URL scheme (e.g. "git://", "https:").
_url_check_re = re.compile(r"^[^:/]+:.*$")


def submit_module_build_from_scm(username, url, branch, allow_local_url=False,
                                 optional_params=None):
    """Submit a module build from a modulemd stored in SCM.

    :param username: name of the user submitting the build.
    :param url: SCM URL of the module repository; may be a plain local
        path when ``allow_local_url`` is True.
    :param branch: branch (or ref) to fetch the modulemd from.
    :param allow_local_url: when True, a scheme-less ``url`` is treated as
        a local filesystem path and converted to a file:// URL.
    :param optional_params: optional dict of extra build parameters.
    """
    # Translate local paths into file:// URL
    is_url = _url_check_re.match(url)
    if allow_local_url and not is_url:
        log.info(
            "'{}' is not a valid URL, assuming local path".format(url))
        url = "file://" + os.path.abspath(url)

    mmd, scm, yaml = _fetch_mmd(url, branch, allow_local_url)

    return submit_module_build(username, url, mmd, scm, yaml, optional_params)
@@ -766,6 +796,96 @@ def module_build_state_from_msg(msg):
% (state, type(state), list(models.BUILD_STATES.values())))
return state
def reuse_component(component, previous_component_build,
                    change_state_now=False):
    """
    Reuses component build `previous_component_build` instead of building
    component `component`

    Copies the NVR, Koji task id and reused-component id from the previous
    build onto ``component`` (mutated in place).

    :param component: ComponentBuild to fill in from the previous build.
    :param previous_component_build: ComponentBuild of an older module
        build whose result is reused.
    :param change_state_now: when True, set the component's state directly
        to the previous build's state; when False, leave it in BUILDING so
        the fake KojiBuildChange message below drives the state change.

    Returns the list of BaseMessage instances to be handled later by the
    scheduler.
    """
    import koji
    log.info(
        'Reusing component "{0}" from a previous module '
        'build with the nvr "{1}"'.format(
            component.package, previous_component_build.nvr))
    component.reused_component_id = previous_component_build.id
    component.task_id = previous_component_build.task_id
    if change_state_now:
        component.state = previous_component_build.state
    else:
        # Use BUILDING state here, because we want the state to change to
        # COMPLETE by the fake KojiBuildChange message we are generating
        # few lines below. If we would set it to the right state right
        # here, we would miss the code path handling the KojiBuildChange
        # which works only when switching from BUILDING to COMPLETE.
        component.state = koji.BUILD_STATES['BUILDING']
    component.state_reason = \
        'Reused component from previous module build'
    component.nvr = previous_component_build.nvr
    # Split the NVR so the fake message can carry version/release fields.
    nvr_dict = kobo.rpmlib.parse_nvr(component.nvr)
    # Add this message to further_work so that the reused
    # component will be tagged properly
    return [
        module_build_service.messaging.KojiBuildChange(
            msg_id='reuse_component: fake msg',
            build_id=None,
            task_id=component.task_id,
            build_new_state=previous_component_build.state,
            build_name=component.package,
            build_version=nvr_dict['version'],
            build_release=nvr_dict['release'],
            module_build_id=component.module_id,
            state_reason=component.state_reason
        )
    ]
def attempt_to_reuse_all_components(builder, session, module):
    """
    Tries to reuse all the components in a build. The components are also
    tagged to the tags using the `builder`.

    Returns True if all components could be reused, otherwise False. When
    False is returned, no component has been reused.
    """
    # Phase 1: find a reusable counterpart for every component first, so
    # that we either reuse everything or touch nothing at all.
    # Pairs of (component, component_to_reuse).
    pairs = []
    for build in module.component_builds:
        # module-build-macros is generated per-module and never reused.
        if build.package == "module-build-macros":
            continue
        reusable = get_reusable_component(session, module, build.package)
        if not reusable:
            return False
        pairs.append((build, reusable))

    # Phase 2: everything is reusable -- apply the reuse and collect the
    # NVRs that must be tagged into the buildroot and final tag.
    nvrs_to_tag = []
    for build, reusable in pairs:
        # Keep module.batch at the highest batch we have seen.
        if build.batch > module.batch:
            module.batch = build.batch
        # Reuse the component, copying its state immediately.
        reuse_component(build, reusable, True)
        nvrs_to_tag.append(build.nvr)

    # Tag all reused components at once.
    builder.buildroot_add_artifacts(nvrs_to_tag, install=False)
    builder.tag_artifacts(nvrs_to_tag)

    return True
def get_reusable_component(session, module, component_name):
"""
Returns the component (RPM) build of a module that can be reused
@@ -778,6 +898,11 @@ def get_reusable_component(session, module, component_name):
:return: the component (RPM) build SQLAlchemy object, if one is not found,
None is returned
"""
# We support components reusing only for koji and test backend.
if conf.system not in ['koji', 'test']:
return None
mmd = module.mmd()
# Find the latest module that is in the done or ready state
previous_module_build = session.query(models.ModuleBuild)\

View File

@@ -235,8 +235,8 @@ class YAMLFileHandler(BaseHandler):
self.validate_optional_params()
def post(self):
r = request.files["yaml"]
return submit_module_build_from_yaml(self.username, r.read(), optional_params=self.optional_params)
handle = request.files["yaml"]
return submit_module_build_from_yaml(self.username, handle, optional_params=self.optional_params)
def register_api_v1():

View File

@@ -20,3 +20,4 @@ qpid-python
six
sqlalchemy
futures # Python 2 only
tabulate

View File

@@ -183,7 +183,7 @@ def init_data():
session.commit()
def scheduler_init_data(communicator_state = None):
def scheduler_init_data(communicator_state=None):
db.session.remove()
db.drop_all()
db.create_all()

View File

@@ -0,0 +1,32 @@
document: modulemd
version: 1
data:
name: testmodule-variant
summary: A test module in all its whackiness
description: This module demonstrates how to write simple modulemd files And can be used for testing the build and release pipeline.
license:
module: [ MIT ]
dependencies:
buildrequires:
base-runtime: master
requires:
base-runtime: master
references:
community: https://fedoraproject.org/wiki/Modularity
documentation: https://fedoraproject.org/wiki/Fedora_Packaging_Guidelines_for_Modules
tracker: https://taiga.fedorainfracloud.org/project/modularity
profiles:
default:
rpms:
- python
api:
rpms:
- python
components:
rpms:
perl-List-Compare:
rationale: A dependency of tangerine.
ref: f25
python:
rationale: This is irrational
ref: f24

View File

@@ -29,8 +29,8 @@ data:
ref: f25
perl-Tangerine:
rationale: Provides API for this module and is a dependency of tangerine.
ref: f25
ref: f24
tangerine:
rationale: Provides API for this module.
buildorder: 10
ref: f25
ref: f23

View File

@@ -36,13 +36,14 @@ from module_build_service import db, models, conf
from mock import patch, PropertyMock
from tests import app, init_data
from tests import app, init_data, test_reuse_component_init_data
import os
import json
import itertools
from module_build_service.builder import KojiModuleBuilder, GenericBuilder
import module_build_service.scheduler.consumer
from module_build_service.messaging import MBSModule
base_dir = dirname(dirname(__file__))
cassette_dir = base_dir + '/vcr-request-data/'
@@ -72,7 +73,7 @@ class MockedSCM(object):
return scm_dir
def get_latest(self, branch = 'master'):
def get_latest(self, branch='master'):
return branch
class TestModuleBuilder(GenericBuilder):
@@ -204,8 +205,8 @@ class TestModuleBuilder(GenericBuilder):
pass
@patch("module_build_service.config.Config.system",
new_callable=PropertyMock, return_value = "test")
@patch("module_build_service.config.Config.system",
new_callable=PropertyMock, return_value="test")
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={
'srpm-build':
@@ -265,8 +266,8 @@ class TestBuild(unittest.TestCase):
# Check that components are tagged after the batch is built.
tag_groups = []
tag_groups.append(set([u'perl-Tangerine?#f25-1-1', u'perl-List-Compare?#f25-1-1']))
tag_groups.append(set([u'tangerine?#f25-1-1']))
tag_groups.append(set([u'perl-Tangerine?#f24-1-1', u'perl-List-Compare?#f25-1-1']))
tag_groups.append(set([u'tangerine?#f23-1-1']))
def on_tag_artifacts_cb(cls, artifacts):
self.assertEqual(tag_groups.pop(0), set(artifacts))
@@ -277,8 +278,8 @@ class TestBuild(unittest.TestCase):
# is built.
buildroot_groups = []
buildroot_groups.append(set([u'module-build-macros-0.1-1.module_fc4ed5f7.src.rpm-1-1']))
buildroot_groups.append(set([u'perl-Tangerine?#f25-1-1', u'perl-List-Compare?#f25-1-1']))
buildroot_groups.append(set([u'tangerine?#f25-1-1']))
buildroot_groups.append(set([u'perl-Tangerine?#f24-1-1', u'perl-List-Compare?#f25-1-1']))
buildroot_groups.append(set([u'tangerine?#f23-1-1']))
def on_buildroot_add_artifacts_cb(cls, artifacts, install):
self.assertEqual(buildroot_groups.pop(0), set(artifacts))
@@ -316,13 +317,13 @@ class TestBuild(unittest.TestCase):
return json.loads(rv.data)
with patch("module_build_service.config.Config.yaml_submit_allowed",
new_callable=PropertyMock, return_value = True):
new_callable=PropertyMock, return_value=True):
conf.set_item("yaml_submit_allowed", True)
data = submit()
self.assertEqual(data['id'], 1)
with patch("module_build_service.config.Config.yaml_submit_allowed",
new_callable=PropertyMock, return_value = False):
new_callable=PropertyMock, return_value=False):
data = submit()
self.assertEqual(data['status'], 403)
self.assertEqual(data['message'], 'YAML submission is not enabled')
@@ -431,8 +432,8 @@ class TestBuild(unittest.TestCase):
@timed(30)
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
@patch("module_build_service.config.Config.num_consecutive_builds",
new_callable=PropertyMock, return_value = 1)
@patch("module_build_service.config.Config.num_consecutive_builds",
new_callable=PropertyMock, return_value=1)
def test_submit_build_concurrent_threshold(self, conf_num_consecutive_builds,
mocked_scm, mocked_get_user,
conf_system, dbg):
@@ -476,13 +477,13 @@ class TestBuild(unittest.TestCase):
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
@patch("module_build_service.config.Config.num_consecutive_builds",
new_callable=PropertyMock, return_value = 2)
new_callable=PropertyMock, return_value=2)
def test_try_to_reach_concurrent_threshold(self, conf_num_consecutive_builds,
mocked_scm, mocked_get_user,
conf_system, dbg):
"""
Tests that we try to submit new component build right after
the previous one finished without waiting for all
the previous one finished without waiting for all
the num_consecutive_builds to finish.
"""
MockedSCM(mocked_scm, 'testmodule-more-components', 'testmodule-more-components.yaml',
@@ -531,7 +532,7 @@ class TestBuild(unittest.TestCase):
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
@patch("module_build_service.config.Config.num_consecutive_builds",
new_callable=PropertyMock, return_value = 1)
new_callable=PropertyMock, return_value=1)
def test_build_in_batch_fails(self, conf_num_consecutive_builds, mocked_scm,
mocked_get_user, conf_system, dbg):
"""
@@ -585,7 +586,7 @@ class TestBuild(unittest.TestCase):
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
@patch("module_build_service.config.Config.num_consecutive_builds",
new_callable=PropertyMock, return_value = 1)
new_callable=PropertyMock, return_value=1)
def test_all_builds_in_batch_fail(self, conf_num_consecutive_builds, mocked_scm,
mocked_get_user, conf_system, dbg):
"""
@@ -628,3 +629,107 @@ class TestBuild(unittest.TestCase):
# We should end up with batch 2 and never start batch 3, because
# there were failed components in batch 2.
self.assertEqual(c.module_build.batch, 2)
@timed(30)
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user,
                                conf_system, dbg):
    """
    Tests that we do not try building module-build-macros when reusing all
    components in a module build.
    """
    test_reuse_component_init_data()

    # Any attempt to actually build a component means reuse failed.
    def on_build_cb(cls, artifact_name, source):
        raise ValueError("All components should be reused, not build.")
    TestModuleBuilder.on_build_cb = on_build_cb

    # Check that components are tagged after the batch is built.
    tag_groups = []
    tag_groups.append(set(
        ['perl-Tangerine-0.23-1.module_testmodule_master_20170109091357',
         'perl-List-Compare-0.53-5.module_testmodule_master_20170109091357',
         'tangerine-0.22-3.module_testmodule_master_20170109091357']))

    def on_tag_artifacts_cb(cls, artifacts):
        self.assertEqual(tag_groups.pop(0), set(artifacts))

    TestModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb

    # The same set of NVRs must also land in the buildroot tag.
    buildtag_groups = []
    buildtag_groups.append(set(
        ['perl-Tangerine-0.23-1.module_testmodule_master_20170109091357',
         'perl-List-Compare-0.53-5.module_testmodule_master_20170109091357',
         'tangerine-0.22-3.module_testmodule_master_20170109091357']))

    def on_buildroot_add_artifacts_cb(cls, artifacts, install):
        self.assertEqual(buildtag_groups.pop(0), set(artifacts))

    TestModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

    # Drive the scheduler with a single "module build" message and stop
    # once the build reaches a terminal state.
    msgs = [MBSModule("local module build", 2, 1)]
    stop = module_build_service.scheduler.make_simple_stop_condition(db.session)
    module_build_service.scheduler.main(msgs, stop)

    # Expected reused-component ids from the fixture data; macros is the
    # only component that must NOT be reused (it is built per module).
    reused_component_ids = {"module-build-macros": None, "tangerine": 3,
                            "perl-Tangerine": 1, "perl-List-Compare": 2}

    # All components should be built and module itself should be in "done"
    # or "ready" state.
    for build in models.ComponentBuild.query.filter_by(module_id=2).all():
        self.assertEqual(build.state, koji.BUILD_STATES['COMPLETE'])
        self.assertTrue(build.module_build.state in [models.BUILD_STATES["done"], models.BUILD_STATES["ready"]])
        self.assertEqual(build.reused_component_id,
                         reused_component_ids[build.package])
@timed(30)
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
def test_submit_build_reuse_all_without_build_macros(self, mocked_scm, mocked_get_user,
                                                     conf_system, dbg):
    """
    Tests that we can reuse components even when the reused module does
    not have module-build-macros component.
    """
    test_reuse_component_init_data()

    # Remove every module-build-macros ComponentBuild from the fixture so
    # the previous module build has no macros component to reuse from.
    models.ComponentBuild.query.filter_by(package="module-build-macros").delete()
    self.assertEqual(len(models.ComponentBuild.query.filter_by(
        package="module-build-macros").all()), 0)

    db.session.commit()

    # Any attempt to actually build a component means reuse failed.
    def on_build_cb(cls, artifact_name, source):
        raise ValueError("All components should be reused, not build.")
    TestModuleBuilder.on_build_cb = on_build_cb

    # Check that components are tagged after the batch is built.
    tag_groups = []
    tag_groups.append(set(
        ['perl-Tangerine-0.23-1.module_testmodule_master_20170109091357',
         'perl-List-Compare-0.53-5.module_testmodule_master_20170109091357',
         'tangerine-0.22-3.module_testmodule_master_20170109091357']))

    def on_tag_artifacts_cb(cls, artifacts):
        self.assertEqual(tag_groups.pop(0), set(artifacts))

    TestModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb

    # The same set of NVRs must also land in the buildroot tag.
    buildtag_groups = []
    buildtag_groups.append(set(
        ['perl-Tangerine-0.23-1.module_testmodule_master_20170109091357',
         'perl-List-Compare-0.53-5.module_testmodule_master_20170109091357',
         'tangerine-0.22-3.module_testmodule_master_20170109091357']))

    def on_buildroot_add_artifacts_cb(cls, artifacts, install):
        self.assertEqual(buildtag_groups.pop(0), set(artifacts))

    TestModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb

    # Drive the scheduler with a single "module build" message and stop
    # once the build reaches a terminal state.
    msgs = [MBSModule("local module build", 2, 1)]
    stop = module_build_service.scheduler.make_simple_stop_condition(db.session)
    module_build_service.scheduler.main(msgs, stop)

    # All components should be built and module itself should be in "done"
    # or "ready" state.
    for build in models.ComponentBuild.query.filter_by(module_id=2).all():
        self.assertEqual(build.state, koji.BUILD_STATES['COMPLETE'])
        self.assertTrue(build.module_build.state in [models.BUILD_STATES["done"], models.BUILD_STATES["ready"]] )
        # Macros were deleted above, so no such component may exist now.
        self.assertNotEqual(build.package, "module-build-macros")

View File

@@ -33,7 +33,7 @@ import module_build_service.builder
from mock import patch, MagicMock
from tests import conf
from tests import conf, init_data
from module_build_service.builder import KojiModuleBuilder
@@ -61,9 +61,11 @@ class FakeKojiModuleBuilder(KojiModuleBuilder):
class TestKojiBuilder(unittest.TestCase):
def setUp(self):
init_data()
self.config = mock.Mock()
self.config.koji_profile = conf.koji_profile
self.config.koji_repository_url = conf.koji_repository_url
self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()
def test_tag_to_repo(self):
""" Test that when a repo msg hits us and we have no match,
@@ -82,8 +84,8 @@ class TestKojiBuilder(unittest.TestCase):
attrs = {'checkForBuilds.return_value': None,
'checkForBuilds.side_effect': IOError}
mocked_kojiutil.configure_mock(**attrs)
fake_kmb = FakeKojiModuleBuilder(owner='Moe Szyslak',
module='nginx',
fake_kmb = FakeKojiModuleBuilder(owner=self.module.owner,
module=self.module,
config=conf,
tag_name='module-nginx-1.2',
components=[])
@@ -98,8 +100,8 @@ class TestKojiBuilder(unittest.TestCase):
Tests that buildroot_add_artifacts and tag_artifacts do not try to
tag already tagged artifacts
"""
builder = FakeKojiModuleBuilder(owner='Moe Szyslak',
module='nginx',
builder = FakeKojiModuleBuilder(owner=self.module.owner,
module=self.module,
config=conf,
tag_name='module-nginx-1.2',
components=[])
@@ -130,20 +132,20 @@ class TestKojiBuilder(unittest.TestCase):
class TestGetKojiClientSession(unittest.TestCase):
def setUp(self):
init_data()
self.config = mock.Mock()
self.config.koji_profile = conf.koji_profile
self.config.koji_config = conf.koji_config
self.owner = 'Matt Jia'
self.module = 'fool'
self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()
self.tag_name = 'module-fool-1.2'
@patch.object(koji.ClientSession, 'krb_login')
def test_proxyuser(self, mocked_krb_login):
KojiModuleBuilder(owner=self.owner,
KojiModuleBuilder(owner=self.module.owner,
module=self.module,
config=self.config,
tag_name=self.tag_name,
components=[])
args, kwargs = mocked_krb_login.call_args
self.assertTrue(set([('proxyuser', self.owner)]).issubset(set(kwargs.items())))
self.assertTrue(set([('proxyuser', self.module.owner)]).issubset(set(kwargs.items())))

37
tests/test_config.py Normal file
View File

@@ -0,0 +1,37 @@
# Copyright (c) 2016 Red Hat, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Written by Jan Kaluza <jkaluza@redhat.com>
from nose.tools import raises, eq_
import unittest
import mock
import os.path
from mock import patch
from module_build_service import conf
class TestConfig(unittest.TestCase):
def test_path_expanduser(self):
test_dir = "~/modulebuild/builds"
conf.mock_resultsdir = test_dir
self.assertEqual(conf.mock_resultsdir, os.path.expanduser(test_dir))

View File

@@ -55,7 +55,7 @@ class TestFedmsgMessaging(unittest.TestCase):
self.assertEqual(msg.build_new_state, 1)
@patch("module_build_service.config.Config.system",
new_callable=PropertyMock, return_value = "copr")
new_callable=PropertyMock, return_value="copr")
def test_copr_build_end(self, conf_system):
# http://fedora-fedmsg.readthedocs.io/en/latest/topics.html#copr-build-end
copr_build_end_msg = {
@@ -65,7 +65,7 @@ class TestFedmsgMessaging(unittest.TestCase):
'copr': 'mutt-kz',
'ip': '172.16.3.3',
'pid': 12010,
'pkg': 'mutt-kz-1.5.23.1-1.20150203.git.c8504a8a.fc21',
'pkg': 'mutt-kz', # Reality doesnt match the linked docs
'status': 1,
'user': 'fatka',
'version': '1.5.23.1-1.20150203.git.c8504a8a.fc21',
@@ -95,14 +95,14 @@ class TestFedmsgMessaging(unittest.TestCase):
# https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134
buildsys_tag_msg = {
"msg": {
"build_id": 875961,
"name": "module-build-macros",
"tag_id": 619,
"instance": "primary",
"tag": "module-debugging-tools-master-20170405115403-build",
"user": "mbs/mbs.fedoraproject.org",
"version": "0.1",
"owner": "mbs/mbs.fedoraproject.org",
"build_id": 875961,
"name": "module-build-macros",
"tag_id": 619,
"instance": "primary",
"tag": "module-debugging-tools-master-20170405115403-build",
"user": "mbs/mbs.fedoraproject.org",
"version": "0.1",
"owner": "mbs/mbs.fedoraproject.org",
"release": "1.module_0c3d13fd"
},
'msg_id': '2015-51be4c8e-8ab6-4dcb-ac0d-37b257765c71',

View File

@@ -41,7 +41,7 @@ CASSETTES_DIR = path.join(
path.abspath(path.dirname(__file__)), '..', 'vcr-request-data')
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value = {'build': [], 'srpm-build': []})
return_value={'build': [], 'srpm-build': []})
@patch("module_build_service.scheduler.consumer.get_global_consumer")
@patch("module_build_service.builder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
@@ -53,7 +53,7 @@ class TestPoller(unittest.TestCase):
def tearDown(self):
init_data()
def test_process_paused_module_builds(self, crete_builder,
def test_process_paused_module_builds(self, create_builder,
koji_get_session, global_consumer,
dbg):
"""
@@ -67,7 +67,7 @@ class TestPoller(unittest.TestCase):
koji_get_session.return_value = koji_session
builder = mock.MagicMock()
crete_builder.return_value = builder
create_builder.return_value = builder
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
@@ -81,15 +81,14 @@ class TestPoller(unittest.TestCase):
poller.poll()
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = models.ModuleBuild.query.filter_by(id=2).one()
db.session.refresh(module_build)
# Components should be in BUILDING state now.
components = module_build.current_batch()
for component in components:
self.assertEqual(component.state, koji.BUILD_STATES["BUILDING"])
def test_trigger_new_repo_when_failed(self, crete_builder,
def test_trigger_new_repo_when_failed(self, create_builder,
koji_get_session, global_consumer,
dbg):
"""
@@ -107,7 +106,7 @@ class TestPoller(unittest.TestCase):
builder = mock.MagicMock()
builder.buildroot_ready.return_value = False
crete_builder.return_value = builder
create_builder.return_value = builder
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
@@ -123,7 +122,7 @@ class TestPoller(unittest.TestCase):
koji_session.newRepo.assert_called_once_with("module-testmodule-build")
def test_trigger_new_repo_when_succeded(self, crete_builder,
def test_trigger_new_repo_when_succeded(self, create_builder,
koji_get_session, global_consumer,
dbg):
"""
@@ -142,7 +141,7 @@ class TestPoller(unittest.TestCase):
builder = mock.MagicMock()
builder.buildroot_ready.return_value = False
crete_builder.return_value = builder
create_builder.return_value = builder
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
@@ -155,6 +154,44 @@ class TestPoller(unittest.TestCase):
poller = MBSProducer(hub)
poller.poll()
# Refresh our module_build object.
db.session.refresh(module_build)
self.assertTrue(not koji_session.newRepo.called)
self.assertEqual(module_build.new_repo_task_id, 0)
def test_process_paused_module_builds_waiting_for_repo(
        self, create_builder, koji_get_session, global_consumer, dbg):
    """
    Tests that process_paused_module_builds does not start new batch
    when we are waiting for repo.
    """
    # Wire up the mocked consumer/Koji session/builder the poller uses.
    consumer = mock.MagicMock()
    consumer.incoming = queue.Queue()
    global_consumer.return_value = consumer

    koji_session = mock.MagicMock()
    koji_get_session.return_value = koji_session

    builder = mock.MagicMock()
    create_builder.return_value = builder

    # Change the batch to 2, so the module build is in state where
    # it is not building anything, but the state is "build".
    module_build = models.ModuleBuild.query.filter_by(id=2).one()
    module_build.batch = 2
    # A non-zero new_repo_task_id marks a pending newRepo task, i.e.
    # "waiting for repo" -- the poller must not start a new batch.
    module_build.new_repo_task_id = 123456
    db.session.commit()

    # Poll :)
    hub = mock.MagicMock()
    poller = MBSProducer(hub)
    poller.poll()

    # Refresh our module_build object.
    db.session.refresh(module_build)

    # Components should not be in building state
    components = module_build.current_batch()
    for component in components:
        self.assertEqual(component.state, None)

View File

@@ -105,7 +105,7 @@ class TestRepoDone(unittest.TestCase):
@mock.patch('module_build_service.builder.KojiModuleBuilder.get_session')
@mock.patch('module_build_service.builder.KojiModuleBuilder.build')
@mock.patch('module_build_service.builder.KojiModuleBuilder.buildroot_connect')
@mock.patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", return_value = {'build': [], 'srpm-build': []})
@mock.patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", return_value={'build': [], 'srpm-build': []})
def test_failed_component_build(self, dbg, connect, build_fn, config, ready, list_tasks_fn):
""" Test that when a KojiModuleBuilder.build fails, the build is
marked as failed with proper state_reason.

View File

@@ -65,7 +65,7 @@ class TestTagTagged(unittest.TestCase):
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value = {'build': [], 'srpm-build': []})
return_value={'build': [], 'srpm-build': []})
@patch("module_build_service.builder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo(self, create_builder, koji_get_session, dbg):
@@ -84,6 +84,13 @@ class TestTagTagged(unittest.TestCase):
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
# Set previous components as COMPLETE and tagged.
module_build.batch = 1
for c in module_build.up_to_current_batch():
c.state = koji.BUILD_STATES["COMPLETE"]
c.tagged = True
module_build.batch = 2
for c in module_build.current_batch():
c.state = koji.BUILD_STATES["COMPLETE"]
@@ -118,7 +125,7 @@ class TestTagTagged(unittest.TestCase):
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value = {'build': [], 'srpm-build': []})
return_value={'build': [], 'srpm-build': []})
@patch("module_build_service.builder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_still_building_components(self, create_builder, koji_get_session, dbg):
@@ -154,7 +161,7 @@ class TestTagTagged(unittest.TestCase):
self.assertTrue(not koji_session.newRepo.called)
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value = {'build': [], 'srpm-build': []})
return_value={'build': [], 'srpm-build': []})
@patch("module_build_service.builder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg):
@@ -173,6 +180,13 @@ class TestTagTagged(unittest.TestCase):
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
# Set previous components as COMPLETE and tagged.
module_build.batch = 1
for c in module_build.up_to_current_batch():
c.state = koji.BUILD_STATES["COMPLETE"]
c.tagged = True
module_build.batch = 2
component = module_build_service.models.ComponentBuild.query\
.filter_by(package='perl-Tangerine', module_id=module_build.id).one()
@@ -199,3 +213,68 @@ class TestTagTagged(unittest.TestCase):
# newRepo task_id should be stored in database, so we can check its
# status later in poller.
self.assertEqual(module_build.new_repo_task_id, 123456)
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value = {'build': [], 'srpm-build': []})
@patch("module_build_service.builder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_multiple_batches_tagged(
self, create_builder, koji_get_session, dbg):
"""
Test that newRepo is called just once, and only once all components
are tagged, even if we tag components from multiple batches at the
same time.
"""
koji_session = mock.MagicMock()
koji_session.getTag = lambda tag_name: {'name': tag_name}
koji_session.getTaskInfo.return_value = {'state': koji.TASK_STATES['CLOSED']}
koji_session.newRepo.return_value = 123456
koji_get_session.return_value = koji_session
builder = mock.MagicMock()
builder.koji_session = koji_session
builder.buildroot_ready.return_value = False
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
module_build.batch = 2
for c in module_build.current_batch():
c.state = koji.BUILD_STATES["COMPLETE"]
db.session.commit()
# Tag the first component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-build', "perl-Tangerine")
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# newRepo should not be called, because there are still components
# to tag.
self.assertTrue(not koji_session.newRepo.called)
# Tag the second component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-build', "perl-List-Compare")
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# newRepo should not be called, because there are still components
# to tag.
self.assertTrue(not koji_session.newRepo.called)
# Tag the component from first batch to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
'id', 'module-testmodule-build', "module-build-macros")
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
# newRepo should be called now - all components have been tagged.
koji_session.newRepo.assert_called_once_with("module-testmodule-build")
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
# newRepo task_id should be stored in database, so we can check its
# status later in poller.
self.assertEqual(module_build.new_repo_task_id, 123456)

View File

@@ -28,7 +28,7 @@ import unittest
from nose.tools import raises
import module_build_service.scm
from module_build_service.errors import ValidationError
from module_build_service.errors import ValidationError, UnprocessableEntity
repo_path = 'file://' + os.path.dirname(__file__) + "/scm_data/testrepo"
@@ -83,7 +83,7 @@ class TestSCMModule(unittest.TestCase):
sourcedir = scm.checkout(self.tempdir)
scm.verify(sourcedir)
@raises(RuntimeError)
@raises(UnprocessableEntity)
def test_verify_unknown_branch(self):
scm = module_build_service.scm.SCM(repo_path, "unknown")
sourcedir = scm.checkout(self.tempdir)
@@ -102,7 +102,7 @@ class TestSCMModule(unittest.TestCase):
sourcedir = scm.checkout(self.tempdir)
scm.verify(sourcedir)
@raises(RuntimeError)
@raises(UnprocessableEntity)
def test_verify_unknown_hash(self):
target = '7035bd33614972ac66559ac1fdd019ff6027ad22'
scm = module_build_service.scm.SCM(repo_path + "?#" + target, "master")

View File

@@ -63,7 +63,7 @@ class MockedSCM(object):
return scm_dir
def get_latest(self, branch = 'master'):
def get_latest(self, branch='master'):
return self.commit if self.commit else branch
class TestUtils(unittest.TestCase):
@@ -81,11 +81,16 @@ class TestUtils(unittest.TestCase):
mocked_scm.return_value.commit = \
'620ec77321b2ea7b0d67d82992dda3e1d67055b4'
# For all the RPMs in testmodule, get_latest is called
hashes_returned = [
'4ceea43add2366d8b8c5a622a2fb563b625b9abf',
'fbed359411a1baa08d4a88e0d12d426fbf8f602c',
'76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb']
mocked_scm.return_value.get_latest.side_effect = hashes_returned
hashes_returned = {
'f24': '4ceea43add2366d8b8c5a622a2fb563b625b9abf',
'f23': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c',
'f25': '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb'}
original_refs = ["f23", "f24", "f25"]
def mocked_get_latest(branch="master"):
return hashes_returned[branch]
mocked_scm.return_value.get_latest = mocked_get_latest
mmd = modulemd.ModuleMetadata()
with open(path.join(BASE_DIR, '..', 'staged_data', 'testmodule.yaml')) \
as mmd_file:
@@ -95,9 +100,9 @@ class TestUtils(unittest.TestCase):
'?#620ec77321b2ea7b0d67d82992dda3e1d67055b4')
module_build_service.utils.format_mmd(mmd, scmurl)
# Make sure all the commit hashes were properly set on the RPMs
# Make sure that original refs are not changed.
mmd_pkg_refs = [pkg.ref for pkg in mmd.components.rpms.values()]
self.assertEqual(set(mmd_pkg_refs), set(hashes_returned))
self.assertEqual(set(mmd_pkg_refs), set(original_refs))
self.assertEqual(mmd.buildrequires, {'base-runtime': 'master'})
xmd = {
@@ -108,10 +113,14 @@ class TestUtils(unittest.TestCase):
'ref': '464026abf9cbe10fac1d800972e3229ac4d01975',
'stream': 'master',
'version': '20170404161234'}},
'rpms': {'perl-List-Compare': {'ref': '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb'},
'perl-Tangerine': {'ref': '4ceea43add2366d8b8c5a622a2fb563b625b9abf'},
'tangerine': {'ref': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'}},
'scmurl': 'git://pkgs.stg.fedoraproject.org/modules/testmodule'
'.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4',
}
}
self.assertEqual(mmd.xmd, xmd)
@vcr.use_cassette(
@@ -119,10 +128,13 @@ class TestUtils(unittest.TestCase):
@patch('module_build_service.scm.SCM')
def test_format_mmd_empty_scmurl(self, mocked_scm):
# For all the RPMs in testmodule, get_latest is called
mocked_scm.return_value.get_latest.side_effect = [
'4ceea43add2366d8b8c5a622a2fb563b625b9abf',
'fbed359411a1baa08d4a88e0d12d426fbf8f602c',
'76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb']
hashes_returned = {
'f24': '4ceea43add2366d8b8c5a622a2fb563b625b9abf',
'f23': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c',
'f25': '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb'}
def mocked_get_latest(branch="master"):
return hashes_returned[branch]
mocked_scm.return_value.get_latest = mocked_get_latest
mmd = modulemd.ModuleMetadata()
with open(path.join(BASE_DIR, '..', 'staged_data', 'testmodule.yaml')) \
@@ -138,6 +150,9 @@ class TestUtils(unittest.TestCase):
'ref': '464026abf9cbe10fac1d800972e3229ac4d01975',
'stream': 'master',
'version': '20170404161234'}},
'rpms': {'perl-List-Compare': {'ref': '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb'},
'perl-Tangerine': {'ref': '4ceea43add2366d8b8c5a622a2fb563b625b9abf'},
'tangerine': {'ref': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'}},
'scmurl': None,
}
}
@@ -358,13 +373,15 @@ class TestUtils(unittest.TestCase):
self.assertTrue(str(cm.exception).endswith(' No value provided.'))
@vcr.use_cassette(
path.join(CASSETTES_DIR, 'tests.test_utils.TestUtils.test_format_mmd'))
@patch('module_build_service.scm.SCM')
def test_resubmit(self, mocked_scm):
"""
Tests that the module resubmit reinitializes the module state and
component states properly.
"""
MockedSCM(mocked_scm, 'testmodule', 'testmodule-bootstrap.yaml',
MockedSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
'620ec77321b2ea7b0d67d82992dda3e1d67055b4')
with app.app_context():
test_reuse_component_init_data()
@@ -396,6 +413,48 @@ class TestUtils(unittest.TestCase):
self.assertEqual(failed_component.state, None)
self.assertEqual(canceled_component.state, None)
@vcr.use_cassette(
path.join(CASSETTES_DIR, 'tests.test_utils.TestUtils.test_format_mmd'))
@patch('module_build_service.scm.SCM')
def test_record_component_builds_duplicate_components(self, mocked_scm):
with app.app_context():
test_reuse_component_init_data()
mocked_scm.return_value.commit = \
'620ec77321b2ea7b0d67d82992dda3e1d67055b4'
# For all the RPMs in testmodule, get_latest is called
hashes_returned = {
'f25': '4ceea43add2366d8b8c5a622a2fb563b625b9abf',
'f24': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'}
def mocked_get_latest(branch="master"):
return hashes_returned[branch]
mocked_scm.return_value.get_latest = mocked_get_latest
testmodule_variant_mmd_path = path.join(
BASE_DIR, '..', 'staged_data', 'testmodule-variant.yaml')
testmodule_variant_mmd = modulemd.ModuleMetadata()
with open(testmodule_variant_mmd_path) as mmd_file:
testmodule_variant_mmd.loads(mmd_file)
module_build = \
db.session.query(models.ModuleBuild).filter_by(id=1).one()
mmd = module_build.mmd()
error_msg = (
'The included module "testmodule-variant" in "testmodule" have '
'the following conflicting components: perl-List-Compare')
try:
module_build_service.utils.record_component_builds(
testmodule_variant_mmd, module_build, main_mmd=mmd)
assert False, 'A RuntimeError was expected but was not raised'
except RuntimeError as e:
self.assertEqual(e.message, error_msg)
self.assertEqual(module_build.state, models.BUILD_STATES['failed'])
self.assertEqual(module_build.state_reason, error_msg)
class DummyModuleBuilder(GenericBuilder):
"""
Dummy module builder
@@ -458,7 +517,7 @@ class DummyModuleBuilder(GenericBuilder):
def list_tasks_for_components(self, component_builds=None, state='active'):
pass
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", return_value = {'build': [], 'srpm-build': []})
@patch("module_build_service.builder.GenericBuilder.default_buildroot_groups", return_value={'build': [], 'srpm-build': []})
class TestBatches(unittest.TestCase):
def setUp(self):

View File

@@ -24,16 +24,19 @@ import unittest
import json
import time
import vcr
import modulemd as _modulemd
import module_build_service.scm
from mock import patch, Mock, PropertyMock
from shutil import copyfile
from os import path, mkdir
from os.path import dirname
import modulemd as _modulemd
import hashlib
from tests import app, init_data
from module_build_service.errors import UnprocessableEntity
from module_build_service.models import ComponentBuild, ModuleBuild
import module_build_service.scm
from module_build_service import conf
@@ -43,8 +46,9 @@ anonymous_user = ('anonymous', set(['packager']))
base_dir = dirname(dirname(__file__))
cassette_dir = base_dir + '/vcr-request-data/'
class MockedSCM(object):
def __init__(self, mocked_scm, name, mmd_filenames, commit=None):
def __init__(self, mocked_scm, name, mmd_filenames, commit=None, checkout_raise=False):
"""
Adds default testing checkout, get_latest and name methods
to mocked_scm SCM class.
@@ -61,7 +65,15 @@ class MockedSCM(object):
self.mmd_filenames = mmd_filenames
self.checkout_id = 0
self.mocked_scm.return_value.checkout = self.checkout
if checkout_raise:
self.mocked_scm.return_value.checkout.side_effect = \
UnprocessableEntity(
"checkout: The requested commit hash was not found within "
"the repository. Perhaps you forgot to push. The original "
"message was: ")
else:
self.mocked_scm.return_value.checkout = self.checkout
self.mocked_scm.return_value.name = self.name
self.mocked_scm.return_value.commit = self.commit
self.mocked_scm.return_value.get_latest = self.get_latest
@@ -85,7 +97,7 @@ class MockedSCM(object):
return scm_dir
def get_latest(self, branch='master'):
return branch
return hashlib.sha1(branch).hexdigest()[:10]
class TestViews(unittest.TestCase):
@@ -226,6 +238,11 @@ class TestViews(unittest.TestCase):
data = json.loads(rv.data)
self.assertEquals(data['meta']['total'], 10)
def test_query_builds_filter_koji_tag(self):
rv = self.client.get('/module-build-service/1/module-builds/?koji_tag=module-nginx-1.2')
data = json.loads(rv.data)
self.assertEquals(data['meta']['total'], 10)
def test_query_builds_filter_completed_before(self):
rv = self.client.get(
'/module-build-service/1/module-builds/?completed_before=2016-09-03T11:30:00Z')
@@ -423,7 +440,7 @@ class TestViews(unittest.TestCase):
@patch('module_build_service.scm.SCM')
def test_submit_build_scm_parallalization(self, mocked_scm,
mocked_get_user):
def mocked_scm_get_latest(branch = "master"):
def mocked_scm_get_latest(branch="master"):
time.sleep(1)
return branch
@@ -477,8 +494,8 @@ class TestViews(unittest.TestCase):
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
@patch("module_build_service.config.Config.modules_allow_repository",
new_callable=PropertyMock, return_value = True)
@patch("module_build_service.config.Config.modules_allow_repository",
new_callable=PropertyMock, return_value=True)
def test_submit_build_includedmodule(self, conf, mocked_scm, mocked_get_user):
mocked_scm_obj = MockedSCM(mocked_scm, "includedmodules",
["includedmodules.yaml", "testmodule.yaml"])
@@ -513,6 +530,17 @@ class TestViews(unittest.TestCase):
self.assertEquals(batches['tangerine'], 3)
self.assertEquals(batches["file"], 4)
build = ModuleBuild.query.filter(ModuleBuild.id == data['id']).one()
mmd = build.mmd()
# Test that RPMs are properly merged in case of included modules in mmd.
xmd_rpms = {'ed': {'ref': '40bd001563'},
'perl-List-Compare': {'ref': '2ee8474e44'},
'tangerine': {'ref': 'd29d5c24b8'},
'file': {'ref': 'a2740663f8'},
'perl-Tangerine': {'ref': '27785f9f05'}}
self.assertEqual(mmd.xmd['mbs']['rpms'], xmd_rpms)
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
def test_submit_build_includedmodule_custom_repo_not_allowed(self,
@@ -558,7 +586,7 @@ class TestViews(unittest.TestCase):
return_value=('sammy', set(["packager", "mbs-admin"])))
def test_cancel_build_admin(self, mocked_get_user):
with patch("module_build_service.config.Config.admin_groups",
new_callable=PropertyMock, return_value = set(["mbs-admin"])):
new_callable=PropertyMock, return_value=set(["mbs-admin"])):
rv = self.client.patch('/module-build-service/1/module-builds/30',
data=json.dumps({'state': 'failed'}))
data = json.loads(rv.data)
@@ -570,7 +598,7 @@ class TestViews(unittest.TestCase):
return_value=('sammy', set(["packager"])))
def test_cancel_build_no_admin(self, mocked_get_user):
with patch("module_build_service.config.Config.admin_groups",
new_callable=PropertyMock, return_value = set(["mbs-admin"])):
new_callable=PropertyMock, return_value=set(["mbs-admin"])):
rv = self.client.patch('/module-build-service/1/module-builds/30',
data=json.dumps({'state': 'failed'}))
data = json.loads(rv.data)
@@ -711,3 +739,20 @@ class TestViews(unittest.TestCase):
r3 = self.client.patch(url, data=json.dumps({'state': 'failed', 'owner': 'foo'}))
self.assertEquals(r3.status_code, 400)
self.assertIn("The request contains 'owner' parameter", json.loads(r3.data)['message'])
@patch('module_build_service.auth.get_user', return_value=user)
@patch('module_build_service.scm.SCM')
def test_submit_build_commit_hash_not_found(self, mocked_scm, mocked_get_user):
MockedSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
'7035bd33614972ac66559ac1fdd019ff6027ad22', checkout_raise=True)
rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps(
{'branch': 'master', 'scmurl': 'git://pkgs.stg.fedoraproject.org/modules/'
'testmodule.git?#7035bd33614972ac66559ac1fdd019ff6027ad22'}))
data = json.loads(rv.data)
self.assertIn("The requested commit hash was not found within the repository.",
data['message'])
self.assertIn("Perhaps you forgot to push. The original message was: ",
data['message'])
self.assertEquals(data['status'], 422)
self.assertEquals(data['error'], 'Unprocessable Entity')

File diff suppressed because it is too large Load Diff