Merge #170 Initial version of mock backend

This commit is contained in:
Jan Kaluža
2016-11-10 08:58:59 +00:00
7 changed files with 425 additions and 170 deletions

View File

@@ -27,13 +27,18 @@ import logging
import os
import ssl
from shutil import rmtree
import getpass
from module_build_service import app, conf, db
from module_build_service.config import Config
from module_build_service import models
from module_build_service.pdc import (
get_pdc_client_session, get_module, get_module_runtime_dependencies,
get_module_tag, get_module_build_dependencies, get_module_repo)
import module_build_service.auth
import module_build_service.scheduler.main
from module_build_service.utils import submit_module_build
from module_build_service.messaging import RidaModule
manager = Manager(app)
@@ -101,6 +106,23 @@ def upgradedb():
flask_migrate.upgrade()
@manager.command
def cleardb():
    """Delete every module and component build record from the database."""
    for model in (models.ModuleBuild, models.ComponentBuild):
        model.query.delete()
@manager.command
def build_module_locally(url):
    """Build the module at SCM *url* locally as the current system user."""
    username = getpass.getuser()
    # Start from an empty database so the local build is isolated.
    cleardb()
    submit_module_build(username, url)
    # Seed the scheduler with one fake message so it picks up the build,
    # and ask it to return once the build finishes.
    initial_msgs = [RidaModule("fake msg", 1, 1)]
    module_build_service.scheduler.main.main(initial_msgs, True)
@manager.command
def gendevfedmsgcert(pki_dir='/opt/module_build_service/pki', force=False):
"""

View File

@@ -44,12 +44,15 @@ import random
import string
import kobo.rpmlib
import xmlrpclib
import shutil
import subprocess
import munch
from OpenSSL.SSL import SysCallError
from module_build_service import log, db
from module_build_service import conf, log, db
from module_build_service.models import ModuleBuild
import module_build_service.scheduler.main
import module_build_service.utils
logging.basicConfig(level=logging.DEBUG)
@@ -166,6 +169,9 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
elif backend == "copr":
return CoprModuleBuilder(owner=owner, module=module,
config=config, **extra)
elif backend == "mock":
return MockModuleBuilder(owner=owner, module=module,
config=config, **extra)
else:
raise ValueError("Builder backend='%s' not recognized" % backend)
@@ -845,3 +851,178 @@ class CoprModuleBuilder(GenericBuilder):
def get_disttag_srpm(disttag):
    """Return the dist-tag SRPM for *disttag*.

    # @FIXME
    # No Copr-specific implementation yet; delegates to the Koji
    # builder's version. NOTE(review): presumably decorated as a
    # @staticmethod like the Koji counterpart — decorator is outside
    # this view, confirm in the full file.
    """
    # @FIXME
    return KojiModuleBuilder.get_disttag_srpm(disttag)
class MockModuleBuilder(GenericBuilder):
    """
    Builder backend that builds module components locally in Mock.

    There is no real buildsystem behind this backend, so it fakes the
    Koji messages (repo change, build change) itself and feeds them into
    the scheduler's work queue; the normal MBS state machine then drives
    the module build as if Koji had emitted them.

    See http://blog.samalik.com/copr-in-the-modularity-world/
    especially section "Building a stack"
    """

    backend = "mock"

    def __init__(self, owner, module, config, tag_name):
        """
        :param owner: name of the user owning the module build.
        :param module: string identifying the module being built.
        :param config: module_build_service configuration object; must
            provide mock_config and mock_build_srpm_cmd.
        :param tag_name: name of the emulated Koji tag; results are
            stored under /tmp/<tag_name>.
        """
        self.module_str = module
        self.tag_name = tag_name
        self.config = config

        # Local directory emulating the tag repository: built RPMs land
        # here and get installed into the Mock buildroot of subsequent
        # component builds.
        self.tag_dir = os.path.join("/tmp/", tag_name)
        if not os.path.exists(self.tag_dir):
            os.makedirs(self.tag_dir)

        log.info("MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" %
                 (tag_name, self.tag_dir))

    def buildroot_connect(self):
        # Nothing to connect to for local Mock builds.
        pass

    def buildroot_prep(self):
        # Mock (re)initializes its chroot per build in build_srpm().
        pass

    def buildroot_resume(self):
        pass

    def buildroot_ready(self, artifacts=None):
        # Local buildroot is always considered ready.
        return True

    def buildroot_add_dependency(self, dependencies):
        pass

    def buildroot_add_artifacts(self, artifacts, install=False):
        pass

    def buildroot_add_repos(self, dependencies):
        pass

    def _send_repo_done(self):
        """Emit a faked Koji "repo done" message for our build tag."""
        msg = module_build_service.messaging.KojiRepoChange(
            msg_id='a faked internal message',
            repo_tag=self.tag_name + "-build",
        )
        module_build_service.scheduler.main.outgoing_work_queue_put(msg)

    def _send_build_change(self, state, source):
        """Emit a faked Koji build state-change message for *source*.

        :param state: target koji.BUILD_STATES value.
        :param source: SRPM path/NVR string parseable by kobo.rpmlib.
        """
        nvr = kobo.rpmlib.parse_nvr(source)

        # build_id=1 and task_id=1 are OK here, because we are building just
        # one RPM at the time.
        msg = module_build_service.messaging.KojiBuildChange(
            msg_id='a faked internal message',
            build_id=1,
            task_id=1,
            build_name=nvr["name"],
            build_new_state=state,
            build_release=nvr["release"],
            build_version=nvr["version"]
        )
        module_build_service.scheduler.main.outgoing_work_queue_put(msg)

    def _execute_cmd(self, args):
        """Run *args* as a subprocess, raising RuntimeError on failure.

        :param args: argv list passed straight to subprocess.call.
        :raises RuntimeError: when the command exits non-zero.
        """
        log.debug("Executing command: %s" % args)
        ret = subprocess.call(args)
        if ret != 0:
            # Fixed: the message previously interpolated an undefined
            # name "cmd", which raised NameError instead of the intended
            # RuntimeError.
            raise RuntimeError("Command '%s' returned non-zero value %d"
                               % (args, ret))

    def build_srpm(self, artifact_name, source):
        """
        Builds the artifact from the SRPM.

        Blocks until Mock finishes, then emits faked messages so MBS
        picks up the resulting state. Always returns the "building"
        state; the real outcome is delivered via the faked messages.
        """
        try:
            # Initialize mock.
            self._execute_cmd(["mock", "-r", self.config.mock_config,
                               "--init"])

            # Install all RPMs from our tag_dir to mock.
            self._execute_cmd(["mock", "-r", self.config.mock_config,
                               "--copyin", self.tag_dir, "/tmp"])
            rpms = [os.path.join("/tmp", self.tag_name, rpm) for rpm
                    in os.listdir(self.tag_dir)
                    if rpm.endswith(".rpm") and not rpm.endswith(".src.rpm")]
            if rpms:
                self._execute_cmd(["mock", "-r", self.config.mock_config,
                                   "--install"] + rpms)

            # Start the build and store results to tag_dir
            # TODO: Maybe this should not block in the future, but for local
            # builds it is not a big problem.
            self._execute_cmd(["mock", "-r", self.config.mock_config,
                               "--no-clean", "--rebuild", source,
                               "--resultdir=%s" % self.tag_dir])

            # Emit messages simulating complete build. These messages
            # are put in the scheduler.main._work_queue and are handled
            # by MBS after the build_srpm() method returns and scope gets
            # back to scheduler.main.main() method.
            self._send_repo_done()
            self._send_build_change(koji.BUILD_STATES['COMPLETE'], source)
            self._send_repo_done()
        except Exception as e:
            log.error("Error while building artifact %s: %s" % (artifact_name,
                                                                str(e)))

            # Emit messages simulating a FAILED build, handled the same
            # way as the success-path messages above.
            self._send_repo_done()
            self._send_build_change(koji.BUILD_STATES['FAILED'], source)
            self._send_repo_done()

        # Return the "building" state. Real state will be taken by MBS
        # from the messages emitted above.
        state = koji.BUILD_STATES['BUILDING']
        reason = "Submitted %s to Koji" % (artifact_name)
        return 1, state, reason, None

    def build_from_scm(self, artifact_name, source):
        """
        Builds the artifact from the SCM based source.

        Clones *source*, creates an SRPM with the configured command and
        delegates the actual build to build_srpm().
        """
        td = None
        owd = os.getcwd()
        ret = 1, koji.BUILD_STATES["FAILED"], "Cannot create SRPM", None

        try:
            log.debug('Cloning source URL: %s' % source)
            # Create temp dir and clone the repo there.
            # NOTE(review): relies on a module-level "tempfile" import —
            # not visible in this diff hunk, confirm it is imported.
            td = tempfile.mkdtemp()
            scm = module_build_service.scm.SCM(source)
            cod = scm.checkout(td)

            # Use configured command to create SRPM out of the SCM repo.
            log.debug("Creating SRPM")
            os.chdir(cod)
            self._execute_cmd(self.config.mock_build_srpm_cmd.split(" "))

            # Find out the built SRPM and build it normally.
            for f in os.listdir(cod):
                if f.endswith(".src.rpm"):
                    log.info("Created SRPM %s" % f)
                    source = os.path.join(cod, f)
                    ret = self.build_srpm(artifact_name, source)
                    break
        finally:
            # Restore the working directory and clean up the checkout.
            os.chdir(owd)
            try:
                if td is not None:
                    shutil.rmtree(td)
            except Exception as e:
                log.warning(
                    "Failed to remove temporary directory {!r}: {}".format(
                        td, str(e)))

        return ret

    def build(self, artifact_name, source):
        """Build *artifact_name* from an SCM URL or a local SRPM path."""
        log.info("Starting building artifact %s: %s" % (artifact_name, source))

        # Git sources are treated specially.
        if source.startswith("git://"):
            return self.build_from_scm(artifact_name, source)
        else:
            return self.build_srpm(artifact_name, source)

    @staticmethod
    def get_disttag_srpm(disttag):
        # @FIXME
        return KojiModuleBuilder.get_disttag_srpm(disttag)

View File

@@ -81,6 +81,8 @@ class Config(object):
self._amq_cert_file = ""
self._amq_private_key_file = ""
self._amq_trusted_cert_file = ""
self._mock_config = "fedora-25-x86_64"
self._mock_build_srpm_cmd = "fedpkg --dist f25 srpm"
@property
def system(self):
@@ -90,8 +92,8 @@ class Config(object):
@system.setter
def system(self, s):
s = str(s)
if s not in ("koji", "copr"):
raise ValueError("Unsupported buildsystem.")
if s not in ("koji", "copr", "mock"):
raise ValueError("Unsupported buildsystem: %s." % s)
self._system = s
@property
@@ -203,6 +205,22 @@ class Config(object):
raise ValueError("polling_interval must be >= 0")
self._polling_interval = i
@property
def mock_config(self):
    """Name of the Mock chroot config passed to "mock -r" (string)."""
    return self._mock_config

@mock_config.setter
def mock_config(self, s):
    # Coerce to str so non-string config values are tolerated.
    self._mock_config = str(s)
@property
def mock_build_srpm_cmd(self):
    """Command used to create an SRPM from an SCM checkout (string)."""
    return self._mock_build_srpm_cmd

@mock_build_srpm_cmd.setter
def mock_build_srpm_cmd(self, s):
    # Coerce to str so non-string config values are tolerated.
    self._mock_build_srpm_cmd = str(s)
@property
def koji_config(self):
"""Koji URL."""

View File

@@ -23,7 +23,7 @@
""" Handlers for module change events on the message bus. """
from module_build_service import models, log
from module_build_service import conf, models, log
import module_build_service.builder
import module_build_service.pdc
import module_build_service.utils
@@ -85,31 +85,39 @@ def wait(config, session, msg):
tag = None
dependencies = None
pdc_session = module_build_service.pdc.get_pdc_client_session(config)
pdc_query = {
'name': module_info['name'],
'version': module_info['version'],
'release': module_info['release'],
}
if conf.system == "mock":
# In case of mock, we do not try to get anything from pdc,
# just generate our own koji_tag to identify the module in messages.
tag = '-'.join(['module', module_info['name'],
str(module_info['version']), str(module_info['release'])])
# TODO: Dependencies
else:
# TODO: Move this to separate func
pdc_session = module_build_service.pdc.get_pdc_client_session(config)
pdc_query = {
'name': module_info['name'],
'version': module_info['version'],
'release': module_info['release'],
}
@module_build_service.utils.retry(interval=10, timeout=30, wait_on=ValueError)
def _get_deps_and_tag():
log.info("Getting %s deps from pdc" % module_info['name'])
dependencies = module_build_service.pdc.get_module_build_dependencies(
pdc_session, pdc_query, strict=True)
log.info("Getting %s tag from pdc" % module_info['name'])
tag = module_build_service.pdc.get_module_tag(
pdc_session, pdc_query, strict=True)
return dependencies, tag
@module_build_service.utils.retry(interval=10, timeout=30, wait_on=ValueError)
def _get_deps_and_tag():
log.info("Getting %s deps from pdc" % module_info['name'])
dependencies = module_build_service.pdc.get_module_build_dependencies(
pdc_session, pdc_query, strict=True)
log.info("Getting %s tag from pdc" % module_info['name'])
tag = module_build_service.pdc.get_module_tag(
pdc_session, pdc_query, strict=True)
return dependencies, tag
try:
dependencies, tag = _get_deps_and_tag()
except ValueError:
log.exception("Failed to get module info from PDC. Max retries reached.")
build.transition(config, state="failed")
session.commit()
raise
try:
dependencies, tag = _get_deps_and_tag()
except ValueError:
log.exception("Failed to get module info from PDC. Max retries reached.")
build.transition(config, state="failed")
session.commit()
raise
log.debug("Found tag=%s for module %r" % (tag, build))
# Hang on to this information for later. We need to know which build is

View File

@@ -76,8 +76,9 @@ class MessageIngest(threading.Thread):
class MessageWorker(threading.Thread):
def __init__(self, incoming_work_queue, *args, **kwargs):
def __init__(self, incoming_work_queue, stop_after_build, *args, **kwargs):
self.incoming_work_queue = incoming_work_queue
self.stop_after_build = stop_after_build
super(MessageWorker, self).__init__(*args, **kwargs)
# These are our main lookup tables for figuring out what to run in response
@@ -128,6 +129,7 @@ class MessageWorker(threading.Thread):
if msg is STOP_WORK:
log.info("Worker thread received STOP_WORK, shutting down...")
os._exit(0)
break
try:
@@ -148,6 +150,9 @@ class MessageWorker(threading.Thread):
handler = self.on_repo_change
elif type(msg) == module_build_service.messaging.RidaModule:
handler = self.on_module_change[module_build_state_from_msg(msg)]
if (self.stop_after_build and module_build_state_from_msg(msg)
in [models.BUILD_STATES["failed"], models.BUILD_STATES["ready"]]):
self.incoming_work_queue.put(STOP_WORK)
else:
log.debug("Unhandled message...")
return
@@ -236,6 +241,9 @@ class Poller(threading.Thread):
# @TODO
pass
elif conf.system == "mock":
pass
else:
raise NotImplementedError("Buildsystem %r is not supported." % conf.system)
@@ -277,17 +285,24 @@ class Poller(threading.Thread):
log.warning("process_lingering_module_builds is not yet implemented...")
def main():
log.info("Starting module_build_service_daemon.")
try:
work_queue = queue.Queue()
_work_queue = queue.Queue()
def outgoing_work_queue_put(msg):
    """Put *msg* onto the module-level _work_queue consumed by the scheduler."""
    _work_queue.put(msg)
def main(initial_msgs = [], return_after_build = False):
log.info("Starting module_build_service_daemon.")
for msg in initial_msgs:
outgoing_work_queue_put(msg)
try:
# This ingest thread puts work on the queue
messaging_thread = MessageIngest(work_queue)
messaging_thread = MessageIngest(_work_queue)
# This poller does other work, but also sometimes puts work in queue.
polling_thread = Poller(work_queue)
polling_thread = Poller(_work_queue)
# This worker takes work off the queue and handles it.
worker_thread = MessageWorker(work_queue)
worker_thread = MessageWorker(_work_queue, return_after_build)
messaging_thread.start()
polling_thread.start()
@@ -295,4 +310,4 @@ def main():
except KeyboardInterrupt:
# FIXME: Make this less brutal
os._exit()
os._exit(0)

View File

@@ -26,9 +26,16 @@ from datetime import datetime
import re
import functools
import time
import shutil
import tempfile
import os
import modulemd
from module_build_service import log, models
from module_build_service.errors import ValidationError
from module_build_service.errors import ValidationError, UnprocessableEntity
from module_build_service import app, conf, db, log
from module_build_service.errors import (
ValidationError, Unauthorized, UnprocessableEntity, Conflict, NotFound)
from multiprocessing.dummy import Pool as ThreadPool
def retry(timeout=120, interval=30, wait_on=Exception):
""" A decorator that allows to retry a section of code...
@@ -163,3 +170,138 @@ def filter_module_builds(flask_request):
page = flask_request.args.get('page', 1, type=int)
per_page = flask_request.args.get('per_page', 10, type=int)
return query.paginate(page, per_page, False)
def submit_module_build(username, url):
    """
    Submit a new module build defined by the modulemd file at SCM *url*.

    Clones the repository, validates its modulemd, creates a new
    ModuleBuild (or resumes a previously failed one) together with its
    ComponentBuild records, and transitions the build to "wait".

    :param username: name of the user submitting the build.
    :param url: SCM URL of the repository containing <name>.yaml.
    :return: the created or resumed models.ModuleBuild instance.
    :raises UnprocessableEntity: invalid modulemd, or a component SCM
        URL/commit is unavailable.
    :raises Conflict: a non-failed build with the same NVR exists.
    :raises Unauthorized: custom component repository/cache requested
        while not allowed by configuration.
    """
    # Import it here, because SCM uses utils methods
    # and fails to import them because of dep-chain.
    import module_build_service.scm

    yaml = ""
    td = None
    try:
        log.debug('Verifying modulemd')
        td = tempfile.mkdtemp()
        scm = module_build_service.scm.SCM(url, conf.scmurls)
        cod = scm.checkout(td)
        cofn = os.path.join(cod, (scm.name + ".yaml"))

        with open(cofn, "r") as mmdfile:
            yaml = mmdfile.read()
    finally:
        # Best-effort cleanup of the temporary checkout.
        try:
            if td is not None:
                shutil.rmtree(td)
        except Exception as e:
            log.warning(
                "Failed to remove temporary directory {!r}: {}".format(
                    td, str(e)))

    mmd = modulemd.ModuleMetadata()
    try:
        mmd.loads(yaml)
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # are not swallowed here.
        log.error('Invalid modulemd')
        raise UnprocessableEntity('Invalid modulemd')

    module = models.ModuleBuild.query.filter_by(name=mmd.name,
                                                version=mmd.version,
                                                release=mmd.release).first()
    if module:
        log.debug('Checking whether module build already exist.')
        # TODO: make this configurable, we might want to allow
        # resubmitting any stuck build on DEV no matter the state
        if module.state not in (models.BUILD_STATES['failed'],):
            log.error('Module (state=%s) already exists. '
                      'Only new or failed builds are allowed.'
                      % module.state)
            raise Conflict('Module (state=%s) already exists. '
                           'Only new or failed builds are allowed.'
                           % module.state)
        log.debug('Resuming existing module build %r' % module)
        module.username = username
        module.transition(conf, models.BUILD_STATES["init"])
        log.info("Resumed existing module build in previous state %s"
                 % module.state)
    else:
        log.debug('Creating new module build')
        module = models.ModuleBuild.create(
            db.session,
            conf,
            name=mmd.name,
            version=mmd.version,
            release=mmd.release,
            modulemd=yaml,
            scmurl=url,
            username=username
        )

    # List of (pkg_name, git_url) tuples to be used to check
    # the availability of git URLs in parallel later.
    full_urls = []

    # If the modulemd yaml specifies components, then submit them for build
    if mmd.components:
        for pkgname, pkg in mmd.components.rpms.packages.items():
            try:
                if pkg.get("repository") and not conf.rpms_allow_repository:
                    raise Unauthorized(
                        "Custom component repositories aren't allowed")
                if pkg.get("cache") and not conf.rpms_allow_cache:
                    raise Unauthorized("Custom component caches aren't allowed")
                if not pkg.get("repository"):
                    pkg["repository"] = conf.rpms_default_repository + pkgname
                if not pkg.get("cache"):
                    pkg["cache"] = conf.rpms_default_cache + pkgname
                if not pkg.get("commit"):
                    try:
                        pkg["commit"] = module_build_service.scm.SCM(
                            pkg["repository"]).get_latest()
                    except Exception:
                        raise UnprocessableEntity(
                            "Failed to get the latest commit: %s" % pkgname)
            except Exception:
                # Any validation failure marks the whole module failed
                # before the exception propagates to the caller.
                module.transition(conf, models.BUILD_STATES["failed"])
                db.session.add(module)
                db.session.commit()
                raise

            full_url = pkg["repository"] + "?#" + pkg["commit"]
            full_urls.append((pkgname, full_url))

        log.debug("Checking scm urls")
        # Checks the availability of SCM urls.
        pool = ThreadPool(10)
        err_msgs = pool.map(
            lambda data: "Cannot checkout {}".format(data[0])
            if not module_build_service.scm.SCM(data[1]).is_available()
            else None, full_urls)
        # Release the pool's worker threads; previously the pool was
        # never closed.
        pool.close()
        for err_msg in err_msgs:
            if err_msg:
                raise UnprocessableEntity(err_msg)

        for pkgname, pkg in mmd.components.rpms.packages.items():
            full_url = pkg["repository"] + "?#" + pkg["commit"]

            existing_build = models.ComponentBuild.query.filter_by(
                module_id=module.id, package=pkgname).first()
            if (existing_build
                    and existing_build.state != models.BUILD_STATES['done']):
                # Reset a previously attempted (non-done) component build.
                existing_build.state = models.BUILD_STATES['init']
                db.session.add(existing_build)
            else:
                # XXX: what about components that were present in previous
                # builds but are gone now (component reduction)?
                build = models.ComponentBuild(
                    module_id=module.id,
                    package=pkgname,
                    format="rpms",
                    scmurl=full_url,
                )
                db.session.add(build)

    module.modulemd = mmd.dumps()
    module.transition(conf, models.BUILD_STATES["wait"])
    db.session.add(module)
    db.session.commit()
    log.info("%s submitted build of %s-%s-%s", username, mmd.name,
             mmd.version, mmd.release)
    return module

View File

@@ -33,16 +33,12 @@ import json
import modulemd
import os
import module_build_service.auth
import module_build_service.scm
import shutil
import tempfile
import re
from module_build_service import app, conf, db, log
from module_build_service import models
from module_build_service.utils import pagination_metadata, filter_module_builds
from module_build_service.utils import pagination_metadata, filter_module_builds, submit_module_build
from module_build_service.errors import (
ValidationError, Unauthorized, UnprocessableEntity, Conflict, NotFound)
from multiprocessing.dummy import Pool as ThreadPool
api_definition = {
'module_build_submit': {
@@ -127,134 +123,7 @@ class ModuleBuildAPI(MethodView):
log.error("The submitted scmurl %r is not valid" % url)
raise Unauthorized("The submitted scmurl %s is not valid" % url)
yaml = ""
td = None
try:
log.debug('Verifying modulemd')
td = tempfile.mkdtemp()
scm = module_build_service.scm.SCM(url, conf.scmurls)
cod = scm.checkout(td)
cofn = os.path.join(cod, (scm.name + ".yaml"))
with open(cofn, "r") as mmdfile:
yaml = mmdfile.read()
finally:
try:
if td is not None:
shutil.rmtree(td)
except Exception as e:
log.warning(
"Failed to remove temporary directory {!r}: {}".format(
td, str(e)))
mmd = modulemd.ModuleMetadata()
try:
mmd.loads(yaml)
except:
log.error('Invalid modulemd')
raise UnprocessableEntity('Invalid modulemd')
module = models.ModuleBuild.query.filter_by(name=mmd.name,
version=mmd.version,
release=mmd.release).first()
if module:
log.debug('Checking whether module build already exist.')
# TODO: make this configurable, we might want to allow
# resubmitting any stuck build on DEV no matter the state
if module.state not in (models.BUILD_STATES['failed'],):
log.error('Module (state=%s) already exists. '
'Only new or failed builds are allowed.'
% module.state)
raise Conflict('Module (state=%s) already exists. '
'Only new or failed builds are allowed.'
% module.state)
log.debug('Resuming existing module build %r' % module)
module.username = username
module.transition(conf, models.BUILD_STATES["init"])
log.info("Resumed existing module build in previous state %s"
% module.state)
else:
log.debug('Creating new module build')
module = models.ModuleBuild.create(
db.session,
conf,
name=mmd.name,
version=mmd.version,
release=mmd.release,
modulemd=yaml,
scmurl=url,
username=username
)
# List of (pkg_name, git_url) tuples to be used to check
# the availability of git URLs paralelly later.
full_urls = []
# If the modulemd yaml specifies components, then submit them for build
if mmd.components:
for pkgname, pkg in mmd.components.rpms.packages.items():
try:
if pkg.get("repository") and not conf.rpms_allow_repository:
raise Unauthorized(
"Custom component repositories aren't allowed")
if pkg.get("cache") and not conf.rpms_allow_cache:
raise Unauthorized("Custom component caches aren't allowed")
if not pkg.get("repository"):
pkg["repository"] = conf.rpms_default_repository + pkgname
if not pkg.get("cache"):
pkg["cache"] = conf.rpms_default_cache + pkgname
if not pkg.get("commit"):
try:
pkg["commit"] = module_build_service.scm.SCM(
pkg["repository"]).get_latest()
except Exception as e:
raise UnprocessableEntity(
"Failed to get the latest commit: %s" % pkgname)
except Exception:
module.transition(conf, models.BUILD_STATES["failed"])
db.session.add(module)
db.session.commit()
raise
full_url = pkg["repository"] + "?#" + pkg["commit"]
full_urls.append((pkgname, full_url))
log.debug("Checking scm urls")
# Checks the availability of SCM urls.
pool = ThreadPool(10)
err_msgs = pool.map(lambda data: "Cannot checkout {}".format(data[0])
if not module_build_service.scm.SCM(data[1]).is_available()
else None, full_urls)
for err_msg in err_msgs:
if err_msg:
raise UnprocessableEntity(err_msg)
for pkgname, pkg in mmd.components.rpms.packages.items():
full_url = pkg["repository"] + "?#" + pkg["commit"]
existing_build = models.ComponentBuild.query.filter_by(
module_id=module.id, package=pkgname).first()
if (existing_build
and existing_build.state != models.BUILD_STATES['done']):
existing_build.state = models.BUILD_STATES['init']
db.session.add(existing_build)
else:
# XXX: what about components that were present in previous
# builds but are gone now (component reduction)?
build = models.ComponentBuild(
module_id=module.id,
package=pkgname,
format="rpms",
scmurl=full_url,
)
db.session.add(build)
module.modulemd = mmd.dumps()
module.transition(conf, models.BUILD_STATES["wait"])
db.session.add(module)
db.session.commit()
log.info("%s submitted build of %s-%s-%s", username, mmd.name,
mmd.version, mmd.release)
module = submit_module_build(username, url)
return jsonify(module.json()), 201