Fix #223, #224 - Use a generated Mock config, define a local RPM repository in it, and make it available in the Mock chroot after each batch is built.

This commit is contained in:
Jan Kaluza
2016-11-25 12:15:32 +01:00
parent 4f6d683645
commit dd3eb61b53
5 changed files with 248 additions and 15 deletions

View File

@@ -113,6 +113,7 @@ class DevConfiguration(BaseConfiguration):
# FAS_PASSWORD = commands.getoutput('pass your_fas_password').strip()
KOJI_ARCHES = ['x86_64']
KOJI_REPOSITORY_URL = 'http://kojipkgs.stg.fedoraproject.org/repos'
class TestConfiguration(BaseConfiguration):

View File

@@ -106,16 +106,105 @@ def cleardb():
models.ModuleBuild.query.delete()
models.ComponentBuild.query.delete()
def _insert_fake_baseruntime():
    """
    Inserts an artificial, already-"done" base-runtime module build into the
    database so local builds have buildroot/srpm-buildroot profiles to
    bootstrap from, without contacting PDC.
    """
    # Local import: modulemd is only needed for local-build bootstrapping.
    # (The previously-present "import sqlalchemy as sa" was unused and has
    # been removed.)
    import modulemd

    # Minimal modulemd v1 document describing the fake base-runtime module.
    yaml_str = """
document: modulemd
version: 1
data:
    name: base-runtime
    stream: master
    version: 3
    summary: A fake base-runtime module, used to bootstrap the infrastructure.
    description: ...
    profiles:
        buildroot:
            rpms:
            - bash
            - bzip2
            - coreutils
            - cpio
            - diffutils
            - fedora-release
            - findutils
            - gawk
            - gcc
            - gcc-c++
            - grep
            - gzip
            - info
            - make
            - patch
            - redhat-rpm-config
            - rpm-build
            - sed
            - shadow-utils
            - tar
            - unzip
            - util-linux
            - which
            - xz
        srpm-buildroot:
            rpms:
            - bash
            - fedora-release
            - fedpkg-minimal
            - gnupg2
            - redhat-rpm-config
            - rpm-build
            - shadow-utils
"""
    # Parse the document just to extract name/stream/version for the DB row.
    mmd = modulemd.ModuleMetadata()
    mmd.loads(yaml_str)

    module = models.ModuleBuild.create(
        db.session,
        conf,
        name=mmd.name,
        stream=mmd.stream,
        version=mmd.version,
        modulemd=yaml_str,
        scmurl='...',
        username='modularity',
    )
    # Mark the module as already built so the scheduler treats it as a
    # usable dependency rather than something to build.
    module.state = models.BUILD_STATES['done']
    module.state_reason = 'Artificially created.'
    db.session.commit()
@manager.command
def build_module_locally(url):
    """
    Performs a local module build using Mock as the backend, with a private
    SQLite database, for the module at SCM URL `url`.
    """
    conf.set_item("system", "mock")
    cleardb()

    # Use our own local SQLite3 database.
    confdir = os.path.abspath(os.path.dirname(__file__))
    dbdir = os.path.abspath(os.path.join(confdir, '..')) if confdir.endswith('conf') \
        else confdir
    # dbdir is already absolute; the extra leading slash produces the
    # 'sqlite:////abs/path' four-slash form SQLAlchemy expects for
    # absolute paths.
    dbpath = '/{0}'.format(os.path.join(dbdir, '.mbs_local_build.db'))
    dburi = 'sqlite://' + dbpath
    app.config["SQLALCHEMY_DATABASE_URI"] = dburi
    conf.set_item("sqlalchemy_database_uri", dburi)

    # Always start from a fresh database file.
    if os.path.exists(dbpath):
        os.remove(dbpath)

    # Create the database and insert fake base-runtime module there. This is
    # normally done by the flask_migrate.upgrade(), but we do not call it
    # here, because after that call, all the logged messages are not printed
    # to stdout/stderr and are ignored; no way to fix that was found.
    #
    # In the future, we should use PDC to get what we need from the fake
    # module, so it's probably not a big problem.
    db.create_all()
    _insert_fake_baseruntime()

    # NOTE: the username was previously fetched twice; once is enough.
    username = getpass.getuser()
    submit_module_build(username, url)

    # Seed the scheduler with fake messages simulating a submitted build.
    msgs = [
        RidaModule("fake msg", 1, 1),
        RidaModule("local module build", 2, 1),
    ]
    module_build_service.scheduler.main.main(msgs, True)

View File

@@ -923,20 +923,130 @@ class MockModuleBuilder(GenericBuilder):
# Global build_id/task_id we increment when new build is executed.
_build_id = 1
# Skeleton for the generated Mock configuration. The $root, $arch, $group
# and $repos placeholders are substituted with plain str.replace() calls in
# _write_mock_config(); the inner \"\"\" block becomes the chroot's yum/dnf
# configuration, into which the repositories collected by _add_repo() are
# injected via $repos.
MOCK_CONFIG_TEMPLATE = """
config_opts['root'] = '$root'
config_opts['target_arch'] = '$arch'
config_opts['legal_host_arches'] = ('$arch',)
config_opts['chroot_setup_cmd'] = 'install $group'
config_opts['dist'] = ''
config_opts['extra_chroot_dirs'] = [ '/run/lock', ]
config_opts['releasever'] = ''
config_opts['package_manager'] = 'dnf'
config_opts['yum.conf'] = \"\"\"
[main]
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
install_weak_deps=0
metadata_expire=0
mdpolicy=group:primary
# repos
$repos
\"\"\"
"""
def __init__(self, owner, module, config, tag_name):
    """
    Initializes the Mock-based local builder for a single tag.

    :param owner: name of the user who started the build (unused here, kept
        for the GenericBuilder constructor interface).
    :param module: the module (build) this builder works on.
    :param config: MBS config object; mock_resultsdir is read from it.
    :param tag_name: name of the tag; used to name the working directories.
    """
    self.module_str = module
    self.tag_name = tag_name
    self.config = config
    self.groups = []
    self.arch = "x86_64"  # TODO: We may need to change that in the future
    self.repos = ""

    # Create main directory for this tag
    self.tag_dir = os.path.join(self.config.mock_resultsdir, tag_name)
    if not os.path.exists(self.tag_dir):
        os.makedirs(self.tag_dir)

    # Create "results" sub-directory for this tag to store build results
    # and local repository.
    self.resultsdir = os.path.join(self.tag_dir, "results")
    if not os.path.exists(self.resultsdir):
        os.makedirs(self.resultsdir)

    # Remove old files from the previous build of this tag but only
    # before the first build is done, otherwise we would remove files
    # which we already built in this module build.
    if MockModuleBuilder._build_id == 1:
        # Remove all RPMs from the results directory, but keep old logs.
        for name in os.listdir(self.resultsdir):
            if name.endswith(".rpm"):
                os.remove(os.path.join(self.resultsdir, name))

        # Remove the old RPM repository metadata from the results directory.
        # BUGFIX: os.path.join() discards all preceding components when a
        # later one is absolute, so the original "/repodata/repomd.xml"
        # argument made this check look at the filesystem root and the stale
        # repomd.xml was never removed. Use relative components instead.
        repomd_path = os.path.join(self.resultsdir, "repodata", "repomd.xml")
        if os.path.exists(repomd_path):
            os.remove(repomd_path)

    # Create "config" sub-directory.
    self.configdir = os.path.join(self.tag_dir, "config")
    if not os.path.exists(self.configdir):
        os.makedirs(self.configdir)

    # Generate path to mock config and add local repository there.
    self.mock_config = os.path.join(self.configdir, "mock.cfg")
    self._add_repo("localrepo", "file://" + self.resultsdir)

    log.info("MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" %
             (tag_name, self.tag_dir))
def _createrepo(self):
    """
    Runs "createrepo_c" over self.resultsdir to (re)generate the local RPM
    repository metadata. Returns the (stdout, stderr) of the command.
    """
    repo_path = self.resultsdir
    args = ['/usr/bin/createrepo_c']
    # An existing repomd.xml means metadata is already there; just update it.
    if os.path.exists(repo_path + '/repodata/repomd.xml'):
        args.append('--update')
    args.append(repo_path)
    proc = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return proc.communicate()
def _add_repo(self, name, baseurl):
"""
Adds repository to Mock config file. Call _write_mock_config() to
actually write the config file to filesystem.
"""
self.repos += "[%s]\n" % name
self.repos += "name=%s\n" % name
self.repos += "baseurl=%s\n" % baseurl
self.repos += "enabled=1\n"
def _write_mock_config(self):
"""
Writes Mock config file to self.configdir/mock.cfg.
"""
# We want to write confing only before the first build, otherwise
# we would overwrite it in the middle of module build which would
# break the build.
if MockModuleBuilder._build_id != 1:
return
config = str(MockModuleBuilder.MOCK_CONFIG_TEMPLATE)
config = config.replace("$root", self.tag_name)
config = config.replace("$arch", self.arch)
config = config.replace("$group", " ".join(self.groups))
config = config.replace("$repos", self.repos)
with open(os.path.join(self.configdir, "mock.cfg"), 'w') as f:
f.write(config)
def buildroot_connect(self, groups):
    """
    Remembers the "build" package group for this buildroot and regenerates
    the Mock config so its chroot_setup_cmd installs that group.

    :param groups: dict with at least a "build" key listing the packages
        to install into the buildroot.
    """
    # BUGFIX: dropped the stray dead "pass" statement that preceded the
    # real body (a leftover from the previous stub implementation).
    self.groups = groups["build"]
    log.debug("Mock builder groups: %s" % self.groups)
    self._write_mock_config()
def buildroot_prep(self):
    """No-op for the local Mock backend; nothing to prepare here."""
    pass
@@ -945,6 +1055,8 @@ class MockModuleBuilder(GenericBuilder):
pass
def buildroot_ready(self, artifacts=None):
    """
    Regenerates the local RPM repository in self.resultsdir and reports the
    buildroot as ready. The `artifacts` argument is accepted for interface
    compatibility but not consulted here.
    """
    log.debug("Creating repository in %s" % self.resultsdir)
    self._createrepo()
    return True
def buildroot_add_dependency(self, dependencies):
@@ -954,7 +1066,12 @@ class MockModuleBuilder(GenericBuilder):
pass
def buildroot_add_repos(self, dependencies):
    """
    Adds the repositories of the given dependency tags to the Mock config
    and rewrites it, so the chroot can resolve packages from them.

    :param dependencies: iterable of Koji tag names to add as repos.
    """
    # BUGFIX: dropped the stray dead "pass" statement that preceded the
    # real body (a leftover from the previous stub implementation).
    # TODO: We support only dependencies from Koji here. This should be
    # extended to Copr in the future.
    for tag in dependencies:
        baseurl = KojiModuleBuilder.repo_from_tag(self.config, tag, self.arch)
        self._add_repo(tag, baseurl)
    self._write_mock_config()
def _send_repo_done(self):
msg = module_build_service.messaging.KojiRepoChange(
@@ -998,14 +1115,14 @@ class MockModuleBuilder(GenericBuilder):
"""
try:
# Initialize mock.
self._execute_cmd(["mock", "-r", self.config.mock_config, "--init"])
self._execute_cmd(["mock", "-r", self.mock_config, "--init"])
# Start the build and store results to tag_dir
# Start the build and store results to resultsdir
# TODO: Maybe this should not block in the future, but for local
# builds it is not a big problem.
self._execute_cmd(["mock", "-r", self.config.mock_config,
self._execute_cmd(["mock", "-r", self.mock_config,
"--no-clean", "--rebuild", source,
"--resultdir=%s" % self.tag_dir])
"--resultdir=%s" % self.resultsdir])
# Emit messages simulating complete build. These messages
# are put in the scheduler.main._work_queue and are handled
@@ -1016,7 +1133,7 @@ class MockModuleBuilder(GenericBuilder):
MockModuleBuilder._build_id)
self._send_repo_done()
with open(os.path.join(self.tag_dir, "status.log"), 'w') as f:
with open(os.path.join(self.resultsdir, "status.log"), 'w') as f:
f.write("complete\n")
except Exception as e:
log.error("Error while building artifact %s: %s" % (artifact_name,
@@ -1030,7 +1147,7 @@ class MockModuleBuilder(GenericBuilder):
self._send_build_change(koji.BUILD_STATES['FAILED'], source,
MockModuleBuilder._build_id)
self._send_repo_done()
with open(os.path.join(self.tag_dir, "status.log"), 'w') as f:
with open(os.path.join(self.resultsdir, "status.log"), 'w') as f:
f.write("failed\n")
self._save_log("state.log", artifact_name)

View File

@@ -56,7 +56,7 @@ def get_variant_dict(data):
if not isinstance(data, dict):
return False
for attr in ('name', 'version', 'release'):
for attr in ('name', 'version'):
if attr not in data.keys():
return False
return True
@@ -98,7 +98,10 @@ def get_variant_dict(data):
elif is_module_dict(data):
result = {'variant_id': data['name'], 'variant_version': data['version'], 'variant_release': data['release']}
result = {'variant_id': data['name'], 'variant_version': data['version']}
if 'release' in data:
result['variant_release'] = data['release']
if not result:
raise ValueError("Couldn't get variant_dict from %s" % data)

View File

@@ -84,14 +84,37 @@ def wait(config, session, msg):
pass
tag = None
dependencies = None
dependencies = []
if conf.system == "mock":
# In case of mock, we do not try to get anything from pdc,
# just generate our own koji_tag to identify the module in messages.
tag = '-'.join(['module', module_info['name'],
str(module_info['stream']), str(module_info['version'])])
# TODO: Dependencies
for name, stream in build.mmd().buildrequires.items():
pdc_session = module_build_service.pdc.get_pdc_client_session(config)
pdc_query = {
'name': name,
'version': stream
}
@module_build_service.utils.retry(interval=10, timeout=30, wait_on=ValueError)
def _get_module():
log.info("Getting %s from pdc (query %r)" % (module_info['name'], pdc_query))
return module_build_service.pdc.get_module_tag(
pdc_session, pdc_query, strict=True)
try:
dependencies.append(_get_module())
except ValueError:
reason = "Failed to get module info from PDC. Max retries reached."
log.exception(reason)
build.transition(config, state="failed", state_reason=reason)
session.commit()
raise
else:
# TODO: Move this to separate func
pdc_session = module_build_service.pdc.get_pdc_client_session(config)