From 439721da997327175510261eee1e25cf56d49fcf Mon Sep 17 00:00:00 2001 From: Ralph Bean Date: Thu, 17 Nov 2016 16:04:08 -0500 Subject: [PATCH] Gather build groups from module profiles. Fixes #181. When initializing the buildroot for a module build, we used to set up some build 'groups' for the tag: `build` and `srpm-build`. These are the lists of RPMs that koji is supposed to install into the buildroot before anything else is done. Crucial stuff goes here, like `git` in the `srpm-build` group so that koji can clone the repo in the first place. We had those lists hardcoded before. This patch changes that to use the `buildroot` and `srpm-buildroot` profiles of the modules which are our dependencies (recursively). This will allow people like @psabata and the base-runtime to make changes to the build groups for the generational core and work around their own problems, instead of having to ask us to expand that list. There were a couple ways to do this: - I could've cloned the SCM repos for all dependencies and gotten their profiles from the modulemd source there. This seemed flimsy because we only want to depend on the profiles of modules that were *really* built. - We could modify PDC to stuff the modulemd contents in there. We already get some dep and tag info from PDC. My thought here was that it would be too heavyweight to store every copy of the modulemd file in PDC for every build ever. We already have it in MBS. - Lastly, and this is what I did here, I just referred to MBS' own database to get the profiles. This seems to work just fine. One side-effect is that we need the build profiles from the manually bootstrapped modules that were put together by hand, and were never built in the MBS. In order to work around that, I added an alembic upgrade script which pre-populates the database with one fake bootstrapped base-runtime module. We can expand on that in the future if we need. 
--- .../0ef60c3ed440_insert_fake_base_runtime.py | 89 +++++++++++++++++++ module_build_service/builder.py | 65 +++----------- module_build_service/models.py | 44 ++++++++- .../scheduler/handlers/modules.py | 14 ++- 4 files changed, 156 insertions(+), 56 deletions(-) create mode 100644 migrations/versions/0ef60c3ed440_insert_fake_base_runtime.py diff --git a/migrations/versions/0ef60c3ed440_insert_fake_base_runtime.py b/migrations/versions/0ef60c3ed440_insert_fake_base_runtime.py new file mode 100644 index 00000000..8a60467d --- /dev/null +++ b/migrations/versions/0ef60c3ed440_insert_fake_base_runtime.py @@ -0,0 +1,89 @@ +"""Insert fake base-runtime. + +Revision ID: 0ef60c3ed440 +Revises: 145347916a56 +Create Date: 2016-11-17 15:39:22.984051 + +""" + + +# revision identifiers, used by Alembic. +revision = '0ef60c3ed440' +down_revision = '145347916a56' + +from alembic import op +import sqlalchemy as sa + +import os +import modulemd + +yaml = """ +document: modulemd +version: 1 +data: + name: base-runtime + stream: master + version: 3 + summary: A fake base-runtime module, used to bootstrap the infrastructure. + description: ... 
+ profiles: + buildroot: + rpms: + - bash + - bzip2 + - coreutils + - cpio + - diffutils + - fedora-release + - findutils + - gawk + - gcc + - gcc-c++ + - grep + - gzip + - info + - make + - patch + - redhat-rpm-config + - rpm-build + - sed + - shadow-utils + - tar + - unzip + - util-linux + - which + - xz + srpm-buildroot: + rpms: + - bash + - fedora-release + - fedpkg-minimal + - gnupg2 + - redhat-rpm-config + - rpm-build + - shadow-utils +""" + +def upgrade(): + from module_build_service import models, conf + engine = op.get_bind().engine + session = sa.orm.scoped_session(sa.orm.sessionmaker(bind=engine)) + + mmd = modulemd.ModuleMetadata() + mmd.loads(yaml) + module = models.ModuleBuild.create( + session, + conf, + name=mmd.name, + stream=mmd.stream, + version=mmd.version, + modulemd=yaml, + scmurl='...', + username='modularity', + ) + module.state = models.BUILD_STATES['done'] + module.state_reason = 'Artificially created.' + session.commit() + +def downgrade(): + pass diff --git a/module_build_service/builder.py b/module_build_service/builder.py index d66eaf48..c66ca4e6 100644 --- a/module_build_service/builder.py +++ b/module_build_service/builder.py @@ -57,46 +57,6 @@ import module_build_service.utils logging.basicConfig(level=logging.DEBUG) -# TODO: read defaults from module_build_service's config -KOJI_DEFAULT_GROUPS = { - 'build': [ - 'bash', - 'bzip2', - 'coreutils', - 'cpio', - 'diffutils', - 'fedora-release', - 'findutils', - 'gawk', - 'gcc', - 'gcc-c++', - 'grep', - 'gzip', - 'info', - 'make', - 'patch', - 'redhat-rpm-config', - 'rpm-build', - 'sed', - 'shadow-utils', - 'tar', - 'unzip', - 'util-linux', - 'which', - 'xz', - ], - 'srpm-build': [ - 'bash', - 'fedora-release', - 'fedpkg-minimal', - 'gnupg2', - 'redhat-rpm-config', - 'rpm-build', - 'shadow-utils', - ] -} - - """ Example workflows - helps to see the difference in implementations Copr workflow: @@ -194,7 +154,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)): raise ValueError("Builder 
backend='%s' not recognized" % backend) @abstractmethod - def buildroot_connect(self): + def buildroot_connect(self, groups): """ This is an idempotent call to create or resume and validate the build environment. .build() should immediately fail if .buildroot_connect() @@ -456,7 +416,7 @@ chmod 644 %buildroot/%_rpmconfigdir/macros.d/macros.modules return koji_session - def buildroot_connect(self): + def buildroot_connect(self, groups): log.info("%r connecting buildroot." % self) # Create or update individual tags @@ -466,16 +426,13 @@ chmod 644 %buildroot/%_rpmconfigdir/macros.d/macros.modules self.module_build_tag = self._koji_create_tag( self.tag_name + "-build", self.arches, perm="admin") - # TODO: handle in buildroot_add_artifact(install=true) and track groups as module buildrequires - groups = KOJI_DEFAULT_GROUPS - if groups: - @module_build_service.utils.retry(wait_on=SysCallError, interval=5) - def add_groups(): - return self._koji_add_groups_to_tag( - dest_tag=self.module_build_tag, - groups=groups, - ) - add_groups() + @module_build_service.utils.retry(wait_on=SysCallError, interval=5) + def add_groups(): + return self._koji_add_groups_to_tag( + dest_tag=self.module_build_tag, + groups=groups, + ) + add_groups() # Add main build target. self.module_target = self._koji_add_target(self.tag_name, @@ -818,7 +775,7 @@ class CoprModuleBuilder(GenericBuilder): from copr.client import CoprClient return CoprClient.create_from_file_config(config.copr_config) - def buildroot_connect(self): + def buildroot_connect(self, groups): """ This is an idempotent call to create or resume and validate the build environment. 
.build() should immediately fail if .buildroot_connect() @@ -978,7 +935,7 @@ class MockModuleBuilder(GenericBuilder): log.info("MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" % (tag_name, self.tag_dir)) - def buildroot_connect(self): + def buildroot_connect(self, groups): pass def buildroot_prep(self): diff --git a/module_build_service/models.py b/module_build_service/models.py index 14cf4f63..1c17e065 100644 --- a/module_build_service/models.py +++ b/module_build_service/models.py @@ -29,7 +29,7 @@ import contextlib from datetime import datetime -from sqlalchemy import engine_from_config +from sqlalchemy import engine_from_config, or_ from sqlalchemy.orm import validates, scoped_session, sessionmaker import modulemd as _modulemd @@ -289,6 +289,48 @@ class ModuleBuild(RidaBase): return tasks + def resolve_profiles(self, session, key, seen=None): + """ Gather dependency profiles named `key` of modules we depend on. + + This is used to find the union of all 'buildroot' profiles of a + module's dependencies. + + https://pagure.io/fm-orchestrator/issue/181 + """ + + seen = seen or [] # Initialize to an empty list. + result = set() + for name, stream in self.mmd().buildrequires.items(): + # First, guard against infinite recursion + if name in seen: + continue + + # Find the latest of the dep in our db of built modules. + dep = session.query(ModuleBuild)\ + .filter(ModuleBuild.name==name)\ + .filter(ModuleBuild.stream==stream)\ + .filter(or_( + ModuleBuild.state==BUILD_STATES["done"], + ModuleBuild.state==BUILD_STATES["ready"], + )).order_by('version').first() + + # XXX - We may want to make this fatal one day, but warn for now. + if not dep: + log.warn("Could not find built dep " + "%s/%s for %r" % (name, stream, self)) + continue + + # Take note of what rpms are in this dep's profile. + profiles = dep.mmd().profiles + if key in profiles: + result |= profiles[key].rpms + + # And recurse to all modules that are deps of our dep. 
+ result |= dep.resolve_profiles(session, key, seen + [name]) + + # Return the union of all rpms in all profiles of the given key. + return result + def __repr__(self): return "" % ( self.name, self.stream, self.version, diff --git a/module_build_service/scheduler/handlers/modules.py b/module_build_service/scheduler/handlers/modules.py index 413fbe36..58414a36 100644 --- a/module_build_service/scheduler/handlers/modules.py +++ b/module_build_service/scheduler/handlers/modules.py @@ -120,6 +120,18 @@ def wait(config, session, msg): session.commit() raise + try: + groups = { + 'build': build.resolve_profiles(session, 'buildroot'), + 'srpm-build': build.resolve_profiles(session, 'srpm-buildroot'), + } + except ValueError: + reason = "Failed to gather buildroot groups from SCM." + log.exception(reason) + build.transition(config, state="failed", state_reason=reason) + session.commit() + raise + log.debug("Found tag=%s for module %r" % (tag, build)) # Hang on to this information for later. We need to know which build is # associated with which koji tag, so that when their repos are regenerated @@ -130,7 +142,7 @@ def wait(config, session, msg): builder = module_build_service.builder.GenericBuilder.create( build.owner, build.name, config.system, config, tag_name=tag) - builder.buildroot_connect() + builder.buildroot_connect(groups) log.debug("Adding dependencies %s into buildroot for module %s" % (dependencies, module_info)) builder.buildroot_add_repos(dependencies) # inject dist-tag into buildroot