Merge #1277 Take the list of arches for -build Koji tag from buildrequired modules.

This commit is contained in:
Jan Kaluža
2019-06-07 13:36:48 +00:00
16 changed files with 328 additions and 50 deletions

View File

@@ -52,7 +52,6 @@ from module_build_service.errors import ProgrammingError
from module_build_service.builder.base import GenericBuilder
from module_build_service.builder.KojiContentGenerator import KojiContentGenerator
from module_build_service.utils import get_reusable_components, get_reusable_module
from module_build_service.utils import get_build_arches
logging.basicConfig(level=logging.DEBUG)
@@ -180,10 +179,10 @@ class KojiModuleBuilder(GenericBuilder):
log.debug("Using koji_config: %s" % config.koji_config)
self.koji_session = self.get_session(config)
self.arches = get_build_arches(self.mmd, self.config)
self.arches = [arch.name for arch in self.module.arches]
if not self.arches:
raise ValueError("No arches specified in the config.")
raise ValueError("No arches specified in module build.")
# These eventually get populated by calling _connect and __prep is set to True
self.module_tag = None # string
@@ -1307,3 +1306,16 @@ class KojiModuleBuilder(GenericBuilder):
tags.append(t["name"])
return tags
@classmethod
def get_module_build_arches(cls, module):
"""
Get the list of architectures associated with the module build
in the build system.
:param ModuleBuild module: the module build.
:return: list of architectures
"""
koji_session = KojiModuleBuilder.get_session(conf, login=False)
tag = koji_session.getTag(module.koji_tag)
if not tag:
raise ValueError("Unknown Koji tag %r." % module.koji_tag)
return tag["arches"].split(" ")

View File

@@ -52,6 +52,20 @@ from module_build_service import models
logging.basicConfig(level=logging.DEBUG)
def detect_arch():
"""
Helper method to detect the local host architecture. Falls back to `conf.arch_fallback`.
"""
if conf.arch_autodetect:
arch_detected = platform.machine()
if arch_detected:
return arch_detected
log.warning("Couldn't determine machine arch. Falling back to configured arch.")
return conf.arch_fallback
class MockModuleBuilder(GenericBuilder):
backend = "mock"
# Global build_id/task_id we increment when new build is executed.
@@ -94,15 +108,7 @@ class MockModuleBuilder(GenericBuilder):
self.koji_session = None
# Auto-detect arch (if possible) or fallback to the configured one
if conf.arch_autodetect:
arch_detected = platform.machine()
if arch_detected:
self.arch = arch_detected
else:
log.warning("Couldn't determine machine arch. Falling back to configured arch.")
self.arch = conf.arch_fallback
else:
self.arch = conf.arch_fallback
self.arch = detect_arch()
log.info("Machine arch setting: {}".format(self.arch))
# Create main directory for this tag
@@ -149,6 +155,17 @@ class MockModuleBuilder(GenericBuilder):
# Workaround koji specific code in modules.py
return {"name": self.tag_name}
@classmethod
def get_module_build_arches(cls, module):
"""
Get the list of architectures associated with the module build
in the build system.
:param ModuleBuild module: the module build.
:return: list of architectures
"""
# Return local architecture, because all the modules built locally are built
# just against this architecture.
return [detect_arch()]
def _createrepo(self, include_module_yaml=False):
"""
Creates the repository using "createrepo_c" command in the resultsdir.

View File

@@ -346,6 +346,15 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
"""
raise NotImplementedError()
@classmethod
def get_module_build_arches(cls, module):
"""
Get the list of architectures associated with the module build
in the build system.
:param ModuleBuild module: the module build.
:return: list of architectures
"""
return GenericBuilder.backends[conf.system].get_module_build_arches(module)
@classmethod
def recover_orphaned_artifact(cls, component_build):
"""

View File

@@ -589,13 +589,15 @@ class Config(object):
"default": "",
"desc": "The Greenwave decision context that determines a module's gating status.",
},
"allowed_disttag_marking_module_names": {
"allowed_privileged_module_names": {
"type": list,
"default": [],
"desc": (
"List of modules that are allowed to influence the RPM disttag when "
"buildrequired. These modules can set xmd.mbs.disttag_marking to do so. MBS "
"will use this list order to determine which modules take precedence."
"List of modules that are allowed to influence the RPM buildroot when "
"buildrequired. These modules can set xmd.mbs.disttag_marking to change "
"the RPM disttag, or set the xmd.mbs.koji_tag_arches to set the arches "
"for which the modules are built. MBS will use this list order to determine "
"which modules take precedence."
),
},
"stream_suffixes": {

View File

@@ -48,7 +48,9 @@ import module_build_service.scheduler.consumer
manager = Manager(create_app)
help_args = ("-?", "--help")
manager.help_args = help_args
migrate = flask_migrate.Migrate(app, db)
migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrations')
migrate = flask_migrate.Migrate(app, db, directory=migrations_dir)
manager.add_command("db", flask_migrate.MigrateCommand)
manager.add_option("-d", "--debug", dest="debug", action="store_true")
manager.add_option("-v", "--verbose", dest="verbose", action="store_true")

View File

@@ -0,0 +1,36 @@
"""Add module_arches and module_builds_to_arches tables.
Revision ID: bf861b6af29a
Revises: 65ad4fcdbce6
Create Date: 2019-06-03 13:33:40.540567
"""
# revision identifiers, used by Alembic.
revision = 'bf861b6af29a'
down_revision = '65ad4fcdbce6'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('module_arches',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('module_builds_to_arches',
sa.Column('module_build_id', sa.Integer(), nullable=False),
sa.Column('module_arch_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['module_arch_id'], ['module_arches.id'], ),
sa.ForeignKeyConstraint(['module_build_id'], ['module_builds.id'], ),
sa.UniqueConstraint('module_build_id', 'module_arch_id', name='unique_module_to_arch')
)
def downgrade():
op.drop_table('module_builds_to_arches')
op.drop_table('module_arches')

View File

@@ -182,6 +182,17 @@ module_builds_to_virtual_streams = db.Table(
)
module_builds_to_arches = db.Table(
"module_builds_to_arches",
db.Column("module_build_id", db.Integer, db.ForeignKey("module_builds.id"), nullable=False),
db.Column(
"module_arch_id", db.Integer, db.ForeignKey("module_arches.id"),
nullable=False),
db.UniqueConstraint(
"module_build_id", "module_arch_id", name="unique_module_to_arch"),
)
class ModuleBuild(MBSBase):
__tablename__ = "module_builds"
id = db.Column(db.Integer, primary_key=True)
@@ -211,6 +222,13 @@ class ModuleBuild(MBSBase):
virtual_streams = db.relationship(
"VirtualStream", secondary=module_builds_to_virtual_streams, back_populates="module_builds")
# List of arches against which the module is built.
# NOTE: It is not filled for imported modules, because imported module builds have not been
# built by MBS.
arches = db.relationship(
"ModuleArch", secondary=module_builds_to_arches, back_populates="module_builds",
order_by="ModuleArch.name")
# A monotonically increasing integer that represents which batch or
# iteration this module is currently on for successive rebuilds of its
# components. Think like 'mockchain --recurse'
@@ -863,6 +881,7 @@ class ModuleBuild(MBSBase):
"state_url": state_url,
"stream_version": self.stream_version,
"virtual_streams": [virtual_stream.name for virtual_stream in self.virtual_streams],
"arches": [arch.name for arch in self.arches],
})
return rv
@@ -1005,6 +1024,18 @@ class VirtualStream(MBSBase):
return "<VirtualStream id={} name={}>".format(self.id, self.name)
class ModuleArch(MBSBase):
__tablename__ = "module_arches"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable=False, unique=True)
module_builds = db.relationship(
"ModuleBuild", secondary=module_builds_to_arches, back_populates="arches"
)
def __repr__(self):
return "<ModuleArch id={} name={}>".format(self.id, self.name)
class ModuleBuildTrace(MBSBase):
__tablename__ = "module_builds_trace"
id = db.Column(db.Integer, primary_key=True)

View File

@@ -34,6 +34,7 @@ from module_build_service.utils import (
get_rpm_release,
generate_koji_tag,
record_filtered_rpms,
record_module_build_arches,
)
from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError
from module_build_service.utils.ursine import handle_stream_collision_modules
@@ -156,6 +157,7 @@ def init(config, session, msg):
failure_reason = "unspec"
try:
mmd = build.mmd()
record_module_build_arches(mmd, build, session)
record_component_builds(mmd, build, session=session)
# The ursine.handle_stream_collision_modules is Koji specific.
if conf.system in ["koji", "test"]:

View File

@@ -325,7 +325,7 @@ def get_rpm_release(module_build):
# Looping through all the non-base modules that are allowed to set the disttag_marking
# and the base modules to see what the disttag marking should be. Doing it this way
# preserves the order in the configurations.
for module in conf.allowed_disttag_marking_module_names + conf.base_module_names:
for module in conf.allowed_privileged_module_names + conf.base_module_names:
module_in_xmd = buildrequires.get(module)
if not module_in_xmd:
@@ -350,7 +350,7 @@ def get_rpm_release(module_build):
if module not in conf.base_module_names:
continue
# If we've made it past all the modules in
# conf.allowed_disttag_marking_module_names, and the base module doesn't have
# conf.allowed_privileged_module_names, and the base module doesn't have
# the disttag_marking set, then default to the stream of the first base module
marking = module_obj.stream
br_module_marking = marking + "+"
@@ -674,16 +674,70 @@ def get_build_arches(mmd, config):
:param config: config (module_build_service.config.Config instance)
:return list of architectures
"""
arches = config.arches
# Imported here to allow import of utils in GenericBuilder.
import module_build_service.builder
nsvc = mmd.get_nsvc()
# Handle BASE_MODULE_ARCHES. Find out the base modules in buildrequires
# section of XMD and set the Koji tag arches according to it.
# At first, handle BASE_MODULE_ARCHES - this overrides any other option.
# Find out the base modules in buildrequires section of XMD and
# set the Koji tag arches according to it.
if "mbs" in mmd.get_xmd():
for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():
ns = ":".join([req_name, req_data["stream"]])
if ns in config.base_module_arches:
arches = config.base_module_arches[ns]
break
log.info("Setting build arches of %s to %r based on the BASE_MODULE_ARCHES." % (
nsvc, arches))
return arches
# Check whether the module contains the `koji_tag_arches`. This is used only
# by special modules defining the layered products.
try:
arches = mmd.get_xmd()["mbs"]["koji_tag_arches"]
log.info("Setting build arches of %s to %r based on the koji_tag_arches." % (
nsvc, arches))
return arches
except KeyError:
pass
# Check the base/layered-product module this module buildrequires and try to get the
# list of arches from there.
try:
buildrequires = mmd.get_xmd()["mbs"]["buildrequires"]
except (ValueError, KeyError):
log.warning(
"Module {0} does not have buildrequires in its xmd".format(mmd.get_nsvc()))
buildrequires = None
if buildrequires:
# Looping through all the privileged modules that are allowed to set koji tag arches
# and the base modules to see what the koji tag arches should be. Doing it this way
# preserves the order in the configurations.
with models.make_session(conf) as session:
for module in conf.allowed_privileged_module_names + conf.base_module_names:
module_in_xmd = buildrequires.get(module)
if not module_in_xmd:
continue
module_obj = models.ModuleBuild.get_build_from_nsvc(
session,
module,
module_in_xmd["stream"],
module_in_xmd["version"],
module_in_xmd["context"],
)
if not module_obj:
continue
arches = module_build_service.builder.GenericBuilder.get_module_build_arches(
module_obj)
if arches:
log.info("Setting build arches of %s to %r based on the buildrequired "
"module %r." % (nsvc, arches, module_obj))
return arches
# As a last resort, return just the preconfigured list of arches.
arches = config.arches
log.info("Setting build arches of %s to %r based on default ARCHES." % (nsvc, arches))
return arches

View File

@@ -41,10 +41,33 @@ import module_build_service.scm
from module_build_service import conf, db, log, models, Modulemd
from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden, Conflict
from module_build_service.utils import (
to_text_type, deps_to_dict, mmd_to_str, load_mmd, load_mmd_file
to_text_type, deps_to_dict, mmd_to_str, load_mmd, load_mmd_file,
get_build_arches
)
def record_module_build_arches(mmd, build, session):
"""
Finds out the list of build arches against which the ModuleBuild `build` should be built
and records them to `build.arches`.
:param Modulemd mmd: The MMD file associated with a ModuleBuild.
:param ModuleBuild build: The ModuleBuild.
:param session: Database session.
"""
arches = get_build_arches(mmd, conf)
for arch in arches:
arch_obj = session.query(models.ModuleArch).filter_by(name=arch).first()
if not arch_obj:
arch_obj = models.ModuleArch(name=arch)
session.add(arch_obj)
session.commit()
if arch_obj not in build.arches:
build.arches.append(arch_obj)
session.add(build)
def record_filtered_rpms(mmd):
"""Record filtered RPMs that should not be installed into buildroot
@@ -313,10 +336,14 @@ def validate_mmd(mmd):
name = mmd.get_module_name()
xmd = mmd.get_xmd()
if "mbs" in xmd:
allowed_to_mark_disttag = name in conf.allowed_disttag_marking_module_names
if not (set(xmd["mbs"].keys()) == {"disttag_marking"} and allowed_to_mark_disttag):
if name not in conf.allowed_privileged_module_names:
raise ValidationError('The "mbs" xmd field is reserved for MBS')
allowed_keys = ["disttag_marking", "koji_tag_arches"]
for key in xmd["mbs"].keys():
if key not in allowed_keys:
raise ValidationError('The "mbs" xmd field is reserved for MBS')
if name in conf.base_module_names:
raise ValidationError(
'You cannot build a module named "{}" since it is a base module'.format(name))

View File

@@ -103,10 +103,16 @@ def patch_zeromq_time_sleep():
patch_zeromq_time_sleep()
def clean_database(add_platform_module=True):
def clean_database(add_platform_module=True, add_default_arches=True):
db.session.commit()
db.drop_all()
db.create_all()
if add_default_arches:
arch_obj = module_build_service.models.ModuleArch(name="x86_64")
db.session.add(arch_obj)
db.session.commit()
if add_platform_module:
mmd = load_mmd_file(os.path.join(base_dir, "staged_data", "platform.yaml"))
import_mmd(db.session, mmd)
@@ -143,6 +149,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=False, scra
def _populate_data(session, data_size=10, contexts=False, scratch=False):
arch = module_build_service.models.ModuleArch.query.get(1)
num_contexts = 2 if contexts else 1
for index in range(data_size):
for context in range(num_contexts):
@@ -164,6 +171,7 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
time_completed=datetime(2016, 9, 3, 11, 25, 32) + timedelta(minutes=(index * 10)),
rebuild_strategy="changed-and-after",
)
build_one.arches.append(arch)
if contexts:
build_one.stream = str(index)
@@ -229,6 +237,7 @@ def _populate_data(session, data_size=10, contexts=False, scratch=False):
time_completed=datetime(2016, 9, 3, 11, 27, 19) + timedelta(minutes=(index * 10)),
rebuild_strategy="changed-and-after",
)
build_two.arches.append(arch)
session.add(build_two)
session.commit()
@@ -328,6 +337,7 @@ def scheduler_init_data(tangerine_state=None, scratch=False):
mmd.get_rpm_component("tangerine").set_buildorder(0)
platform_br = module_build_service.models.ModuleBuild.query.get(1)
arch = module_build_service.models.ModuleArch.query.get(1)
module_build = module_build_service.models.ModuleBuild(
name="testmodule",
@@ -351,6 +361,7 @@ def scheduler_init_data(tangerine_state=None, scratch=False):
modulemd=mmd_to_str(mmd),
)
module_build.arches.append(arch)
module_build.buildrequires.append(platform_br)
build_one_component_release = get_rpm_release(module_build)
@@ -425,6 +436,7 @@ def reuse_component_init_data():
mmd = load_mmd_file(formatted_testmodule_yml_path)
platform_br = module_build_service.models.ModuleBuild.query.get(1)
arch = module_build_service.models.ModuleArch.query.get(1)
build_one = module_build_service.models.ModuleBuild(
name="testmodule",
@@ -453,6 +465,7 @@ def reuse_component_init_data():
xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"
mmd.set_xmd(xmd)
build_one.modulemd = mmd_to_str(mmd)
build_one.arches.append(arch)
build_one.buildrequires.append(platform_br)
build_one.component_builds.extend([
@@ -535,6 +548,7 @@ def reuse_component_init_data():
xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"
mmd.set_xmd(xmd)
build_two.modulemd = mmd_to_str(mmd)
build_two.arches.append(arch)
build_two.buildrequires.append(platform_br)
build_two.component_builds.extend([
@@ -705,6 +719,7 @@ def make_module(
xmd=None,
store_to_db=True,
virtual_streams=None,
arches=None,
):
"""
Creates new models.ModuleBuild defined by `nsvc` string with requires
@@ -724,6 +739,8 @@ def make_module(
:param bool store_to_db: whether to store created module metadata to the
database.
:param list virtual_streams: List of virtual streams provided by this module.
:param list arches: List of architectures this module is built against.
If set to None, ["x86_64"] is used as a default.
:return: New Module Build if set to store module metadata to database,
otherwise the module metadata is returned.
:rtype: ModuleBuild or Modulemd.Module
@@ -821,4 +838,18 @@ def make_module(
module_build.virtual_streams.append(vs_obj)
db.session.commit()
if arches is None:
arches = ["x86_64"]
for arch in arches:
arch_obj = db.session.query(module_build_service.models.ModuleArch).filter_by(
name=arch).first()
if not arch_obj:
arch_obj = module_build_service.models.ModuleArch(name=arch)
db.session.add(arch_obj)
db.session.commit()
if arch_obj not in module_build.arches:
module_build.arches.append(arch_obj)
db.session.commit()
return module_build

View File

@@ -136,6 +136,10 @@ class FakeModuleBuilder(GenericBuilder):
FakeModuleBuilder.DEFAULT_GROUPS = None
FakeModuleBuilder.backend = "test"
@classmethod
def get_module_build_arches(cls, module):
return ["x86_64"]
def buildroot_connect(self, groups):
default_groups = FakeModuleBuilder.DEFAULT_GROUPS or {
"srpm-build": set([

View File

@@ -98,6 +98,10 @@ class FakeKojiModuleBuilder(KojiModuleBuilder):
return koji_session
@classmethod
def get_module_build_arches(cls, module):
return ["x86_64"]
class TestKojiBuilder:
def setup_method(self, test_method):
@@ -463,13 +467,11 @@ class TestKojiBuilder:
assert weights == {"httpd": 1.5, "apr": 1.5}
session.krb_login.assert_called_once()
@patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})
@pytest.mark.parametrize("blocklist", [False, True])
@pytest.mark.parametrize("custom_whitelist", [False, True])
@pytest.mark.parametrize("repo_include_all", [False, True])
@pytest.mark.parametrize("override_arches", [False, True])
def test_buildroot_connect(
self, custom_whitelist, blocklist, repo_include_all, override_arches
self, custom_whitelist, blocklist, repo_include_all
):
if blocklist:
mmd = self.module.mmd()
@@ -495,14 +497,7 @@ class TestKojiBuilder:
mmd.set_xmd(xmd)
self.module.modulemd = mmd_to_str(mmd)
if override_arches:
mmd = self.module.mmd()
xmd = mmd.get_xmd()
mbs_options = xmd["mbs"] if "mbs" in xmd.keys() else {}
mbs_options["buildrequires"] = {"platform": {"stream": "xx"}}
xmd["mbs"] = mbs_options
mmd.set_xmd(xmd)
self.module.modulemd = mmd_to_str(mmd)
self.module.arches.append(module_build_service.models.ModuleArch(name="i686"))
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
@@ -549,10 +544,7 @@ class TestKojiBuilder:
expected_calls = []
assert session.packageListBlock.mock_calls == expected_calls
if override_arches:
expected_arches = "x86_64 i686"
else:
expected_arches = "i686 armv7hl x86_64"
expected_arches = "x86_64 i686"
expected_calls = [
mock.call(
@@ -824,9 +816,18 @@ class TestKojiBuilder:
@patch.dict("sys.modules", krbV=MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):
builder = KojiModuleBuilder("owner", MagicMock(), conf, "module-tag", [])
builder = KojiModuleBuilder("owner", self.module, conf, "module-tag", [])
builder.koji_session.krb_login.assert_called_once()
@patch.dict("sys.modules", krbV=MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_module_build_arches(self, ClientSession):
arches = "x86_64 i686 ppc64le aarch64 s390x"
session = ClientSession.return_value
session.getTag.return_value = {"arches": arches}
ret = KojiModuleBuilder.get_module_build_arches(self.module)
assert " ".join(ret) == arches
class TestGetDistTagSRPM:
"""Test KojiModuleBuilder.get_disttag_srpm"""

View File

@@ -59,7 +59,8 @@ class TestModuleInit:
)
@patch("module_build_service.scm.SCM")
@patch("module_build_service.scheduler.handlers.modules.handle_stream_collision_modules")
def test_init_basic(self, rscm, mocked_scm, built_rpms):
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_basic(self, get_build_arches, rscm, mocked_scm, built_rpms):
FakeSCM(
mocked_scm,
"testmodule",
@@ -119,7 +120,8 @@ class TestModuleInit:
assert mmd_to_str(old_mmd) == mmd_to_str(new_mmd)
@patch("module_build_service.scm.SCM")
def test_init_scm_not_available(self, mocked_scm):
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_scm_not_available(self, get_build_arches, mocked_scm):
def mocked_scm_get_latest():
raise RuntimeError("Failed in mocked_scm_get_latest")
@@ -141,7 +143,8 @@ class TestModuleInit:
return_value=True,
)
@patch("module_build_service.scm.SCM")
def test_init_includedmodule(self, mocked_scm, mocked_mod_allow_repo):
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_includedmodule(self, get_build_arches, mocked_scm, mocked_mod_allow_repo):
FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])
includedmodules_yml_path = os.path.join(self.staged_data_dir, "includedmodules.yaml")
mmd = load_mmd_file(includedmodules_yml_path)
@@ -177,7 +180,9 @@ class TestModuleInit:
@patch("module_build_service.models.ModuleBuild.from_module_event")
@patch("module_build_service.scm.SCM")
def test_init_when_get_latest_raises(self, mocked_scm, mocked_from_module_event):
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_when_get_latest_raises(
self, get_build_arches, mocked_scm, mocked_from_module_event):
FakeSCM(
mocked_scm,
"testmodule",

View File

@@ -311,6 +311,40 @@ class TestUtils:
def teardown_method(self, test_method):
clean_database()
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_build_arches(self, ClientSession):
session = ClientSession.return_value
session.getTag.return_value = {"arches": "ppc64le"}
mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))
r = module_build_service.utils.get_build_arches(mmd, conf)
assert r == ["ppc64le"]
@patch(
"module_build_service.config.Config.allowed_privileged_module_names",
new_callable=mock.PropertyMock,
return_value=["testmodule"],
)
def test_get_build_arches_koji_tag_arches(self, cfg):
mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))
xmd = mmd.get_xmd()
xmd["mbs"]["koji_tag_arches"] = ["ppc64", "ppc64le"]
mmd.set_xmd(xmd)
r = module_build_service.utils.get_build_arches(mmd, conf)
assert r == ["ppc64", "ppc64le"]
@patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})
def test_get_build_arches_base_module_override(self):
mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))
xmd = mmd.get_xmd()
mbs_options = xmd["mbs"] if "mbs" in xmd.keys() else {}
mbs_options["buildrequires"] = {"platform": {"stream": "xx"}}
xmd["mbs"] = mbs_options
mmd.set_xmd(xmd)
r = module_build_service.utils.get_build_arches(mmd, conf)
assert r == ["x86_64", "i686"]
@pytest.mark.parametrize("context", ["c1", None])
def test_import_mmd_contexts(self, context):
mmd = load_mmd_file(path.join(BASE_DIR, "..", "staged_data", "formatted_testmodule.yaml"))
@@ -425,13 +459,13 @@ class TestUtils:
assert release == "module+fedora28+2+814cfa39"
@patch(
"module_build_service.config.Config.allowed_disttag_marking_module_names",
"module_build_service.config.Config.allowed_privileged_module_names",
new_callable=mock.PropertyMock,
return_value=["build"],
)
def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn):
"""
Test that when a module buildrequires a module in conf.allowed_disttag_marking_module_names,
Test that when a module buildrequires a module in conf.allowed_privileged_module_names,
and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.
"""
scheduler_init_data(1)
@@ -477,6 +511,17 @@ class TestUtils:
release = module_build_service.utils.get_rpm_release(build_one)
assert release == "scrmod+f28+2+814cfa39"
@patch("module_build_service.utils.submit.get_build_arches")
def test_record_module_build_arches(self, get_build_arches):
get_build_arches.return_value = ["x86_64", "i686"]
scheduler_init_data(1)
build = models.ModuleBuild.query.get(2)
build.arches = []
module_build_service.utils.record_module_build_arches(build.mmd(), build, db.session)
arches = set([arch.name for arch in build.arches])
assert arches == set(get_build_arches.return_value)
@pytest.mark.parametrize(
"scmurl",
[

View File

@@ -2373,7 +2373,7 @@ class TestViews:
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
@patch(
"module_build_service.config.Config.allowed_disttag_marking_module_names",
"module_build_service.config.Config.allowed_privileged_module_names",
new_callable=PropertyMock,
return_value=["build"],
)