From 5e401bd598fe66165c9d9b26e558f9de45e50aa3 Mon Sep 17 00:00:00 2001 From: mprahl Date: Fri, 3 Jan 2020 09:11:44 -0500 Subject: [PATCH] Split utils/general.py This puts backend specific code in either the builder or scheduler subpackage. This puts API specific code in the new web subpackage. Lastly, any code shared between the API and backend is placed in the common subpackage. --- .../builder/KojiContentGenerator.py | 2 +- .../builder/KojiModuleBuilder.py | 71 +- .../builder/MockModuleBuilder.py | 120 ++- module_build_service/builder/base.py | 34 +- module_build_service/builder/utils.py | 176 ++++- module_build_service/common/utils.py | 226 ++++++ module_build_service/manage.py | 13 +- module_build_service/models.py | 5 +- module_build_service/resolver/DBResolver.py | 2 +- module_build_service/resolver/MBSResolver.py | 2 +- .../scheduler/handlers/components.py | 2 +- .../scheduler/handlers/modules.py | 8 +- module_build_service/scm.py | 39 +- module_build_service/utils/__init__.py | 1 - module_build_service/utils/general.py | 721 ------------------ module_build_service/utils/mse.py | 3 +- module_build_service/utils/submit.py | 80 +- module_build_service/utils/views.py | 2 +- module_build_service/views.py | 2 +- module_build_service/web/__init__.py | 0 module_build_service/web/utils.py | 19 + tests/__init__.py | 5 +- tests/conftest.py | 4 +- tests/test_build/test_build.py | 14 +- tests/test_builder/test_builder_utils.py | 449 ++++++++--- tests/test_builder/test_koji.py | 20 +- tests/test_builder/test_mock.py | 78 +- tests/test_common/test_utils.py | 152 ++++ tests/test_content_generator.py | 9 +- tests/test_manage.py | 2 +- tests/test_models/test_models.py | 2 +- tests/test_resolver/test_db.py | 2 +- tests/test_resolver/test_koji.py | 2 +- tests/test_resolver/test_local.py | 2 +- tests/test_resolver/test_mbs.py | 4 +- tests/test_scheduler/test_batches.py | 2 +- tests/test_scheduler/test_default_modules.py | 4 +- tests/test_scheduler/test_module_init.py | 3 +- 
tests/test_utils/test_utils.py | 432 +---------- tests/test_utils/test_utils_mse.py | 7 +- tests/test_views/test_views.py | 5 +- 41 files changed, 1373 insertions(+), 1353 deletions(-) create mode 100644 module_build_service/common/utils.py delete mode 100644 module_build_service/utils/general.py create mode 100644 module_build_service/web/__init__.py create mode 100644 module_build_service/web/utils.py create mode 100644 tests/test_common/test_utils.py diff --git a/module_build_service/builder/KojiContentGenerator.py b/module_build_service/builder/KojiContentGenerator.py index 0afc22a3..b82d9d2e 100644 --- a/module_build_service/builder/KojiContentGenerator.py +++ b/module_build_service/builder/KojiContentGenerator.py @@ -23,9 +23,9 @@ import pungi.arch from module_build_service import conf, log, build_logs, Modulemd from module_build_service.common.koji import get_session, koji_retrying_multicall_map +from module_build_service.common.utils import load_mmd, mmd_to_str, to_text_type from module_build_service.db_session import db_session from module_build_service.scm import SCM -from module_build_service.utils import to_text_type, load_mmd, mmd_to_str logging.basicConfig(level=logging.DEBUG) diff --git a/module_build_service/builder/KojiModuleBuilder.py b/module_build_service/builder/KojiModuleBuilder.py index b74fe98d..e310dbd1 100644 --- a/module_build_service/builder/KojiModuleBuilder.py +++ b/module_build_service/builder/KojiModuleBuilder.py @@ -1,6 +1,9 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT +import contextlib import copy +import hashlib +import locale import logging import os import koji @@ -14,15 +17,12 @@ import string import kobo.rpmlib import threading -import locale from itertools import chain from OpenSSL.SSL import SysCallError import textwrap from module_build_service import log, conf, models -import module_build_service.scm -import module_build_service.utils -from module_build_service.builder.utils import execute_cmd +from 
module_build_service.builder.utils import execute_cmd, get_rpm_release, validate_koji_tag from module_build_service.common.retry import retry from module_build_service.db_session import db_session from module_build_service.builder import GenericBuilder @@ -31,11 +31,18 @@ from module_build_service.common.koji import ( get_session, koji_multicall_map, koji_retrying_multicall_map, ) from module_build_service.scheduler import events -from module_build_service.utils import get_reusable_components, get_reusable_module, set_locale +from module_build_service.utils import get_reusable_components, get_reusable_module logging.basicConfig(level=logging.DEBUG) +@contextlib.contextmanager +def set_locale(*args, **kwargs): + saved = locale.setlocale(locale.LC_ALL) + yield locale.setlocale(*args, **kwargs) + locale.setlocale(locale.LC_ALL, saved) + + class KojiModuleBuilder(GenericBuilder): """ Koji specific builder class """ @@ -43,7 +50,7 @@ class KojiModuleBuilder(GenericBuilder): _build_lock = threading.Lock() region = dogpile.cache.make_region().configure("dogpile.cache.memory") - @module_build_service.utils.validate_koji_tag("tag_name") + @validate_koji_tag("tag_name") def __init__(self, db_session, owner, module, config, tag_name, components): """ :param db_session: SQLAlchemy session object. @@ -372,6 +379,44 @@ class KojiModuleBuilder(GenericBuilder): log.debug("Wrote srpm into %s" % srpm_paths[0]) return srpm_paths[0] + @staticmethod + def generate_koji_tag( + name, stream, version, context, max_length=256, scratch=False, scratch_id=0, + ): + """Generate a koji tag for a module + + Generally, a module's koji tag is in format ``module-N-S-V-C``. However, if + it is longer than maximum length, old format ``module-hash`` is used. 
+ + :param str name: a module's name + :param str stream: a module's stream + :param str version: a module's version + :param str context: a module's context + :param int max_length: the maximum length the Koji tag can be before + falling back to the old format of "module-". Default is 256 + characters, which is the maximum length of a tag Koji accepts. + :param bool scratch: a flag indicating if the generated tag will be for + a scratch module build + :param int scratch_id: for scratch module builds, a unique build identifier + :return: a Koji tag + :rtype: str + """ + if scratch: + prefix = "scrmod-" + # use unique suffix so same commit can be resubmitted + suffix = "+" + str(scratch_id) + else: + prefix = "module-" + suffix = "" + nsvc_list = [name, stream, str(version), context] + nsvc_tag = prefix + "-".join(nsvc_list) + suffix + if len(nsvc_tag) + len("-build") > max_length: + # Fallback to the old format of 'module-' if the generated koji tag + # name is longer than max_length + nsvc_hash = hashlib.sha1(".".join(nsvc_list).encode("utf-8")).hexdigest()[:16] + return prefix + nsvc_hash + suffix + return nsvc_tag + def buildroot_connect(self, groups): log.info("%r connecting buildroot." % self) @@ -413,7 +458,7 @@ class KojiModuleBuilder(GenericBuilder): # checks the length with '-build' at the end, but we know we will never append '-build', # so we can safely have the name check be more characters target_length = 50 + len("-build") - target = module_build_service.utils.generate_koji_tag( + target = self.generate_koji_tag( self.module.name, self.module.stream, self.module.version, @@ -576,7 +621,7 @@ class KojiModuleBuilder(GenericBuilder): # If the build cannot be found in the tags, it may be untagged as a result # of some earlier inconsistent situation. 
Let's find the task_info # based on the list of untagged builds - release = module_build_service.utils.get_rpm_release(self.db_session, self.module) + release = get_rpm_release(self.db_session, self.module) untagged = self.koji_session.untaggedBuilds(name=component_build.package) for untagged_build in untagged: if untagged_build["release"].endswith(release): @@ -721,7 +766,7 @@ class KojiModuleBuilder(GenericBuilder): """ return "%s/%s/latest/%s" % (config.koji_repository_url, tag_name, arch) - @module_build_service.utils.validate_koji_tag("tag", post="") + @validate_koji_tag("tag", post="") def _get_tag(self, tag, strict=True): if isinstance(tag, dict): tag = tag["name"] @@ -731,7 +776,7 @@ class KojiModuleBuilder(GenericBuilder): raise SystemError("Unknown tag: %s" % tag) return taginfo - @module_build_service.utils.validate_koji_tag(["tag_name"], post="") + @validate_koji_tag(["tag_name"], post="") def _koji_add_many_tag_inheritance(self, tag_name, parent_tags): tag = self._get_tag(tag_name) # highest priority num is at the end @@ -766,7 +811,7 @@ class KojiModuleBuilder(GenericBuilder): if inheritance_data: self.koji_session.setInheritanceData(tag["id"], inheritance_data) - @module_build_service.utils.validate_koji_tag("dest_tag") + @validate_koji_tag("dest_tag") def _koji_add_groups_to_tag(self, dest_tag, groups): """Add groups to a tag as well as packages listed by group @@ -800,7 +845,7 @@ class KojiModuleBuilder(GenericBuilder): for pkg in packages: self.koji_session.groupPackageListAdd(dest_tag, group, pkg) - @module_build_service.utils.validate_koji_tag("tag_name") + @validate_koji_tag("tag_name") def _koji_create_tag(self, tag_name, arches=None, perm=None): """Create a tag in Koji @@ -922,7 +967,7 @@ class KojiModuleBuilder(GenericBuilder): args = [[build_tag_name, package] for package in packages] koji_multicall_map(self.koji_session, self.koji_session.packageListUnblock, args) - @module_build_service.utils.validate_koji_tag(["build_tag", "dest_tag"]) + 
@validate_koji_tag(["build_tag", "dest_tag"]) def _koji_add_target(self, name, build_tag, dest_tag): """Add build target if it doesn't exist or validate the existing one diff --git a/module_build_service/builder/MockModuleBuilder.py b/module_build_service/builder/MockModuleBuilder.py index a27e6741..808e36af 100644 --- a/module_build_service/builder/MockModuleBuilder.py +++ b/module_build_service/builder/MockModuleBuilder.py @@ -2,31 +2,32 @@ # SPDX-License-Identifier: MIT import logging import os -import koji -import kobo.rpmlib import pipes -import platform import re import subprocess import threading -from module_build_service import conf, log +import dnf +import koji +import kobo.rpmlib +import platform + +from module_build_service import conf, log, Modulemd from module_build_service.common.koji import get_session +from module_build_service.common.utils import import_mmd, mmd_to_str import module_build_service.scm import module_build_service.utils -import module_build_service.scheduler from module_build_service.builder import GenericBuilder from module_build_service.builder.utils import ( create_local_repo_from_koji_tag, execute_cmd, find_srpm, get_koji_config, + validate_koji_tag, ) -from module_build_service.utils.general import mmd_to_str from module_build_service.db_session import db_session from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder from module_build_service.scheduler import events - from module_build_service import models logging.basicConfig(level=logging.DEBUG) @@ -46,6 +47,107 @@ def detect_arch(): return conf.arch_fallback +def import_fake_base_module(nsvc): + """ + Creates and imports new fake base module to be used with offline local builds. + + :param str nsvc: name:stream:version:context of a module. 
+ """ + name, stream, version, context = nsvc.split(":") + mmd = Modulemd.ModuleStreamV2.new(name, stream) + mmd.set_version(int(version)) + mmd.set_context(context) + mmd.set_summary("fake base module") + mmd.set_description("fake base module") + mmd.add_module_license("GPL") + + buildroot = Modulemd.Profile.new("buildroot") + for rpm in conf.default_buildroot_packages: + buildroot.add_rpm(rpm) + mmd.add_profile(buildroot) + + srpm_buildroot = Modulemd.Profile.new("srpm-buildroot") + for rpm in conf.default_srpm_buildroot_packages: + srpm_buildroot.add_rpm(rpm) + mmd.add_profile(srpm_buildroot) + + xmd = {"mbs": {}} + xmd_mbs = xmd["mbs"] + xmd_mbs["buildrequires"] = {} + xmd_mbs["requires"] = {} + xmd_mbs["commit"] = "ref_%s" % context + xmd_mbs["mse"] = "true" + # Use empty "repofile://" URI for base module. The base module will use the + # `conf.base_module_names` list as list of default repositories. + xmd_mbs["koji_tag"] = "repofile://" + mmd.set_xmd(xmd) + + import_mmd(db_session, mmd, False) + + +def get_local_releasever(): + """ + Returns the $releasever variable used in the system when expanding .repo files. + """ + dnf_base = dnf.Base() + return dnf_base.conf.releasever + + +def import_builds_from_local_dnf_repos(platform_id=None): + """ + Imports the module builds from all available local repositories to MBS DB. + + This is used when building modules locally without any access to MBS infra. + This method also generates and imports the base module according to /etc/os-release. + + :param str platform_id: The `name:stream` of a fake platform module to generate in this + method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID. 
+ """ + log.info("Loading available RPM repositories.") + dnf_base = dnf.Base() + dnf_base.read_all_repos() + + log.info("Importing available modules to MBS local database.") + for repo in dnf_base.repos.values(): + try: + repo.load() + except Exception as e: + log.warning(str(e)) + continue + mmd_data = repo.get_metadata_content("modules") + mmd_index = Modulemd.ModuleIndex.new() + ret, _ = mmd_index.update_from_string(mmd_data, True) + if not ret: + log.warning("Loading the repo '%s' failed", repo.name) + continue + + for module_name in mmd_index.get_module_names(): + for mmd in mmd_index.get_module(module_name).get_all_streams(): + xmd = mmd.get_xmd() + xmd["mbs"] = {} + xmd["mbs"]["koji_tag"] = "repofile://" + repo.repofile + xmd["mbs"]["mse"] = True + xmd["mbs"]["commit"] = "unknown" + mmd.set_xmd(xmd) + + import_mmd(db_session, mmd, False) + + if not platform_id: + # Parse the /etc/os-release to find out the local platform:stream. + with open("/etc/os-release", "r") as fd: + for l in fd.readlines(): + if not l.startswith("PLATFORM_ID"): + continue + platform_id = l.split("=")[1].strip("\"' \n") + if not platform_id: + raise ValueError("Cannot get PLATFORM_ID from /etc/os-release.") + + # Create the fake platform:stream:1:000000 module to fulfill the + # dependencies for local offline build and also to define the + # srpm-buildroot and buildroot. + import_fake_base_module("%s:1:000000" % platform_id) + + class MockModuleBuilder(GenericBuilder): backend = "mock" # Global build_id/task_id we increment when new build is executed. 
@@ -75,7 +177,7 @@ class MockModuleBuilder(GenericBuilder): else: raise IOError("None of {} yum config files found.".format(conf.yum_config_file)) - @module_build_service.utils.validate_koji_tag("tag_name") + @validate_koji_tag("tag_name") def __init__(self, db_session, owner, module, config, tag_name, components): self.db_session = db_session self.module_str = module.name @@ -84,7 +186,7 @@ class MockModuleBuilder(GenericBuilder): self.config = config self.groups = [] self.enabled_modules = [] - self.releasever = module_build_service.utils.get_local_releasever() + self.releasever = get_local_releasever() self.yum_conf = MockModuleBuilder.yum_config_template self.koji_session = None diff --git a/module_build_service/builder/base.py b/module_build_service/builder/base.py index 27745f03..a4850076 100644 --- a/module_build_service/builder/base.py +++ b/module_build_service/builder/base.py @@ -7,11 +7,10 @@ import dogpile.cache from abc import ABCMeta, abstractmethod from requests.exceptions import ConnectionError -from module_build_service import conf, log +from module_build_service import conf, log, models from module_build_service.common.retry import retry from module_build_service.models import BUILD_STATES from module_build_service.resolver import GenericResolver -from module_build_service.utils import create_dogpile_key_generator_func """ @@ -30,6 +29,37 @@ Koji workflow """ +def create_dogpile_key_generator_func(skip_first_n_args=0): + """ + Creates dogpile key_generator function with additional features: + + - when models.ModuleBuild is an argument of method cached by dogpile-cache, + the ModuleBuild.id is used as a key. Therefore it is possible to cache + data per particular module build, while normally, it would be per + ModuleBuild.__str__() output, which contains also batch and other data + which changes during the build of a module. + - it is able to skip first N arguments of a cached method. 
This is useful + when the db.session is part of cached method call, and the caching should + work no matter what session instance is passed to cached method argument. + """ + + def key_generator(namespace, fn): + fname = fn.__name__ + + def generate_key(*arg, **kwarg): + key_template = fname + "_" + for s in arg[skip_first_n_args:]: + if type(s) == models.ModuleBuild: + key_template += str(s.id) + else: + key_template += str(s) + "_" + return key_template + + return generate_key + + return key_generator + + class GenericBuilder(six.with_metaclass(ABCMeta)): """ External Api for builders diff --git a/module_build_service/builder/utils.py b/module_build_service/builder/utils.py index e922915c..081b9d5b 100644 --- a/module_build_service/builder/utils.py +++ b/module_build_service/builder/utils.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT +import hashlib +import inspect import os import shutil import subprocess @@ -10,7 +12,8 @@ from multiprocessing.dummy import Pool as ThreadPool import requests -from module_build_service import log +from module_build_service import conf, log, models +from module_build_service.errors import ValidationError, ProgrammingError logging.basicConfig(level=logging.DEBUG) @@ -170,3 +173,174 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None): execute_cmd(["/usr/bin/createrepo_c", repo_dir]) return True + + +def get_rpm_release(db_session, module_build): + """ + Generates the dist tag for the specified module + + :param db_session: SQLAlchemy session object. + :param module_build: a models.ModuleBuild object + :return: a string of the module's dist tag + """ + dist_str = ".".join([ + module_build.name, + module_build.stream, + str(module_build.version), + str(module_build.context), + ]).encode("utf-8") + dist_hash = hashlib.sha1(dist_str).hexdigest()[:8] + + # We need to share the same auto-incrementing index in dist tag between all MSE builds. 
+ # We can achieve that by using the lowest build ID of all the MSE siblings including + # this module build. + mse_build_ids = module_build.siblings(db_session) + [module_build.id or 0] + mse_build_ids.sort() + index = mse_build_ids[0] + try: + buildrequires = module_build.mmd().get_xmd()["mbs"]["buildrequires"] + except (ValueError, KeyError): + log.warning( + "Module build {0} does not have buildrequires in its xmd".format(module_build.id)) + buildrequires = None + + # Determine which buildrequired module will influence the disttag + br_module_marking = "" + # If the buildrequires are recorded in the xmd then we can try to find the base module that + # is buildrequired + if buildrequires: + # Looping through all the non-base modules that are allowed to set the disttag_marking + # and the base modules to see what the disttag marking should be. Doing it this way + # preserves the order in the configurations. + for module in conf.allowed_privileged_module_names + conf.base_module_names: + module_in_xmd = buildrequires.get(module) + + if not module_in_xmd: + continue + + module_obj = models.ModuleBuild.get_build_from_nsvc( + db_session, + module, + module_in_xmd["stream"], + module_in_xmd["version"], + module_in_xmd["context"], + ) + if not module_obj: + continue + + try: + marking = module_obj.mmd().get_xmd()["mbs"]["disttag_marking"] + # We must check for a KeyError because a Variant object doesn't support the `get` + # method + except KeyError: + if module not in conf.base_module_names: + continue + # If we've made it past all the modules in + # conf.allowed_privileged_module_names, and the base module doesn't have + # the disttag_marking set, then default to the stream of the first base module + marking = module_obj.stream + br_module_marking = marking + "+" + break + else: + log.warning( + "Module build {0} does not buildrequire a base module ({1})".format( + module_build.id, " or ".join(conf.base_module_names)) + ) + + # use alternate prefix for scratch module 
build components so they can be identified + prefix = "scrmod+" if module_build.scratch else conf.default_dist_tag_prefix + + return "{prefix}{base_module_marking}{index}+{dist_hash}".format( + prefix=prefix, base_module_marking=br_module_marking, index=index, dist_hash=dist_hash + ) + + +def validate_koji_tag(tag_arg_names, pre="", post="-", dict_key="name"): + """ + Used as a decorator validates koji tag arg(s)' value(s) + against configurable list of koji tag prefixes. + Supported arg value types are: dict, list, str + + :param tag_arg_names: Str or list of parameters to validate. + :param pre: Prepend this optional string (e.g. '.' in case of disttag + validation) to each koji tag prefix. + :param post: Append this string/delimiter ('-' by default) to each koji + tag prefix. + :param dict_key: In case of a dict arg, inspect this key ('name' by default). + """ + + if not isinstance(tag_arg_names, list): + tag_arg_names = [tag_arg_names] + + def validation_decorator(function): + def wrapper(*args, **kwargs): + call_args = inspect.getcallargs(function, *args, **kwargs) + + # if module name is in allowed_privileged_module_names or base_module_names lists + # we don't have to validate it since they could use an arbitrary Koji tag + try: + if call_args['self'].module_str in \ + conf.allowed_privileged_module_names + conf.base_module_names: + # skip validation + return function(*args, **kwargs) + except (AttributeError, KeyError): + pass + + for tag_arg_name in tag_arg_names: + err_subject = "Koji tag validation:" + + # If any of them don't appear in the function, then fail. + if tag_arg_name not in call_args: + raise ProgrammingError( + "{} Inspected argument {} is not within function args." + " The function was: {}.".format( + err_subject, tag_arg_name, function.__name__ + ) + ) + + tag_arg_val = call_args[tag_arg_name] + + # First, check that we have some value + if not tag_arg_val: + raise ValidationError( + "{} Can not validate {}. 
No value provided.".format( + err_subject, tag_arg_name) + ) + + # If any of them are a dict, then use the provided dict_key + if isinstance(tag_arg_val, dict): + if dict_key not in tag_arg_val: + raise ProgrammingError( + "{} Inspected dict arg {} does not contain {} key." + " The function was: {}.".format( + err_subject, tag_arg_name, dict_key, function.__name__) + ) + tag_list = [tag_arg_val[dict_key]] + elif isinstance(tag_arg_val, list): + tag_list = tag_arg_val + else: + tag_list = [tag_arg_val] + + # Check to make sure the provided values match our whitelist. + for allowed_prefix in conf.koji_tag_prefixes: + if all([t.startswith(pre + allowed_prefix + post) for t in tag_list]): + break + else: + # Only raise this error if the given tags don't start with + # *any* of our allowed prefixes. + raise ValidationError( + "Koji tag validation: {} does not satisfy any of allowed prefixes: {}" + .format(tag_list, [pre + p + post for p in conf.koji_tag_prefixes]) + ) + + # Finally.. after all that validation, call the original function + # and return its value. + return function(*args, **kwargs) + + # We're replacing the original function with our synthetic wrapper, + # but dress it up to make it look more like the original function. + wrapper.__name__ = function.__name__ + wrapper.__doc__ = function.__doc__ + return wrapper + + return validation_decorator diff --git a/module_build_service/common/utils.py b/module_build_service/common/utils.py new file mode 100644 index 00000000..eb3f9024 --- /dev/null +++ b/module_build_service/common/utils.py @@ -0,0 +1,226 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: MIT +from datetime import datetime +from functools import partial +import os + +from gi.repository.GLib import Error as ModuleMDError +from six import string_types, text_type + +from module_build_service import conf, log, Modulemd +from module_build_service.errors import UnprocessableEntity + + +def to_text_type(s): + """ + Converts `s` to `text_type`. 
In case it fails, returns `s`. + """ + try: + return text_type(s, "utf-8") + except TypeError: + return s + + +def load_mmd(yaml, is_file=False): + if not yaml: + raise UnprocessableEntity('The input modulemd was empty') + + target_mmd_version = Modulemd.ModuleStreamVersionEnum.TWO + try: + if is_file: + mmd = Modulemd.ModuleStream.read_file(yaml, True) + else: + mmd = Modulemd.ModuleStream.read_string(to_text_type(yaml), True) + mmd.validate() + if mmd.get_mdversion() < target_mmd_version: + mmd = mmd.upgrade(target_mmd_version) + elif mmd.get_mdversion() > target_mmd_version: + log.error("Encountered a modulemd file with the version %d", mmd.get_mdversion()) + raise UnprocessableEntity( + "The modulemd version cannot be greater than {}".format(target_mmd_version)) + except ModuleMDError as e: + not_found = False + if is_file: + error = "The modulemd {} is invalid.".format(os.path.basename(yaml)) + if os.path.exists(yaml): + with open(yaml, "rt") as yaml_hdl: + log.debug("Modulemd that failed to load:\n%s", yaml_hdl.read()) + else: + not_found = True + error = "The modulemd file {} was not found.".format(os.path.basename(yaml)) + log.error("The modulemd file %s was not found.", yaml) + else: + error = "The modulemd is invalid." + log.debug("Modulemd that failed to load:\n%s", yaml) + + if "modulemd-error-quark: " in str(e): + error = "{} The error was '{}'.".format( + error, str(e).split("modulemd-error-quark: ")[-1]) + elif "Unknown ModuleStream version" in str(e): + error = ( + "{}. The modulemd version can't be greater than {}." + .format(error, target_mmd_version) + ) + elif not_found is False: + error = "{} Please verify the syntax is correct.".format(error) + + log.exception(error) + raise UnprocessableEntity(error) + + return mmd + + +load_mmd_file = partial(load_mmd, is_file=True) + + +def import_mmd(db_session, mmd, check_buildrequires=True): + """ + Imports new module build defined by `mmd` to MBS database using `session`. 
+ If it already exists, it is updated. + + The ModuleBuild.koji_tag is set according to xmd['mbs]['koji_tag']. + The ModuleBuild.state is set to "ready". + The ModuleBuild.rebuild_strategy is set to "all". + The ModuleBuild.owner is set to "mbs_import". + + :param db_session: SQLAlchemy session object. + :param mmd: module metadata being imported into database. + :type mmd: Modulemd.ModuleStream + :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD + have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in + the `ModuleBuild.buildrequires` according to this data. + :return: module build (ModuleBuild), + log messages collected during import (list) + :rtype: tuple + """ + from module_build_service import models + + xmd = mmd.get_xmd() + # Set some defaults in xmd["mbs"] if they're not provided by the user + if "mbs" not in xmd: + xmd["mbs"] = {"mse": True} + + if not mmd.get_context(): + mmd.set_context(models.DEFAULT_MODULE_CONTEXT) + + # NSVC is used for logging purpose later. + nsvc = mmd.get_nsvc() + if nsvc is None: + msg = "Both the name and stream must be set for the modulemd being imported." 
+ log.error(msg) + raise UnprocessableEntity(msg) + + name = mmd.get_module_name() + stream = mmd.get_stream_name() + version = str(mmd.get_version()) + context = mmd.get_context() + + xmd_mbs = xmd["mbs"] + + disttag_marking = xmd_mbs.get("disttag_marking") + + # If it is a base module, then make sure the value that will be used in the RPM disttags + # doesn't contain a dash since a dash isn't allowed in the release field of the NVR + if name in conf.base_module_names: + if disttag_marking and "-" in disttag_marking: + msg = "The disttag_marking cannot contain a dash" + log.error(msg) + raise UnprocessableEntity(msg) + if not disttag_marking and "-" in stream: + msg = "The stream cannot contain a dash unless disttag_marking is set" + log.error(msg) + raise UnprocessableEntity(msg) + + virtual_streams = xmd_mbs.get("virtual_streams", []) + + # Verify that the virtual streams are the correct type + if virtual_streams and ( + not isinstance(virtual_streams, list) + or any(not isinstance(vs, string_types) for vs in virtual_streams) + ): + msg = "The virtual streams must be a list of strings" + log.error(msg) + raise UnprocessableEntity(msg) + + if check_buildrequires: + deps = mmd.get_dependencies() + if len(deps) > 1: + raise UnprocessableEntity( + "The imported module's dependencies list should contain just one element") + + if "buildrequires" not in xmd_mbs: + # Always set buildrequires if it is not there, because + # get_buildrequired_base_modules requires xmd/mbs/buildrequires exists. 
+ xmd_mbs["buildrequires"] = {} + mmd.set_xmd(xmd) + + if len(deps) > 0: + brs = set(deps[0].get_buildtime_modules()) + xmd_brs = set(xmd_mbs["buildrequires"].keys()) + if brs - xmd_brs: + raise UnprocessableEntity( + "The imported module buildrequires other modules, but the metadata in the " + 'xmd["mbs"]["buildrequires"] dictionary is missing entries' + ) + + if "koji_tag" not in xmd_mbs: + log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc)) + log.warning("koji_tag will be set to None for imported module build.") + + # Log messages collected during import + msgs = [] + + # Get the ModuleBuild from DB. + build = models.ModuleBuild.get_build_from_nsvc(db_session, name, stream, version, context) + if build: + msg = "Updating existing module build {}.".format(nsvc) + log.info(msg) + msgs.append(msg) + else: + build = models.ModuleBuild() + db_session.add(build) + + build.name = name + build.stream = stream + build.version = version + build.koji_tag = xmd_mbs.get("koji_tag") + build.state = models.BUILD_STATES["ready"] + build.modulemd = mmd_to_str(mmd) + build.context = context + build.owner = "mbs_import" + build.rebuild_strategy = "all" + now = datetime.utcnow() + build.time_submitted = now + build.time_modified = now + build.time_completed = now + if build.name in conf.base_module_names: + build.stream_version = models.ModuleBuild.get_stream_version(stream) + + # Record the base modules this module buildrequires + if check_buildrequires: + for base_module in build.get_buildrequired_base_modules(db_session): + if base_module not in build.buildrequires: + build.buildrequires.append(base_module) + + build.update_virtual_streams(db_session, virtual_streams) + + db_session.commit() + + msg = "Module {} imported".format(nsvc) + log.info(msg) + msgs.append(msg) + + return build, msgs + + +def mmd_to_str(mmd): + """ + Helper method to convert a Modulemd.ModuleStream object to a YAML string. 
+ + :param Modulemd.ModuleStream mmd: the modulemd to convert + :return: the YAML string of the modulemd + :rtype: str + """ + index = Modulemd.ModuleIndex() + index.add_module_stream(mmd) + return to_text_type(index.dump_to_string()) diff --git a/module_build_service/manage.py b/module_build_service/manage.py index d88dc361..e70664cd 100755 --- a/module_build_service/manage.py +++ b/module_build_service/manage.py @@ -10,15 +10,10 @@ import getpass import textwrap from werkzeug.datastructures import FileStorage -from module_build_service import app, conf, db, create_app -from module_build_service import models -from module_build_service.utils import ( - submit_module_build_from_yaml, - load_local_builds, - load_mmd_file, - import_mmd, - import_builds_from_local_dnf_repos, -) +from module_build_service import app, conf, create_app, db, models +from module_build_service.builder.MockModuleBuilder import import_builds_from_local_dnf_repos +from module_build_service.common.utils import load_mmd_file, import_mmd +from module_build_service.utils import submit_module_build_from_yaml, load_local_builds from module_build_service.db_session import db_session from module_build_service.errors import StreamAmbigous import module_build_service.messaging diff --git a/module_build_service/models.py b/module_build_service/models.py index 7ec35b6d..b99b3bd0 100644 --- a/module_build_service/models.py +++ b/module_build_service/models.py @@ -18,6 +18,7 @@ from sqlalchemy.orm import validates, load_only import module_build_service.messaging from module_build_service import db, log, get_url_for, conf +from module_build_service.common.utils import load_mmd from module_build_service.errors import UnprocessableEntity from module_build_service.scheduler import events @@ -451,8 +452,6 @@ class ModuleBuild(MBSBase): return db_session.query(ModuleBuild).filter_by(koji_tag=tag).first() def mmd(self): - from module_build_service.utils import load_mmd - try: return load_mmd(self.modulemd) except 
UnprocessableEntity: @@ -503,8 +502,6 @@ class ModuleBuild(MBSBase): :rtype: Contexts :return: Named tuple with build_context, runtime_context and context hashes. """ - from module_build_service.utils.general import load_mmd - try: mmd = load_mmd(mmd_str) except UnprocessableEntity: diff --git a/module_build_service/resolver/DBResolver.py b/module_build_service/resolver/DBResolver.py index ef6db0c8..2b4308eb 100644 --- a/module_build_service/resolver/DBResolver.py +++ b/module_build_service/resolver/DBResolver.py @@ -6,7 +6,7 @@ from module_build_service import log, db from module_build_service.resolver.base import GenericResolver from module_build_service import models from module_build_service.errors import UnprocessableEntity -from module_build_service.utils.general import load_mmd +from module_build_service.common.utils import load_mmd import sqlalchemy diff --git a/module_build_service/resolver/MBSResolver.py b/module_build_service/resolver/MBSResolver.py index a311cc75..89d2718f 100644 --- a/module_build_service/resolver/MBSResolver.py +++ b/module_build_service/resolver/MBSResolver.py @@ -7,9 +7,9 @@ import kobo.rpmlib from module_build_service import conf from module_build_service import models +from module_build_service.common.utils import load_mmd, import_mmd from module_build_service.errors import UnprocessableEntity from module_build_service.resolver.KojiResolver import KojiResolver -from module_build_service.utils.general import import_mmd, load_mmd from module_build_service.utils.request_utils import requests_session log = logging.getLogger() diff --git a/module_build_service/scheduler/handlers/components.py b/module_build_service/scheduler/handlers/components.py index d24966d2..e2440ef3 100644 --- a/module_build_service/scheduler/handlers/components.py +++ b/module_build_service/scheduler/handlers/components.py @@ -8,7 +8,7 @@ import koji from module_build_service import celery_app, conf, models, log from module_build_service.builder import 
GenericBuilder from module_build_service.common.koji import get_session -from module_build_service.utils.general import mmd_to_str +from module_build_service.common.utils import mmd_to_str from module_build_service.db_session import db_session from module_build_service.scheduler import events from module_build_service.scheduler.batches import continue_batch_build diff --git a/module_build_service/scheduler/handlers/modules.py b/module_build_service/scheduler/handlers/modules.py index 4c94bb6f..d0d1aaa2 100644 --- a/module_build_service/scheduler/handlers/modules.py +++ b/module_build_service/scheduler/handlers/modules.py @@ -3,14 +3,15 @@ """ Handlers for module change events on the message bus. """ from module_build_service import celery_app, conf, models, log, build_logs +from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder +from module_build_service.builder.utils import get_rpm_release +from module_build_service.common.utils import mmd_to_str from module_build_service.common.retry import retry import module_build_service.resolver import module_build_service.utils from module_build_service.utils import ( attempt_to_reuse_all_components, record_component_builds, - get_rpm_release, - generate_koji_tag, record_filtered_rpms, record_module_build_arches ) @@ -25,7 +26,6 @@ from module_build_service.scheduler import events from module_build_service.utils.ursine import handle_stream_collision_modules from requests.exceptions import ConnectionError -from module_build_service.utils import mmd_to_str import koji import six.moves.xmlrpc_client as xmlrpclib @@ -246,7 +246,7 @@ def generate_module_build_koji_tag(build): """ log.info("Getting tag for %s:%s:%s", build.name, build.stream, build.version) if conf.system in ["koji", "test"]: - return generate_koji_tag( + return KojiModuleBuilder.generate_koji_tag( build.name, build.stream, build.version, diff --git a/module_build_service/scm.py b/module_build_service/scm.py index 529b000d..f54f8ae1 100644 
--- a/module_build_service/scm.py +++ b/module_build_service/scm.py @@ -17,7 +17,44 @@ from module_build_service.errors import ( UnprocessableEntity, ProgrammingError, ) -from module_build_service.utils.general import scm_url_schemes + + +def scm_url_schemes(terse=False): + """ + Definition of URL schemes supported by both frontend and scheduler. + + NOTE: only git URLs in the following formats are supported atm: + git:// + git+http:// + git+https:// + git+rsync:// + http:// + https:// + file:// + + :param terse=False: Whether to return terse list of unique URL schemes + even without the "://". + """ + + scm_types = { + "git": ( + "git://", + "git+http://", + "git+https://", + "git+rsync://", + "http://", + "https://", + "file://", + ) + } + + if not terse: + return scm_types + else: + scheme_list = [] + for scm_type, scm_schemes in scm_types.items(): + scheme_list.extend([scheme[:-3] for scheme in scm_schemes]) + return list(set(scheme_list)) class SCM(object): diff --git a/module_build_service/utils/__init__.py b/module_build_service/utils/__init__.py index fe548da2..4b880ac8 100644 --- a/module_build_service/utils/__init__.py +++ b/module_build_service/utils/__init__.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT -from module_build_service.utils.general import * # noqa from module_build_service.utils.mse import * # noqa from module_build_service.utils.views import * # noqa from module_build_service.utils.reuse import * # noqa diff --git a/module_build_service/utils/general.py b/module_build_service/utils/general.py deleted file mode 100644 index 36462733..00000000 --- a/module_build_service/utils/general.py +++ /dev/null @@ -1,721 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: MIT -import os -import inspect -import hashlib -import locale -import contextlib -from datetime import datetime -from functools import partial - -from six import text_type, string_types -from gi.repository.GLib import Error as ModuleMDError - -from 
module_build_service import conf, log, models, Modulemd -from module_build_service.db_session import db_session -from module_build_service.errors import ValidationError, ProgrammingError, UnprocessableEntity - - -def to_text_type(s): - """ - Converts `s` to `text_type`. In case it fails, returns `s`. - """ - try: - return text_type(s, "utf-8") - except TypeError: - return s - - -def load_mmd(yaml, is_file=False): - if not yaml: - raise UnprocessableEntity('The input modulemd was empty') - - target_mmd_version = Modulemd.ModuleStreamVersionEnum.TWO - try: - if is_file: - mmd = Modulemd.ModuleStream.read_file(yaml, True) - else: - mmd = Modulemd.ModuleStream.read_string(to_text_type(yaml), True) - mmd.validate() - if mmd.get_mdversion() < target_mmd_version: - mmd = mmd.upgrade(target_mmd_version) - elif mmd.get_mdversion() > target_mmd_version: - log.error("Encountered a modulemd file with the version %d", mmd.get_mdversion()) - raise UnprocessableEntity( - "The modulemd version cannot be greater than {}".format(target_mmd_version)) - except ModuleMDError as e: - not_found = False - if is_file: - error = "The modulemd {} is invalid.".format(os.path.basename(yaml)) - if os.path.exists(yaml): - with open(yaml, "rt") as yaml_hdl: - log.debug("Modulemd that failed to load:\n%s", yaml_hdl.read()) - else: - not_found = True - error = "The modulemd file {} was not found.".format(os.path.basename(yaml)) - log.error("The modulemd file %s was not found.", yaml) - else: - error = "The modulemd is invalid." - log.debug("Modulemd that failed to load:\n%s", yaml) - - if "modulemd-error-quark: " in str(e): - error = "{} The error was '{}'.".format( - error, str(e).split("modulemd-error-quark: ")[-1]) - elif "Unknown ModuleStream version" in str(e): - error = ( - "{}. The modulemd version can't be greater than {}." 
- .format(error, target_mmd_version) - ) - elif not_found is False: - error = "{} Please verify the syntax is correct.".format(error) - - log.exception(error) - raise UnprocessableEntity(error) - - return mmd - - -load_mmd_file = partial(load_mmd, is_file=True) - - -def scm_url_schemes(terse=False): - """ - Definition of URL schemes supported by both frontend and scheduler. - - NOTE: only git URLs in the following formats are supported atm: - git:// - git+http:// - git+https:// - git+rsync:// - http:// - https:// - file:// - - :param terse=False: Whether to return terse list of unique URL schemes - even without the "://". - """ - - scm_types = { - "git": ( - "git://", - "git+http://", - "git+https://", - "git+rsync://", - "http://", - "https://", - "file://", - ) - } - - if not terse: - return scm_types - else: - scheme_list = [] - for scm_type, scm_schemes in scm_types.items(): - scheme_list.extend([scheme[:-3] for scheme in scm_schemes]) - return list(set(scheme_list)) - - -def module_build_state_from_msg(msg): - state = int(msg.module_build_state) - # TODO better handling - assert state in models.BUILD_STATES.values(), "state=%s(%s) is not in %s" % ( - state, - type(state), - list(models.BUILD_STATES.values()), - ) - return state - - -def generate_koji_tag(name, stream, version, context, max_length=256, scratch=False, scratch_id=0): - """Generate a koji tag for a module - - Generally, a module's koji tag is in format ``module-N-S-V-C``. However, if - it is longer than maximum length, old format ``module-hash`` is used. - - :param str name: a module's name - :param str stream: a module's stream - :param str version: a module's version - :param str context: a module's context - :param int max_length: the maximum length the Koji tag can be before - falling back to the old format of "module-". Default is 256 - characters, which is the maximum length of a tag Koji accepts. 
- :param bool scratch: a flag indicating if the generated tag will be for - a scratch module build - :param int scratch_id: for scratch module builds, a unique build identifier - :return: a Koji tag - :rtype: str - """ - if scratch: - prefix = "scrmod-" - # use unique suffix so same commit can be resubmitted - suffix = "+" + str(scratch_id) - else: - prefix = "module-" - suffix = "" - nsvc_list = [name, stream, str(version), context] - nsvc_tag = prefix + "-".join(nsvc_list) + suffix - if len(nsvc_tag) + len("-build") > max_length: - # Fallback to the old format of 'module-' if the generated koji tag - # name is longer than max_length - nsvc_hash = hashlib.sha1(".".join(nsvc_list).encode("utf-8")).hexdigest()[:16] - return prefix + nsvc_hash + suffix - return nsvc_tag - - -def validate_koji_tag(tag_arg_names, pre="", post="-", dict_key="name"): - """ - Used as a decorator validates koji tag arg(s)' value(s) - against configurable list of koji tag prefixes. - Supported arg value types are: dict, list, str - - :param tag_arg_names: Str or list of parameters to validate. - :param pre: Prepend this optional string (e.g. '.' in case of disttag - validation) to each koji tag prefix. - :param post: Append this string/delimiter ('-' by default) to each koji - tag prefix. - :param dict_key: In case of a dict arg, inspect this key ('name' by default). 
- """ - - if not isinstance(tag_arg_names, list): - tag_arg_names = [tag_arg_names] - - def validation_decorator(function): - def wrapper(*args, **kwargs): - call_args = inspect.getcallargs(function, *args, **kwargs) - - # if module name is in allowed_privileged_module_names or base_module_names lists - # we don't have to validate it since they could use an arbitrary Koji tag - try: - if call_args['self'].module_str in \ - conf.allowed_privileged_module_names + conf.base_module_names: - # skip validation - return function(*args, **kwargs) - except (AttributeError, KeyError): - pass - - for tag_arg_name in tag_arg_names: - err_subject = "Koji tag validation:" - - # If any of them don't appear in the function, then fail. - if tag_arg_name not in call_args: - raise ProgrammingError( - "{} Inspected argument {} is not within function args." - " The function was: {}.".format( - err_subject, tag_arg_name, function.__name__ - ) - ) - - tag_arg_val = call_args[tag_arg_name] - - # First, check that we have some value - if not tag_arg_val: - raise ValidationError( - "{} Can not validate {}. No value provided.".format( - err_subject, tag_arg_name) - ) - - # If any of them are a dict, then use the provided dict_key - if isinstance(tag_arg_val, dict): - if dict_key not in tag_arg_val: - raise ProgrammingError( - "{} Inspected dict arg {} does not contain {} key." - " The function was: {}.".format( - err_subject, tag_arg_name, dict_key, function.__name__) - ) - tag_list = [tag_arg_val[dict_key]] - elif isinstance(tag_arg_val, list): - tag_list = tag_arg_val - else: - tag_list = [tag_arg_val] - - # Check to make sure the provided values match our whitelist. - for allowed_prefix in conf.koji_tag_prefixes: - if all([t.startswith(pre + allowed_prefix + post) for t in tag_list]): - break - else: - # Only raise this error if the given tags don't start with - # *any* of our allowed prefixes. 
- raise ValidationError( - "Koji tag validation: {} does not satisfy any of allowed prefixes: {}" - .format(tag_list, [pre + p + post for p in conf.koji_tag_prefixes]) - ) - - # Finally.. after all that validation, call the original function - # and return its value. - return function(*args, **kwargs) - - # We're replacing the original function with our synthetic wrapper, - # but dress it up to make it look more like the original function. - wrapper.__name__ = function.__name__ - wrapper.__doc__ = function.__doc__ - return wrapper - - return validation_decorator - - -def get_rpm_release(db_session, module_build): - """ - Generates the dist tag for the specified module - - :param db_session: SQLAlchemy session object. - :param module_build: a models.ModuleBuild object - :return: a string of the module's dist tag - """ - dist_str = ".".join([ - module_build.name, - module_build.stream, - str(module_build.version), - str(module_build.context), - ]).encode("utf-8") - dist_hash = hashlib.sha1(dist_str).hexdigest()[:8] - - # We need to share the same auto-incrementing index in dist tag between all MSE builds. - # We can achieve that by using the lowest build ID of all the MSE siblings including - # this module build. - mse_build_ids = module_build.siblings(db_session) + [module_build.id or 0] - mse_build_ids.sort() - index = mse_build_ids[0] - try: - buildrequires = module_build.mmd().get_xmd()["mbs"]["buildrequires"] - except (ValueError, KeyError): - log.warning( - "Module build {0} does not have buildrequires in its xmd".format(module_build.id)) - buildrequires = None - - # Determine which buildrequired module will influence the disttag - br_module_marking = "" - # If the buildrequires are recorded in the xmd then we can try to find the base module that - # is buildrequired - if buildrequires: - # Looping through all the non-base modules that are allowed to set the disttag_marking - # and the base modules to see what the disttag marking should be. 
Doing it this way - # preserves the order in the configurations. - for module in conf.allowed_privileged_module_names + conf.base_module_names: - module_in_xmd = buildrequires.get(module) - - if not module_in_xmd: - continue - - module_obj = models.ModuleBuild.get_build_from_nsvc( - db_session, - module, - module_in_xmd["stream"], - module_in_xmd["version"], - module_in_xmd["context"], - ) - if not module_obj: - continue - - try: - marking = module_obj.mmd().get_xmd()["mbs"]["disttag_marking"] - # We must check for a KeyError because a Variant object doesn't support the `get` - # method - except KeyError: - if module not in conf.base_module_names: - continue - # If we've made it past all the modules in - # conf.allowed_privileged_module_names, and the base module doesn't have - # the disttag_marking set, then default to the stream of the first base module - marking = module_obj.stream - br_module_marking = marking + "+" - break - else: - log.warning( - "Module build {0} does not buildrequire a base module ({1})".format( - module_build.id, " or ".join(conf.base_module_names)) - ) - - # use alternate prefix for scratch module build components so they can be identified - prefix = "scrmod+" if module_build.scratch else conf.default_dist_tag_prefix - - return "{prefix}{base_module_marking}{index}+{dist_hash}".format( - prefix=prefix, base_module_marking=br_module_marking, index=index, dist_hash=dist_hash - ) - - -def create_dogpile_key_generator_func(skip_first_n_args=0): - """ - Creates dogpile key_generator function with additional features: - - - when models.ModuleBuild is an argument of method cached by dogpile-cache, - the ModuleBuild.id is used as a key. Therefore it is possible to cache - data per particular module build, while normally, it would be per - ModuleBuild.__str__() output, which contains also batch and other data - which changes during the build of a module. - - it is able to skip first N arguments of a cached method. 
This is useful - when the db.session is part of cached method call, and the caching should - work no matter what session instance is passed to cached method argument. - """ - - def key_generator(namespace, fn): - fname = fn.__name__ - - def generate_key(*arg, **kwarg): - key_template = fname + "_" - for s in arg[skip_first_n_args:]: - if type(s) == models.ModuleBuild: - key_template += str(s.id) - else: - key_template += str(s) + "_" - return key_template - - return generate_key - - return key_generator - - -def import_mmd(db_session, mmd, check_buildrequires=True): - """ - Imports new module build defined by `mmd` to MBS database using `session`. - If it already exists, it is updated. - - The ModuleBuild.koji_tag is set according to xmd['mbs]['koji_tag']. - The ModuleBuild.state is set to "ready". - The ModuleBuild.rebuild_strategy is set to "all". - The ModuleBuild.owner is set to "mbs_import". - - :param db_session: SQLAlchemy session object. - :param mmd: module metadata being imported into database. - :type mmd: Modulemd.ModuleStream - :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD - have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in - the `ModuleBuild.buildrequires` according to this data. - :return: module build (ModuleBuild), - log messages collected during import (list) - :rtype: tuple - """ - xmd = mmd.get_xmd() - # Set some defaults in xmd["mbs"] if they're not provided by the user - if "mbs" not in xmd: - xmd["mbs"] = {"mse": True} - - if not mmd.get_context(): - mmd.set_context(models.DEFAULT_MODULE_CONTEXT) - - # NSVC is used for logging purpose later. - nsvc = mmd.get_nsvc() - if nsvc is None: - msg = "Both the name and stream must be set for the modulemd being imported." 
- log.error(msg) - raise UnprocessableEntity(msg) - - name = mmd.get_module_name() - stream = mmd.get_stream_name() - version = str(mmd.get_version()) - context = mmd.get_context() - - xmd_mbs = xmd["mbs"] - - disttag_marking = xmd_mbs.get("disttag_marking") - - # If it is a base module, then make sure the value that will be used in the RPM disttags - # doesn't contain a dash since a dash isn't allowed in the release field of the NVR - if name in conf.base_module_names: - if disttag_marking and "-" in disttag_marking: - msg = "The disttag_marking cannot contain a dash" - log.error(msg) - raise UnprocessableEntity(msg) - if not disttag_marking and "-" in stream: - msg = "The stream cannot contain a dash unless disttag_marking is set" - log.error(msg) - raise UnprocessableEntity(msg) - - virtual_streams = xmd_mbs.get("virtual_streams", []) - - # Verify that the virtual streams are the correct type - if virtual_streams and ( - not isinstance(virtual_streams, list) - or any(not isinstance(vs, string_types) for vs in virtual_streams) - ): - msg = "The virtual streams must be a list of strings" - log.error(msg) - raise UnprocessableEntity(msg) - - if check_buildrequires: - deps = mmd.get_dependencies() - if len(deps) > 1: - raise UnprocessableEntity( - "The imported module's dependencies list should contain just one element") - - if "buildrequires" not in xmd_mbs: - # Always set buildrequires if it is not there, because - # get_buildrequired_base_modules requires xmd/mbs/buildrequires exists. 
- xmd_mbs["buildrequires"] = {} - mmd.set_xmd(xmd) - - if len(deps) > 0: - brs = set(deps[0].get_buildtime_modules()) - xmd_brs = set(xmd_mbs["buildrequires"].keys()) - if brs - xmd_brs: - raise UnprocessableEntity( - "The imported module buildrequires other modules, but the metadata in the " - 'xmd["mbs"]["buildrequires"] dictionary is missing entries' - ) - - if "koji_tag" not in xmd_mbs: - log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc)) - log.warning("koji_tag will be set to None for imported module build.") - - # Log messages collected during import - msgs = [] - - # Get the ModuleBuild from DB. - build = models.ModuleBuild.get_build_from_nsvc(db_session, name, stream, version, context) - if build: - msg = "Updating existing module build {}.".format(nsvc) - log.info(msg) - msgs.append(msg) - else: - build = models.ModuleBuild() - db_session.add(build) - - build.name = name - build.stream = stream - build.version = version - build.koji_tag = xmd_mbs.get("koji_tag") - build.state = models.BUILD_STATES["ready"] - build.modulemd = mmd_to_str(mmd) - build.context = context - build.owner = "mbs_import" - build.rebuild_strategy = "all" - now = datetime.utcnow() - build.time_submitted = now - build.time_modified = now - build.time_completed = now - if build.name in conf.base_module_names: - build.stream_version = models.ModuleBuild.get_stream_version(stream) - - # Record the base modules this module buildrequires - if check_buildrequires: - for base_module in build.get_buildrequired_base_modules(db_session): - if base_module not in build.buildrequires: - build.buildrequires.append(base_module) - - build.update_virtual_streams(db_session, virtual_streams) - - db_session.commit() - - msg = "Module {} imported".format(nsvc) - log.info(msg) - msgs.append(msg) - - return build, msgs - - -def import_fake_base_module(nsvc): - """ - Creates and imports new fake base module to be used with offline local builds. 
- - :param str nsvc: name:stream:version:context of a module. - """ - name, stream, version, context = nsvc.split(":") - mmd = Modulemd.ModuleStreamV2.new(name, stream) - mmd.set_version(int(version)) - mmd.set_context(context) - mmd.set_summary("fake base module") - mmd.set_description("fake base module") - mmd.add_module_license("GPL") - - buildroot = Modulemd.Profile.new("buildroot") - for rpm in conf.default_buildroot_packages: - buildroot.add_rpm(rpm) - mmd.add_profile(buildroot) - - srpm_buildroot = Modulemd.Profile.new("srpm-buildroot") - for rpm in conf.default_srpm_buildroot_packages: - srpm_buildroot.add_rpm(rpm) - mmd.add_profile(srpm_buildroot) - - xmd = {"mbs": {}} - xmd_mbs = xmd["mbs"] - xmd_mbs["buildrequires"] = {} - xmd_mbs["requires"] = {} - xmd_mbs["commit"] = "ref_%s" % context - xmd_mbs["mse"] = "true" - # Use empty "repofile://" URI for base module. The base module will use the - # `conf.base_module_names` list as list of default repositories. - xmd_mbs["koji_tag"] = "repofile://" - mmd.set_xmd(xmd) - - import_mmd(db_session, mmd, False) - - -def get_local_releasever(): - """ - Returns the $releasever variable used in the system when expanding .repo files. - """ - # Import DNF here to not force it as a hard MBS dependency. - import dnf - - dnf_base = dnf.Base() - return dnf_base.conf.releasever - - -def import_builds_from_local_dnf_repos(platform_id=None): - """ - Imports the module builds from all available local repositories to MBS DB. - - This is used when building modules locally without any access to MBS infra. - This method also generates and imports the base module according to /etc/os-release. - - :param str platform_id: The `name:stream` of a fake platform module to generate in this - method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID. - """ - # Import DNF here to not force it as a hard MBS dependency. 
- import dnf - - log.info("Loading available RPM repositories.") - dnf_base = dnf.Base() - dnf_base.read_all_repos() - - log.info("Importing available modules to MBS local database.") - for repo in dnf_base.repos.values(): - try: - repo.load() - except Exception as e: - log.warning(str(e)) - continue - mmd_data = repo.get_metadata_content("modules") - mmd_index = Modulemd.ModuleIndex.new() - ret, _ = mmd_index.update_from_string(mmd_data, True) - if not ret: - log.warning("Loading the repo '%s' failed", repo.name) - continue - - for module_name in mmd_index.get_module_names(): - for mmd in mmd_index.get_module(module_name).get_all_streams(): - xmd = mmd.get_xmd() - xmd["mbs"] = {} - xmd["mbs"]["koji_tag"] = "repofile://" + repo.repofile - xmd["mbs"]["mse"] = True - xmd["mbs"]["commit"] = "unknown" - mmd.set_xmd(xmd) - - import_mmd(db_session, mmd, False) - - if not platform_id: - # Parse the /etc/os-release to find out the local platform:stream. - with open("/etc/os-release", "r") as fd: - for l in fd.readlines(): - if not l.startswith("PLATFORM_ID"): - continue - platform_id = l.split("=")[1].strip("\"' \n") - if not platform_id: - raise ValueError("Cannot get PLATFORM_ID from /etc/os-release.") - - # Create the fake platform:stream:1:000000 module to fulfill the - # dependencies for local offline build and also to define the - # srpm-buildroot and buildroot. - import_fake_base_module("%s:1:000000" % platform_id) - - -def get_build_arches(mmd, config): - """ - Returns the list of architectures for which the module `mmd` should be built. - - :param mmd: Module MetaData - :param config: config (module_build_service.config.Config instance) - :return list of architectures - """ - # Imported here to allow import of utils in GenericBuilder. - from module_build_service.builder import GenericBuilder - - nsvc = mmd.get_nsvc() - - # At first, handle BASE_MODULE_ARCHES - this overrides any other option. 
- # Find out the base modules in buildrequires section of XMD and - # set the Koji tag arches according to it. - if "mbs" in mmd.get_xmd(): - for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items(): - ns = ":".join([req_name, req_data["stream"]]) - if ns in config.base_module_arches: - arches = config.base_module_arches[ns] - log.info("Setting build arches of %s to %r based on the BASE_MODULE_ARCHES." % ( - nsvc, arches)) - return arches - - # Check whether the module contains the `koji_tag_arches`. This is used only - # by special modules defining the layered products. - try: - arches = mmd.get_xmd()["mbs"]["koji_tag_arches"] - log.info("Setting build arches of %s to %r based on the koji_tag_arches." % ( - nsvc, arches)) - return arches - except KeyError: - pass - - # Check the base/layered-product module this module buildrequires and try to get the - # list of arches from there. - try: - buildrequires = mmd.get_xmd()["mbs"]["buildrequires"] - except (ValueError, KeyError): - log.warning( - "Module {0} does not have buildrequires in its xmd".format(mmd.get_nsvc())) - buildrequires = None - if buildrequires: - # Looping through all the privileged modules that are allowed to set koji tag arches - # and the base modules to see what the koji tag arches should be. Doing it this way - # preserves the order in the configurations. - for module in conf.allowed_privileged_module_names + conf.base_module_names: - module_in_xmd = buildrequires.get(module) - - if not module_in_xmd: - continue - - module_obj = models.ModuleBuild.get_build_from_nsvc( - db_session, - module, - module_in_xmd["stream"], - module_in_xmd["version"], - module_in_xmd["context"], - ) - if not module_obj: - continue - arches = GenericBuilder.get_module_build_arches(module_obj) - if arches: - log.info("Setting build arches of %s to %r based on the buildrequired " - "module %r." 
% (nsvc, arches, module_obj)) - return arches - - # As a last resort, return just the preconfigured list of arches. - arches = config.arches - log.info("Setting build arches of %s to %r based on default ARCHES." % (nsvc, arches)) - return arches - - -def deps_to_dict(deps, deps_type): - """ - Helper method to convert a Modulemd.Dependencies object to a dictionary. - - :param Modulemd.Dependencies deps: the Modulemd.Dependencies object to convert - :param str deps_type: the type of dependency (buildtime or runtime) - :return: a dictionary with the keys as module names and values as a list of strings - :rtype dict - """ - names_func = getattr(deps, 'get_{}_modules'.format(deps_type)) - streams_func = getattr(deps, 'get_{}_streams'.format(deps_type)) - return { - module: streams_func(module) - for module in names_func() - } - - -def mmd_to_str(mmd): - """ - Helper method to convert a Modulemd.ModuleStream object to a YAML string. - - :param Modulemd.ModuleStream mmd: the modulemd to convert - :return: the YAML string of the modulemd - :rtype: str - """ - index = Modulemd.ModuleIndex() - index.add_module_stream(mmd) - return to_text_type(index.dump_to_string()) - - -@contextlib.contextmanager -def set_locale(*args, **kwargs): - saved = locale.setlocale(locale.LC_ALL) - yield locale.setlocale(*args, **kwargs) - locale.setlocale(locale.LC_ALL, saved) diff --git a/module_build_service/utils/mse.py b/module_build_service/utils/mse.py index ea460f8e..420f9d3a 100644 --- a/module_build_service/utils/mse.py +++ b/module_build_service/utils/mse.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT from module_build_service import log, models, Modulemd, conf +from module_build_service.common.utils import mmd_to_str from module_build_service.errors import StreamAmbigous from module_build_service.errors import UnprocessableEntity from module_build_service.mmd_resolver import MMDResolver -from module_build_service.utils.general import deps_to_dict, mmd_to_str 
+from module_build_service.web.utils import deps_to_dict from module_build_service.resolver import GenericResolver diff --git a/module_build_service/utils/submit.py b/module_build_service/utils/submit.py index 7c279a09..0c141a59 100644 --- a/module_build_service/utils/submit.py +++ b/module_build_service/utils/submit.py @@ -17,12 +17,84 @@ from gi.repository import GLib import module_build_service.scm from module_build_service import conf, log, models, Modulemd +from module_build_service.common.utils import load_mmd, load_mmd_file, mmd_to_str, to_text_type from module_build_service.db_session import db_session from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden, Conflict -from module_build_service.utils import ( - to_text_type, deps_to_dict, mmd_to_str, load_mmd, load_mmd_file, - get_build_arches -) +from module_build_service.web.utils import deps_to_dict + + +def get_build_arches(mmd, config): + """ + Returns the list of architectures for which the module `mmd` should be built. + + :param mmd: Module MetaData + :param config: config (module_build_service.config.Config instance) + :return: list of architectures + """ + # Imported here to allow import of utils in GenericBuilder. + from module_build_service.builder import GenericBuilder + + nsvc = mmd.get_nsvc() + + # At first, handle BASE_MODULE_ARCHES - this overrides any other option. + # Find out the base modules in buildrequires section of XMD and + # set the Koji tag arches according to it. + if "mbs" in mmd.get_xmd(): + for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items(): + ns = ":".join([req_name, req_data["stream"]]) + if ns in config.base_module_arches: + arches = config.base_module_arches[ns] + log.info("Setting build arches of %s to %r based on the BASE_MODULE_ARCHES." % ( + nsvc, arches)) + return arches + + # Check whether the module contains the `koji_tag_arches`. This is used only + # by special modules defining the layered products. 
+ try: + arches = mmd.get_xmd()["mbs"]["koji_tag_arches"] + log.info("Setting build arches of %s to %r based on the koji_tag_arches." % ( + nsvc, arches)) + return arches + except KeyError: + pass + + # Check the base/layered-product module this module buildrequires and try to get the + # list of arches from there. + try: + buildrequires = mmd.get_xmd()["mbs"]["buildrequires"] + except (ValueError, KeyError): + log.warning( + "Module {0} does not have buildrequires in its xmd".format(mmd.get_nsvc())) + buildrequires = None + if buildrequires: + # Looping through all the privileged modules that are allowed to set koji tag arches + # and the base modules to see what the koji tag arches should be. Doing it this way + # preserves the order in the configurations. + for module in conf.allowed_privileged_module_names + conf.base_module_names: + module_in_xmd = buildrequires.get(module) + + if not module_in_xmd: + continue + + module_obj = models.ModuleBuild.get_build_from_nsvc( + db_session, + module, + module_in_xmd["stream"], + module_in_xmd["version"], + module_in_xmd["context"], + ) + if not module_obj: + continue + arches = GenericBuilder.get_module_build_arches(module_obj) + if arches: + log.info("Setting build arches of %s to %r based on the buildrequired " + "module %r." % (nsvc, arches, module_obj)) + return arches + + # As a last resort, return just the preconfigured list of arches. + arches = config.arches + log.info("Setting build arches of %s to %r based on default ARCHES." 
% (nsvc, arches)) + return arches def record_module_build_arches(mmd, build): diff --git a/module_build_service/utils/views.py b/module_build_service/utils/views.py index c4b32e96..e9f7604c 100644 --- a/module_build_service/utils/views.py +++ b/module_build_service/utils/views.py @@ -12,7 +12,7 @@ import sqlalchemy from module_build_service import models, api_version, conf, db from module_build_service.errors import ValidationError, NotFound -from .general import scm_url_schemes +from module_build_service.scm import scm_url_schemes def get_scm_url_re(): diff --git a/module_build_service/views.py b/module_build_service/views.py index e0a4af79..80895a98 100644 --- a/module_build_service/views.py +++ b/module_build_service/views.py @@ -13,13 +13,13 @@ from io import BytesIO from prometheus_client import generate_latest, CONTENT_TYPE_LATEST from module_build_service import app, conf, log, models, db, version, api_version as max_api_version +from module_build_service.common.utils import import_mmd from module_build_service.utils import ( cors_header, fetch_mmd, filter_component_builds, filter_module_builds, get_scm_url_re, - import_mmd, pagination_metadata, str_to_bool, submit_module_build_from_scm, diff --git a/module_build_service/web/__init__.py b/module_build_service/web/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/module_build_service/web/utils.py b/module_build_service/web/utils.py new file mode 100644 index 00000000..db0fd9d7 --- /dev/null +++ b/module_build_service/web/utils.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: MIT + + +def deps_to_dict(deps, deps_type): + """ + Helper method to convert a Modulemd.Dependencies object to a dictionary. 
+ + :param Modulemd.Dependencies deps: the Modulemd.Dependencies object to convert + :param str deps_type: the type of dependency (buildtime or runtime) + :return: a dictionary with the keys as module names and values as a list of strings + :rtype: dict + """ + names_func = getattr(deps, 'get_{}_modules'.format(deps_type)) + streams_func = getattr(deps, 'get_{}_streams'.format(deps_type)) + return { + module: streams_func(module) + for module in names_func() + } diff --git a/tests/__init__.py b/tests/__init__.py index f8b8d062..e1ca19d1 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -8,12 +8,13 @@ from six import string_types import time import hashlib from traceback import extract_stack -from module_build_service.utils import to_text_type, load_mmd +from module_build_service.common.utils import load_mmd, import_mmd import koji import module_build_service from module_build_service import db -from module_build_service.utils import get_rpm_release, import_mmd, mmd_to_str +from module_build_service.builder.utils import get_rpm_release +from module_build_service.common.utils import mmd_to_str, to_text_type from module_build_service.models import ( ModuleBuild, ModuleArch, ComponentBuild, VirtualStream, BUILD_STATES, diff --git a/tests/conftest.py b/tests/conftest.py index 2e861abd..e1930395 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,10 +7,10 @@ import pytest from datetime import datetime import module_build_service - +from module_build_service.builder.utils import get_rpm_release +from module_build_service.common.utils import load_mmd, mmd_to_str from module_build_service.models import BUILD_STATES from module_build_service.db_session import db_session -from module_build_service.utils.general import mmd_to_str, load_mmd, get_rpm_release from tests import clean_database, read_staged_data, module_build_from_modulemd BASE_DIR = os.path.dirname(__file__) diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py index 
dac994a2..70beb907 100644 --- a/tests/test_build/test_build.py +++ b/tests/test_build/test_build.py @@ -13,6 +13,8 @@ import hashlib import moksha.hub import fedmsg +from module_build_service.builder.utils import get_rpm_release +from module_build_service.common.utils import load_mmd, import_mmd import module_build_service.messaging import module_build_service.scheduler.consumer import module_build_service.scheduler.handlers.repos @@ -35,6 +37,7 @@ import json import itertools from module_build_service.builder import GenericBuilder +from module_build_service.builder.utils import validate_koji_tag from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder from tests import clean_database, read_staged_data, staged_data_filename @@ -180,7 +183,7 @@ class FakeModuleBuilder(GenericBuilder): on_buildroot_add_repos_cb = None on_get_task_info_cb = None - @module_build_service.utils.validate_koji_tag("tag_name") + @validate_koji_tag("tag_name") def __init__(self, db_session, owner, module, config, tag_name, components): self.db_session = db_session self.module_str = module @@ -347,8 +350,7 @@ class FakeModuleBuilder(GenericBuilder): def recover_orphaned_artifact(self, component_build): if self.INSTANT_COMPLETE: - disttag = module_build_service.utils.get_rpm_release( - self.db_session, component_build.module_build) + disttag = get_rpm_release(self.db_session, component_build.module_build) # We don't know the version or release, so just use a random one here nvr = "{0}-1.0-1.{1}".format(component_build.package, disttag) component_build.state = koji.BUILD_STATES["COMPLETE"] @@ -1838,10 +1840,8 @@ class TestBuild(BaseTestBuild): Test that when a build is submitted with a buildrequire without a Koji tag, MBS doesn't supply it as a dependency to the builder. 
""" - metadata_mmd = module_build_service.utils.load_mmd( - read_staged_data("build_metadata_module") - ) - module_build_service.utils.import_mmd(db_session, metadata_mmd) + metadata_mmd = load_mmd(read_staged_data("build_metadata_module")) + import_mmd(db_session, metadata_mmd) FakeSCM( mocked_scm, diff --git a/tests/test_builder/test_builder_utils.py b/tests/test_builder/test_builder_utils.py index 411d4ccc..22a613ae 100644 --- a/tests/test_builder/test_builder_utils.py +++ b/tests/test_builder/test_builder_utils.py @@ -3,122 +3,351 @@ import tempfile import shutil -from mock import patch, Mock, call +from mock import call, MagicMock, Mock, patch, PropertyMock +import pytest -from module_build_service import conf +from module_build_service import conf, models from module_build_service.builder import utils +from module_build_service.db_session import db_session +from module_build_service.errors import ProgrammingError, ValidationError +from module_build_service.common.utils import load_mmd, import_mmd, mmd_to_str +from tests import init_data, read_staged_data, scheduler_init_data -class TestBuilderUtils: - @patch("requests.get") - @patch("koji.ClientSession") - @patch("module_build_service.builder.utils.execute_cmd") - def test_create_local_repo_from_koji_tag(self, mock_exec_cmd, mock_koji_session, mock_get): - session = Mock() - rpms = [ - { - "arch": "src", - "build_id": 875991, - "name": "module-build-macros", - "release": "1.module_92011fe6", - "size": 6890, - "version": "0.1", - }, - { - "arch": "noarch", - "build_id": 875991, - "name": "module-build-macros", - "release": "1.module_92011fe6", - "size": 6890, - "version": "0.1", - }, - { - "arch": "x86_64", - "build_id": 875636, - "name": "ed-debuginfo", - "release": "2.module_bd6e0eb1", - "size": 81438, - "version": "1.14.1", - }, - { - "arch": "x86_64", - "build_id": 875636, - "name": "ed", - "release": "2.module_bd6e0eb1", - "size": 80438, - "version": "1.14.1", - }, - { - "arch": "x86_64", - "build_id": 
875640, - "name": "mksh-debuginfo", - "release": "2.module_bd6e0eb1", - "size": 578774, - "version": "54", - }, - { - "arch": "x86_64", - "build_id": 875640, - "name": "mksh", - "release": "2.module_bd6e0eb1", - "size": 267042, - "version": "54", - }, - ] +@patch("requests.get") +@patch("koji.ClientSession") +@patch("module_build_service.builder.utils.execute_cmd") +def test_create_local_repo_from_koji_tag(mock_exec_cmd, mock_koji_session, mock_get): + session = Mock() + rpms = [ + { + "arch": "src", + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "size": 6890, + "version": "0.1", + }, + { + "arch": "noarch", + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "size": 6890, + "version": "0.1", + }, + { + "arch": "x86_64", + "build_id": 875636, + "name": "ed-debuginfo", + "release": "2.module_bd6e0eb1", + "size": 81438, + "version": "1.14.1", + }, + { + "arch": "x86_64", + "build_id": 875636, + "name": "ed", + "release": "2.module_bd6e0eb1", + "size": 80438, + "version": "1.14.1", + }, + { + "arch": "x86_64", + "build_id": 875640, + "name": "mksh-debuginfo", + "release": "2.module_bd6e0eb1", + "size": 578774, + "version": "54", + }, + { + "arch": "x86_64", + "build_id": 875640, + "name": "mksh", + "release": "2.module_bd6e0eb1", + "size": 267042, + "version": "54", + }, + ] - builds = [ - { - "build_id": 875640, - "name": "mksh", - "release": "2.module_bd6e0eb1", - "version": "54", - "volume_name": "prod", - }, - { - "build_id": 875636, - "name": "ed", - "release": "2.module_bd6e0eb1", - "version": "1.14.1", - "volume_name": "prod", - }, - { - "build_id": 875991, - "name": "module-build-macros", - "release": "1.module_92011fe6", - "version": "0.1", - "volume_name": "prod", - }, - ] + builds = [ + { + "build_id": 875640, + "name": "mksh", + "release": "2.module_bd6e0eb1", + "version": "54", + "volume_name": "prod", + }, + { + "build_id": 875636, + "name": "ed", + "release": 
"2.module_bd6e0eb1", + "version": "1.14.1", + "volume_name": "prod", + }, + { + "build_id": 875991, + "name": "module-build-macros", + "release": "1.module_92011fe6", + "version": "0.1", + "volume_name": "prod", + }, + ] - session.listTaggedRPMS.return_value = (rpms, builds) - session.opts = {"topurl": "https://kojipkgs.stg.fedoraproject.org/"} - mock_koji_session.return_value = session + session.listTaggedRPMS.return_value = (rpms, builds) + session.opts = {"topurl": "https://kojipkgs.stg.fedoraproject.org/"} + mock_koji_session.return_value = session - tag = "module-testmodule-master-20170405123740-build" - temp_dir = tempfile.mkdtemp() - try: - utils.create_local_repo_from_koji_tag(conf, tag, temp_dir) - finally: - shutil.rmtree(temp_dir) + tag = "module-testmodule-master-20170405123740-build" + temp_dir = tempfile.mkdtemp() + try: + utils.create_local_repo_from_koji_tag(conf, tag, temp_dir) + finally: + shutil.rmtree(temp_dir) - url_one = ( - "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/module-build-macros/" - "0.1/1.module_92011fe6/noarch/module-build-macros-0.1-1.module_92011fe6.noarch.rpm" - ) - url_two = ( - "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/ed/1.14.1/" - "2.module_bd6e0eb1/x86_64/ed-1.14.1-2.module_bd6e0eb1.x86_64.rpm" - ) - url_three = ( - "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/mksh/54/" - "2.module_bd6e0eb1/x86_64/mksh-54-2.module_bd6e0eb1.x86_64.rpm" - ) + url_one = ( + "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/module-build-macros/" + "0.1/1.module_92011fe6/noarch/module-build-macros-0.1-1.module_92011fe6.noarch.rpm" + ) + url_two = ( + "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/ed/1.14.1/" + "2.module_bd6e0eb1/x86_64/ed-1.14.1-2.module_bd6e0eb1.x86_64.rpm" + ) + url_three = ( + "https://kojipkgs.stg.fedoraproject.org//vol/prod/packages/mksh/54/" + "2.module_bd6e0eb1/x86_64/mksh-54-2.module_bd6e0eb1.x86_64.rpm" + ) - expected_calls = [ - call(url_one, 
stream=True, timeout=60), - call(url_two, stream=True, timeout=60), - call(url_three, stream=True, timeout=60), - ] - for expected_call in expected_calls: - assert expected_call in mock_get.call_args_list - assert len(mock_get.call_args_list) == len(expected_calls) + expected_calls = [ + call(url_one, stream=True, timeout=60), + call(url_two, stream=True, timeout=60), + call(url_three, stream=True, timeout=60), + ] + for expected_call in expected_calls: + assert expected_call in mock_get.call_args_list + assert len(mock_get.call_args_list) == len(expected_calls) + + +def test_validate_koji_tag_wrong_tag_arg_during_programming(): + """ Test that we fail on a wrong param name (non-existing one) due to + programming error. """ + + @utils.validate_koji_tag("wrong_tag_arg") + def validate_koji_tag_programming_error(good_tag_arg, other_arg): + pass + + with pytest.raises(ProgrammingError): + validate_koji_tag_programming_error("dummy", "other_val") + + +def test_validate_koji_tag_bad_tag_value(): + """ Test that we fail on a bad tag value. """ + + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_bad_tag_value(tag_arg): + pass + + with pytest.raises(ValidationError): + validate_koji_tag_bad_tag_value("forbiddentagprefix-foo") + + +def test_validate_koji_tag_bad_tag_value_in_list(): + """ Test that we fail on a list containing bad tag value. """ + + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_bad_tag_value_in_list(tag_arg): + pass + + with pytest.raises(ValidationError): + validate_koji_tag_bad_tag_value_in_list(["module-foo", "forbiddentagprefix-bar"]) + + +def test_validate_koji_tag_good_tag_value(): + """ Test that we pass on a good tag value. """ + + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_good_tag_value(tag_arg): + return True + + assert validate_koji_tag_good_tag_value("module-foo") is True + + +def test_validate_koji_tag_good_tag_values_in_list(): + """ Test that we pass on a list of good tag values. 
""" + + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_good_tag_values_in_list(tag_arg): + return True + + assert validate_koji_tag_good_tag_values_in_list(["module-foo", "module-bar"]) is True + + +def test_validate_koji_tag_good_tag_value_in_dict(): + """ Test that we pass on a dict arg with default key + and a good value. """ + + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_good_tag_value_in_dict(tag_arg): + return True + + assert validate_koji_tag_good_tag_value_in_dict({"name": "module-foo"}) is True + + +def test_validate_koji_tag_good_tag_value_in_dict_nondefault_key(): + """ Test that we pass on a dict arg with non-default key + and a good value. """ + + @utils.validate_koji_tag("tag_arg", dict_key="nondefault") + def validate_koji_tag_good_tag_value_in_dict_nondefault_key(tag_arg): + return True + + assert ( + validate_koji_tag_good_tag_value_in_dict_nondefault_key({"nondefault": "module-foo"}) + is True + ) + + +def test_validate_koji_tag_double_trouble_good(): + """ Test that we pass on a list of tags that are good. """ + + expected = "foo" + + @utils.validate_koji_tag(["tag_arg1", "tag_arg2"]) + def validate_koji_tag_double_trouble(tag_arg1, tag_arg2): + return expected + + actual = validate_koji_tag_double_trouble("module-1", "module-2") + assert actual == expected + + +def test_validate_koji_tag_double_trouble_bad(): + """ Test that we fail on a list of tags that are bad. """ + + @utils.validate_koji_tag(["tag_arg1", "tag_arg2"]) + def validate_koji_tag_double_trouble(tag_arg1, tag_arg2): + pass + + with pytest.raises(ValidationError): + validate_koji_tag_double_trouble("module-1", "BADNEWS-2") + + +def test_validate_koji_tag_is_None(): + """ Test that we fail on a tag which is None. 
""" + + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_is_None(tag_arg): + pass + + with pytest.raises(ValidationError) as cm: + validate_koji_tag_is_None(None) + assert str(cm.value).endswith(" No value provided.") is True + + +@patch( + "module_build_service.config.Config.allowed_privileged_module_names", + new_callable=PropertyMock, + return_value=["testmodule"], +) +def test_validate_koji_tag_previleged_module_name(conf_apmn): + @utils.validate_koji_tag("tag_arg") + def validate_koji_tag_priv_mod_name(self, tag_arg): + pass + + builder = MagicMock() + builder.module_str = 'testmodule' + validate_koji_tag_priv_mod_name(builder, "abc") + + +def test_get_rpm_release_mse(): + init_data(contexts=True) + + build_one = models.ModuleBuild.get_by_id(db_session, 2) + release_one = utils.get_rpm_release(db_session, build_one) + assert release_one == "module+2+b8645bbb" + + build_two = models.ModuleBuild.get_by_id(db_session, 3) + release_two = utils.get_rpm_release(db_session, build_two) + assert release_two == "module+2+17e35784" + + +def test_get_rpm_release_platform_stream(): + scheduler_init_data(1) + build_one = models.ModuleBuild.get_by_id(db_session, 2) + release = utils.get_rpm_release(db_session, build_one) + assert release == "module+f28+2+814cfa39" + + +def test_get_rpm_release_platform_stream_override(): + scheduler_init_data(1) + + # Set the disttag_marking override on the platform + platform = ( + db_session.query(models.ModuleBuild) + .filter_by(name="platform", stream="f28") + .first() + ) + platform_mmd = platform.mmd() + platform_xmd = platform_mmd.get_xmd() + platform_xmd["mbs"]["disttag_marking"] = "fedora28" + platform_mmd.set_xmd(platform_xmd) + platform.modulemd = mmd_to_str(platform_mmd) + db_session.add(platform) + db_session.commit() + + build_one = models.ModuleBuild.get_by_id(db_session, 2) + release = utils.get_rpm_release(db_session, build_one) + assert release == "module+fedora28+2+814cfa39" + + +@patch( + 
"module_build_service.config.Config.allowed_privileged_module_names", + new_callable=PropertyMock, + return_value=["build"], +) +def test_get_rpm_release_metadata_br_stream_override(mock_admmn): + """ + Test that when a module buildrequires a module in conf.allowed_privileged_module_names, + and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag. + """ + scheduler_init_data(1) + metadata_mmd = load_mmd(read_staged_data("build_metadata_module")) + import_mmd(db_session, metadata_mmd) + + build_one = models.ModuleBuild.get_by_id(db_session, 2) + mmd = build_one.mmd() + deps = mmd.get_dependencies()[0] + deps.add_buildtime_stream("build", "product1.2") + xmd = mmd.get_xmd() + xmd["mbs"]["buildrequires"]["build"] = { + "filtered_rpms": [], + "ref": "virtual", + "stream": "product1.2", + "version": "1", + "context": "00000000", + } + mmd.set_xmd(xmd) + build_one.modulemd = mmd_to_str(mmd) + db_session.add(build_one) + db_session.commit() + + release = utils.get_rpm_release(db_session, build_one) + assert release == "module+product12+2+814cfa39" + + +def test_get_rpm_release_mse_scratch(): + init_data(contexts=True, scratch=True) + + build_one = models.ModuleBuild.get_by_id(db_session, 2) + release_one = utils.get_rpm_release(db_session, build_one) + assert release_one == "scrmod+2+b8645bbb" + + build_two = models.ModuleBuild.get_by_id(db_session, 3) + release_two = utils.get_rpm_release(db_session, build_two) + assert release_two == "scrmod+2+17e35784" + + +def test_get_rpm_release_platform_stream_scratch(): + scheduler_init_data(1, scratch=True) + build_one = models.ModuleBuild.get_by_id(db_session, 2) + release = utils.get_rpm_release(db_session, build_one) + assert release == "scrmod+f28+2+814cfa39" diff --git a/tests/test_builder/test_koji.py b/tests/test_builder/test_koji.py index 27ff190c..f3763565 100644 --- a/tests/test_builder/test_koji.py +++ b/tests/test_builder/test_koji.py @@ -14,11 +14,11 @@ import 
module_build_service.messaging import module_build_service.scheduler.handlers.repos import module_build_service.models from module_build_service import conf, Modulemd +from module_build_service.common.utils import mmd_to_str from module_build_service.db_session import db_session from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder from module_build_service.builder import GenericBuilder from module_build_service.scheduler import events -from module_build_service.utils.general import mmd_to_str from tests import init_data, clean_database, make_module_in_db @@ -1030,3 +1030,21 @@ class TestGetDistTagSRPM: # Conflicting ursine RPMs for nevr in ["pizza-0:4.0-1.fc32", "spaghetti-0:3.0-1.fc32"]: assert KojiModuleBuilder.format_conflicts_line(nevr) + "\n" in content + + +def test_generate_koji_tag_in_nsvc_format(): + name, stream, version, context = ("testmodule", "master", "20170816080815", "37c6c57") + + tag = KojiModuleBuilder.generate_koji_tag(name, stream, version, context) + + assert tag == "module-testmodule-master-20170816080815-37c6c57" + + +def test_generate_koji_tag_in_hash_format(): + name, version, context = ("testmodule", "20170816080815", "37c6c57") + stream = "this-is-a-stream-with-very-looooong-name" + "-blah" * 50 + nsvc_list = [name, stream, version, context] + + tag = KojiModuleBuilder.generate_koji_tag(*nsvc_list) + expected_tag = "module-1cf457d452e54dda" + assert tag == expected_tag diff --git a/tests/test_builder/test_mock.py b/tests/test_builder/test_mock.py index 37ff197d..10deae45 100644 --- a/tests/test_builder/test_mock.py +++ b/tests/test_builder/test_mock.py @@ -1,19 +1,21 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT import os -import mock -import koji import tempfile import shutil from textwrap import dedent +import mock import kobo.rpmlib +import koji -from module_build_service import conf +from module_build_service import conf, models +from module_build_service.common.utils import load_mmd, mmd_to_str 
from module_build_service.db_session import db_session from module_build_service.models import ModuleBuild, ComponentBuild -from module_build_service.builder.MockModuleBuilder import MockModuleBuilder -from module_build_service.utils import import_fake_base_module, mmd_to_str, load_mmd +from module_build_service.builder.MockModuleBuilder import ( + import_fake_base_module, import_builds_from_local_dnf_repos, MockModuleBuilder, +) from tests import clean_database, make_module_in_db, read_staged_data @@ -232,3 +234,69 @@ class TestMockModuleBuilderAddRepos: assert "repofile 3" in builder.yum_conf assert set(builder.enabled_modules) == {"foo:1", "app:1"} + + +class TestOfflineLocalBuilds: + def setup_method(self): + clean_database() + + def teardown_method(self): + clean_database() + + def test_import_fake_base_module(self): + import_fake_base_module("platform:foo:1:000000") + module_build = models.ModuleBuild.get_build_from_nsvc( + db_session, "platform", "foo", 1, "000000") + assert module_build + + mmd = module_build.mmd() + xmd = mmd.get_xmd() + assert xmd == { + "mbs": { + "buildrequires": {}, + "commit": "ref_000000", + "koji_tag": "repofile://", + "mse": "true", + "requires": {}, + } + } + + assert set(mmd.get_profile_names()) == {"buildroot", "srpm-buildroot"} + + @mock.patch( + "module_build_service.builder.MockModuleBuilder.open", + create=True, + new_callable=mock.mock_open, + ) + def test_import_builds_from_local_dnf_repos(self, patched_open): + with mock.patch("dnf.Base") as dnf_base: + repo = mock.MagicMock() + repo.repofile = "/etc/yum.repos.d/foo.repo" + mmd = load_mmd(read_staged_data("formatted_testmodule")) + repo.get_metadata_content.return_value = mmd_to_str(mmd) + base = dnf_base.return_value + base.repos = {"reponame": repo} + patched_open.return_value.readlines.return_value = ("FOO=bar", "PLATFORM_ID=platform:x") + + import_builds_from_local_dnf_repos() + + base.read_all_repos.assert_called_once() + repo.load.assert_called_once() + 
repo.get_metadata_content.assert_called_once_with("modules") + + module_build = models.ModuleBuild.get_build_from_nsvc( + db_session, "testmodule", "master", 20180205135154, "9c690d0e") + assert module_build + assert module_build.koji_tag == "repofile:///etc/yum.repos.d/foo.repo" + + module_build = models.ModuleBuild.get_build_from_nsvc( + db_session, "platform", "x", 1, "000000") + assert module_build + + def test_import_builds_from_local_dnf_repos_platform_id(self): + with mock.patch("dnf.Base"): + import_builds_from_local_dnf_repos("platform:y") + + module_build = models.ModuleBuild.get_build_from_nsvc( + db_session, "platform", "y", 1, "000000") + assert module_build diff --git a/tests/test_common/test_utils.py b/tests/test_common/test_utils.py new file mode 100644 index 00000000..f7c10f9b --- /dev/null +++ b/tests/test_common/test_utils.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: MIT +import pytest + +from module_build_service import models +from module_build_service.common.utils import import_mmd, load_mmd +from module_build_service.db_session import db_session +from module_build_service.errors import UnprocessableEntity +from tests import clean_database, read_staged_data + + +@pytest.mark.parametrize("context", ["c1", None]) +def test_import_mmd_contexts(context): + mmd = load_mmd(read_staged_data("formatted_testmodule")) + mmd.set_context(context) + + xmd = mmd.get_xmd() + xmd["mbs"]["koji_tag"] = "foo" + mmd.set_xmd(xmd) + + build, msgs = import_mmd(db_session, mmd) + + mmd_context = build.mmd().get_context() + if context: + assert mmd_context == context + assert build.context == context + else: + assert mmd_context == models.DEFAULT_MODULE_CONTEXT + assert build.context == models.DEFAULT_MODULE_CONTEXT + + +def test_import_mmd_multiple_dependencies(): + mmd = load_mmd(read_staged_data("formatted_testmodule")) + mmd.add_dependencies(mmd.get_dependencies()[0].copy()) + + expected_error = "The imported module's dependencies list 
should contain just one element" + with pytest.raises(UnprocessableEntity) as e: + import_mmd(db_session, mmd) + assert str(e.value) == expected_error + + +def test_import_mmd_no_xmd_buildrequires(): + mmd = load_mmd(read_staged_data("formatted_testmodule")) + xmd = mmd.get_xmd() + del xmd["mbs"]["buildrequires"] + mmd.set_xmd(xmd) + + expected_error = ( + "The imported module buildrequires other modules, but the metadata in the " + 'xmd["mbs"]["buildrequires"] dictionary is missing entries' + ) + with pytest.raises(UnprocessableEntity) as e: + import_mmd(db_session, mmd) + assert str(e.value) == expected_error + + +def test_import_mmd_minimal_xmd_from_local_repository(): + mmd = load_mmd(read_staged_data("formatted_testmodule")) + xmd = mmd.get_xmd() + xmd["mbs"] = {} + xmd["mbs"]["koji_tag"] = "repofile:///etc/yum.repos.d/fedora-modular.repo" + xmd["mbs"]["mse"] = True + xmd["mbs"]["commit"] = "unknown" + mmd.set_xmd(xmd) + + build, msgs = import_mmd(db_session, mmd, False) + assert build.name == mmd.get_module_name() + + +@pytest.mark.parametrize( + "stream, disttag_marking, error_msg", + ( + ("f28", None, None), + ("f28", "fedora28", None), + ("f-28", "f28", None), + ("f-28", None, "The stream cannot contain a dash unless disttag_marking is set"), + ("f28", "f-28", "The disttag_marking cannot contain a dash"), + ("f-28", "fedora-28", "The disttag_marking cannot contain a dash"), + ), +) +def test_import_mmd_base_module(stream, disttag_marking, error_msg): + clean_database(add_platform_module=False) + mmd = load_mmd(read_staged_data("platform")) + mmd = mmd.copy(mmd.get_module_name(), stream) + + if disttag_marking: + xmd = mmd.get_xmd() + xmd["mbs"]["disttag_marking"] = disttag_marking + mmd.set_xmd(xmd) + + if error_msg: + with pytest.raises(UnprocessableEntity, match=error_msg): + import_mmd(db_session, mmd) + else: + import_mmd(db_session, mmd) + + +def test_import_mmd_remove_dropped_virtual_streams(): + mmd = 
load_mmd(read_staged_data("formatted_testmodule")) + + # Add some virtual streams + xmd = mmd.get_xmd() + xmd["mbs"]["virtual_streams"] = ["f28", "f29", "f30"] + mmd.set_xmd(xmd) + + # Import mmd into database to simulate the next step to reimport a module + import_mmd(db_session, mmd) + + # Now, remove some virtual streams from module metadata + xmd = mmd.get_xmd() + xmd["mbs"]["virtual_streams"] = ["f28", "f29"] # Note that, f30 is removed + mmd.set_xmd(xmd) + + # Test import modulemd again and the f30 should be removed from database. + module_build, _ = import_mmd(db_session, mmd) + + db_session.refresh(module_build) + assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams) + assert 0 == db_session.query(models.VirtualStream).filter_by(name="f30").count() + + +def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules(): + mmd = load_mmd(read_staged_data("formatted_testmodule")) + # Add some virtual streams to this module metadata + xmd = mmd.get_xmd() + xmd["mbs"]["virtual_streams"] = ["f28", "f29", "f30"] + mmd.set_xmd(xmd) + import_mmd(db_session, mmd) + + # Import another module which has overlapping virtual streams + another_mmd = load_mmd(read_staged_data("formatted_testmodule-more-components")) + # Add some virtual streams to this module metadata + xmd = another_mmd.get_xmd() + xmd["mbs"]["virtual_streams"] = ["f29", "f30"] + another_mmd.set_xmd(xmd) + another_module_build, _ = import_mmd( + db_session, another_mmd) + + # Now, remove f30 from mmd + xmd = mmd.get_xmd() + xmd["mbs"]["virtual_streams"] = ["f28", "f29"] + mmd.set_xmd(xmd) + + # Reimport formatted_testmodule again + module_build, _ = import_mmd(db_session, mmd) + + db_session.refresh(module_build) + assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams) + + # The overlapped f30 should be still there. 
+ db_session.refresh(another_module_build) + assert ["f29", "f30"] == sorted(item.name for item in another_module_build.virtual_streams) diff --git a/tests/test_content_generator.py b/tests/test_content_generator.py index 6a2feeb4..c184691b 100644 --- a/tests/test_content_generator.py +++ b/tests/test_content_generator.py @@ -7,10 +7,9 @@ import json import os from os import path -import module_build_service.messaging from module_build_service import models, conf, build_logs, Modulemd +from module_build_service.common.utils import load_mmd, load_mmd_file, mmd_to_str from module_build_service.db_session import db_session -from module_build_service.utils.general import mmd_to_str from mock import patch, Mock, call, mock_open import kobo.rpmlib @@ -128,10 +127,10 @@ class TestBuild: # For devel, only check that the name has -devel suffix. assert ret["build"]["name"] == "nginx-devel" assert ret["build"]["extra"]["typeinfo"]["module"]["name"] == "nginx-devel" - new_mmd = module_build_service.utils.load_mmd( + new_mmd = load_mmd( ret["build"]["extra"]["typeinfo"]["module"]["modulemd_str"]) assert new_mmd.get_module_name().endswith("-devel") - new_mmd = module_build_service.utils.load_mmd_file("%s/modulemd.txt" % file_dir) + new_mmd = load_mmd_file("%s/modulemd.txt" % file_dir) assert new_mmd.get_module_name().endswith("-devel") # Ensure an anonymous Koji session works @@ -951,7 +950,7 @@ class TestBuild: def test_finalize_mmd_devel(self): self.cg.devel = True mmd = self.cg.module.mmd() - new_mmd = module_build_service.utils.load_mmd(self.cg._finalize_mmd("x86_64")) + new_mmd = load_mmd(self.cg._finalize_mmd("x86_64")) # Check that -devel suffix is set. 
assert new_mmd.get_module_name().endswith("-devel") diff --git a/tests/test_manage.py b/tests/test_manage.py index 3228d10f..49dbc1a5 100644 --- a/tests/test_manage.py +++ b/tests/test_manage.py @@ -7,7 +7,7 @@ from module_build_service import app, models from module_build_service.db_session import db_session from module_build_service.manage import manager_wrapper, retire from module_build_service.models import BUILD_STATES, ModuleBuild -from module_build_service.utils.general import deps_to_dict +from module_build_service.web.utils import deps_to_dict from tests import clean_database, staged_data_filename diff --git a/tests/test_models/test_models.py b/tests/test_models/test_models.py index f775cb8d..9d23e90a 100644 --- a/tests/test_models/test_models.py +++ b/tests/test_models/test_models.py @@ -4,9 +4,9 @@ import pytest from mock import patch from module_build_service import conf +from module_build_service.common.utils import load_mmd, mmd_to_str from module_build_service.db_session import db_session from module_build_service.models import ComponentBuild, ComponentBuildTrace, ModuleBuild -from module_build_service.utils.general import mmd_to_str, load_mmd from tests import init_data as init_data_contexts, clean_database, read_staged_data from tests import make_module_in_db, module_build_from_modulemd diff --git a/tests/test_resolver/test_db.py b/tests/test_resolver/test_db.py index 4258f63f..42fbc212 100644 --- a/tests/test_resolver/test_db.py +++ b/tests/test_resolver/test_db.py @@ -8,7 +8,7 @@ import pytest import module_build_service.resolver as mbs_resolver from module_build_service import conf, models, utils, Modulemd -from module_build_service.utils import import_mmd, mmd_to_str, load_mmd +from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str from module_build_service.models import ModuleBuild from module_build_service.errors import UnprocessableEntity from module_build_service.db_session import db_session diff --git 
a/tests/test_resolver/test_koji.py b/tests/test_resolver/test_koji.py index 008b4664..a0f930fe 100644 --- a/tests/test_resolver/test_koji.py +++ b/tests/test_resolver/test_koji.py @@ -5,11 +5,11 @@ from mock import patch, MagicMock from datetime import datetime from module_build_service import conf +from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str import module_build_service.resolver as mbs_resolver import tests from module_build_service.db_session import db_session from module_build_service.models import ModuleBuild, BUILD_STATES -from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd @pytest.mark.usefixtures("reuse_component_init_data") diff --git a/tests/test_resolver/test_local.py b/tests/test_resolver/test_local.py index 824f83c6..0ca11b48 100644 --- a/tests/test_resolver/test_local.py +++ b/tests/test_resolver/test_local.py @@ -4,10 +4,10 @@ import pytest from datetime import datetime from module_build_service import conf +from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str import module_build_service.resolver as mbs_resolver from module_build_service.db_session import db_session from module_build_service.models import ModuleBuild -from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd import tests diff --git a/tests/test_resolver/test_mbs.py b/tests/test_resolver/test_mbs.py index 25bd610e..31a86ac1 100644 --- a/tests/test_resolver/test_mbs.py +++ b/tests/test_resolver/test_mbs.py @@ -3,10 +3,10 @@ from mock import patch, PropertyMock, Mock, call from module_build_service import app, conf +from module_build_service.common.utils import load_mmd, mmd_to_str import module_build_service.resolver as mbs_resolver import module_build_service.utils from module_build_service.db_session import db_session -from module_build_service.utils.general import mmd_to_str import module_build_service.models import tests @@ -188,7 +188,7 @@ class TestMBSModule: self, 
mock_session, testmodule_mmd_9c690d0e ): - mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e) + mmd = load_mmd(testmodule_mmd_9c690d0e) # Wipe out the dependencies for deps in mmd.get_dependencies(): mmd.remove_dependencies(deps) diff --git a/tests/test_scheduler/test_batches.py b/tests/test_scheduler/test_batches.py index 9b722f2e..29db790a 100644 --- a/tests/test_scheduler/test_batches.py +++ b/tests/test_scheduler/test_batches.py @@ -10,7 +10,7 @@ from module_build_service.builder import GenericBuilder from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder from module_build_service.scheduler import events from module_build_service.scheduler.batches import start_build_component, start_next_batch_build -from module_build_service.utils import validate_koji_tag +from module_build_service.builder.utils import validate_koji_tag class DummyModuleBuilder(GenericBuilder): diff --git a/tests/test_scheduler/test_default_modules.py b/tests/test_scheduler/test_default_modules.py index 185a8edb..ee8c0312 100644 --- a/tests/test_scheduler/test_default_modules.py +++ b/tests/test_scheduler/test_default_modules.py @@ -8,12 +8,12 @@ from mock import call, Mock, patch, PropertyMock import pytest from module_build_service import conf +from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str from module_build_service.db_session import db_session from module_build_service.errors import UnprocessableEntity from module_build_service.models import ModuleBuild from module_build_service.scheduler import default_modules -from module_build_service.utils.general import load_mmd, mmd_to_str -from tests import clean_database, make_module_in_db, read_staged_data, import_mmd +from tests import clean_database, make_module_in_db, read_staged_data @patch("module_build_service.scheduler.default_modules.handle_collisions_with_base_module_rpms") diff --git a/tests/test_scheduler/test_module_init.py b/tests/test_scheduler/test_module_init.py 
index 1b605fd7..d2067193 100644 --- a/tests/test_scheduler/test_module_init.py +++ b/tests/test_scheduler/test_module_init.py @@ -4,6 +4,7 @@ import os from mock import patch, PropertyMock +from module_build_service.common.utils import load_mmd, mmd_to_str from tests import clean_database, read_staged_data from tests.test_views.test_views import FakeSCM import module_build_service.messaging @@ -11,8 +12,6 @@ import module_build_service.scheduler.handlers.modules from module_build_service import build_logs, conf from module_build_service.db_session import db_session from module_build_service.models import ModuleBuild -from module_build_service.utils.general import mmd_to_str, load_mmd -# from module_build_service.scheduler.events import MBSModule class TestModuleInit: diff --git a/tests/test_utils/test_utils.py b/tests/test_utils/test_utils.py index 46be782e..b6239753 100644 --- a/tests/test_utils/test_utils.py +++ b/tests/test_utils/test_utils.py @@ -9,13 +9,13 @@ from datetime import datetime from werkzeug.datastructures import FileStorage from mock import patch from sqlalchemy.orm.session import make_transient -from module_build_service.utils.general import load_mmd_file, mmd_to_str + +from module_build_service.common.utils import import_mmd, load_mmd, load_mmd_file, mmd_to_str import module_build_service.utils import module_build_service.scm from module_build_service import models, conf -from module_build_service.errors import ProgrammingError, ValidationError, UnprocessableEntity +from module_build_service.errors import ValidationError, UnprocessableEntity from module_build_service.utils.reuse import get_reusable_module, get_reusable_component -from module_build_service.utils.general import load_mmd from module_build_service.utils.submit import format_mmd from tests import ( clean_database, @@ -386,231 +386,6 @@ class TestUtils: r = module_build_service.utils.get_build_arches(mmd, conf) assert r == ["x86_64", "i686"] - @pytest.mark.parametrize("context", ["c1", 
None]) - def test_import_mmd_contexts(self, context): - mmd = load_mmd(read_staged_data("formatted_testmodule")) - mmd.set_context(context) - - xmd = mmd.get_xmd() - xmd["mbs"]["koji_tag"] = "foo" - mmd.set_xmd(xmd) - - build, msgs = module_build_service.utils.import_mmd(db_session, mmd) - - mmd_context = build.mmd().get_context() - if context: - assert mmd_context == context - assert build.context == context - else: - assert mmd_context == models.DEFAULT_MODULE_CONTEXT - assert build.context == models.DEFAULT_MODULE_CONTEXT - - def test_import_mmd_multiple_dependencies(self): - mmd = load_mmd(read_staged_data("formatted_testmodule")) - mmd.add_dependencies(mmd.get_dependencies()[0].copy()) - - expected_error = "The imported module's dependencies list should contain just one element" - with pytest.raises(UnprocessableEntity) as e: - module_build_service.utils.import_mmd(db_session, mmd) - assert str(e.value) == expected_error - - def test_import_mmd_no_xmd_buildrequires(self): - mmd = load_mmd(read_staged_data("formatted_testmodule")) - xmd = mmd.get_xmd() - del xmd["mbs"]["buildrequires"] - mmd.set_xmd(xmd) - - expected_error = ( - "The imported module buildrequires other modules, but the metadata in the " - 'xmd["mbs"]["buildrequires"] dictionary is missing entries' - ) - with pytest.raises(UnprocessableEntity) as e: - module_build_service.utils.import_mmd(db_session, mmd) - assert str(e.value) == expected_error - - def test_import_mmd_minimal_xmd_from_local_repository(self): - mmd = load_mmd(read_staged_data("formatted_testmodule")) - xmd = mmd.get_xmd() - xmd["mbs"] = {} - xmd["mbs"]["koji_tag"] = "repofile:///etc/yum.repos.d/fedora-modular.repo" - xmd["mbs"]["mse"] = True - xmd["mbs"]["commit"] = "unknown" - mmd.set_xmd(xmd) - - build, msgs = module_build_service.utils.import_mmd(db_session, mmd, False) - assert build.name == mmd.get_module_name() - - @pytest.mark.parametrize( - "stream, disttag_marking, error_msg", - ( - ("f28", None, None), - ("f28", 
"fedora28", None), - ("f-28", "f28", None), - ("f-28", None, "The stream cannot contain a dash unless disttag_marking is set"), - ("f28", "f-28", "The disttag_marking cannot contain a dash"), - ("f-28", "fedora-28", "The disttag_marking cannot contain a dash"), - ), - ) - def test_import_mmd_base_module(self, stream, disttag_marking, error_msg): - clean_database(add_platform_module=False) - mmd = load_mmd(read_staged_data("platform")) - mmd = mmd.copy(mmd.get_module_name(), stream) - - if disttag_marking: - xmd = mmd.get_xmd() - xmd["mbs"]["disttag_marking"] = disttag_marking - mmd.set_xmd(xmd) - - if error_msg: - with pytest.raises(UnprocessableEntity, match=error_msg): - module_build_service.utils.import_mmd(db_session, mmd) - else: - module_build_service.utils.import_mmd(db_session, mmd) - - def test_import_mmd_remove_dropped_virtual_streams(self): - mmd = load_mmd(read_staged_data("formatted_testmodule")) - - # Add some virtual streams - xmd = mmd.get_xmd() - xmd["mbs"]["virtual_streams"] = ["f28", "f29", "f30"] - mmd.set_xmd(xmd) - - # Import mmd into database to simulate the next step to reimport a module - module_build_service.utils.general.import_mmd(db_session, mmd) - - # Now, remove some virtual streams from module metadata - xmd = mmd.get_xmd() - xmd["mbs"]["virtual_streams"] = ["f28", "f29"] # Note that, f30 is removed - mmd.set_xmd(xmd) - - # Test import modulemd again and the f30 should be removed from database. 
- module_build, _ = module_build_service.utils.general.import_mmd(db_session, mmd) - - db_session.refresh(module_build) - assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams) - assert 0 == db_session.query(models.VirtualStream).filter_by(name="f30").count() - - def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules(self): - mmd = load_mmd(read_staged_data("formatted_testmodule")) - # Add some virtual streams to this module metadata - xmd = mmd.get_xmd() - xmd["mbs"]["virtual_streams"] = ["f28", "f29", "f30"] - mmd.set_xmd(xmd) - module_build_service.utils.general.import_mmd(db_session, mmd) - - # Import another module which has overlapping virtual streams - another_mmd = load_mmd(read_staged_data("formatted_testmodule-more-components")) - # Add some virtual streams to this module metadata - xmd = another_mmd.get_xmd() - xmd["mbs"]["virtual_streams"] = ["f29", "f30"] - another_mmd.set_xmd(xmd) - another_module_build, _ = module_build_service.utils.general.import_mmd( - db_session, another_mmd) - - # Now, remove f30 from mmd - xmd = mmd.get_xmd() - xmd["mbs"]["virtual_streams"] = ["f28", "f29"] - mmd.set_xmd(xmd) - - # Reimport formatted_testmodule again - module_build, _ = module_build_service.utils.general.import_mmd(db_session, mmd) - - db_session.refresh(module_build) - assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams) - - # The overlapped f30 should be still there. 
- db_session.refresh(another_module_build) - assert ["f29", "f30"] == sorted(item.name for item in another_module_build.virtual_streams) - - def test_get_rpm_release_mse(self): - init_data(contexts=True) - - build_one = models.ModuleBuild.get_by_id(db_session, 2) - release_one = module_build_service.utils.get_rpm_release(db_session, build_one) - assert release_one == "module+2+b8645bbb" - - build_two = models.ModuleBuild.get_by_id(db_session, 3) - release_two = module_build_service.utils.get_rpm_release(db_session, build_two) - assert release_two == "module+2+17e35784" - - def test_get_rpm_release_platform_stream(self): - scheduler_init_data(1) - build_one = models.ModuleBuild.get_by_id(db_session, 2) - release = module_build_service.utils.get_rpm_release(db_session, build_one) - assert release == "module+f28+2+814cfa39" - - def test_get_rpm_release_platform_stream_override(self): - scheduler_init_data(1) - - # Set the disttag_marking override on the platform - platform = ( - db_session.query(models.ModuleBuild) - .filter_by(name="platform", stream="f28") - .first() - ) - platform_mmd = platform.mmd() - platform_xmd = platform_mmd.get_xmd() - platform_xmd["mbs"]["disttag_marking"] = "fedora28" - platform_mmd.set_xmd(platform_xmd) - platform.modulemd = mmd_to_str(platform_mmd) - db_session.add(platform) - db_session.commit() - - build_one = models.ModuleBuild.get_by_id(db_session, 2) - release = module_build_service.utils.get_rpm_release(db_session, build_one) - assert release == "module+fedora28+2+814cfa39" - - @patch( - "module_build_service.config.Config.allowed_privileged_module_names", - new_callable=mock.PropertyMock, - return_value=["build"], - ) - def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn): - """ - Test that when a module buildrequires a module in conf.allowed_privileged_module_names, - and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag. 
- """ - scheduler_init_data(1) - metadata_mmd = load_mmd(read_staged_data("build_metadata_module")) - module_build_service.utils.import_mmd(db_session, metadata_mmd) - - build_one = models.ModuleBuild.get_by_id(db_session, 2) - mmd = build_one.mmd() - deps = mmd.get_dependencies()[0] - deps.add_buildtime_stream("build", "product1.2") - xmd = mmd.get_xmd() - xmd["mbs"]["buildrequires"]["build"] = { - "filtered_rpms": [], - "ref": "virtual", - "stream": "product1.2", - "version": "1", - "context": "00000000", - } - mmd.set_xmd(xmd) - build_one.modulemd = mmd_to_str(mmd) - db_session.add(build_one) - db_session.commit() - - release = module_build_service.utils.get_rpm_release(db_session, build_one) - assert release == "module+product12+2+814cfa39" - - def test_get_rpm_release_mse_scratch(self): - init_data(contexts=True, scratch=True) - - build_one = models.ModuleBuild.get_by_id(db_session, 2) - release_one = module_build_service.utils.get_rpm_release(db_session, build_one) - assert release_one == "scrmod+2+b8645bbb" - - build_two = models.ModuleBuild.get_by_id(db_session, 3) - release_two = module_build_service.utils.get_rpm_release(db_session, build_two) - assert release_two == "scrmod+2+17e35784" - - def test_get_rpm_release_platform_stream_scratch(self): - scheduler_init_data(1, scratch=True) - build_one = models.ModuleBuild.get_by_id(db_session, 2) - release = module_build_service.utils.get_rpm_release(db_session, build_one) - assert release == "scrmod+f28+2+814cfa39" - @patch("module_build_service.utils.submit.get_build_arches") def test_record_module_build_arches(self, get_build_arches): get_build_arches.return_value = ["x86_64", "i686"] @@ -695,125 +470,6 @@ class TestUtils: rv = get_reusable_component(new_module, "llvm", previous_module_build=old_module) assert rv.package == "llvm" - def test_validate_koji_tag_wrong_tag_arg_during_programming(self): - """ Test that we fail on a wrong param name (non-existing one) due to - programming error. 
""" - - @module_build_service.utils.validate_koji_tag("wrong_tag_arg") - def validate_koji_tag_programming_error(good_tag_arg, other_arg): - pass - - with pytest.raises(ProgrammingError): - validate_koji_tag_programming_error("dummy", "other_val") - - def test_validate_koji_tag_bad_tag_value(self): - """ Test that we fail on a bad tag value. """ - - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_bad_tag_value(tag_arg): - pass - - with pytest.raises(ValidationError): - validate_koji_tag_bad_tag_value("forbiddentagprefix-foo") - - def test_validate_koji_tag_bad_tag_value_in_list(self): - """ Test that we fail on a list containing bad tag value. """ - - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_bad_tag_value_in_list(tag_arg): - pass - - with pytest.raises(ValidationError): - validate_koji_tag_bad_tag_value_in_list(["module-foo", "forbiddentagprefix-bar"]) - - def test_validate_koji_tag_good_tag_value(self): - """ Test that we pass on a good tag value. """ - - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_good_tag_value(tag_arg): - return True - - assert validate_koji_tag_good_tag_value("module-foo") is True - - def test_validate_koji_tag_good_tag_values_in_list(self): - """ Test that we pass on a list of good tag values. """ - - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_good_tag_values_in_list(tag_arg): - return True - - assert validate_koji_tag_good_tag_values_in_list(["module-foo", "module-bar"]) is True - - def test_validate_koji_tag_good_tag_value_in_dict(self): - """ Test that we pass on a dict arg with default key - and a good value. 
""" - - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_good_tag_value_in_dict(tag_arg): - return True - - assert validate_koji_tag_good_tag_value_in_dict({"name": "module-foo"}) is True - - def test_validate_koji_tag_good_tag_value_in_dict_nondefault_key(self): - """ Test that we pass on a dict arg with non-default key - and a good value. """ - - @module_build_service.utils.validate_koji_tag("tag_arg", dict_key="nondefault") - def validate_koji_tag_good_tag_value_in_dict_nondefault_key(tag_arg): - return True - - assert ( - validate_koji_tag_good_tag_value_in_dict_nondefault_key({"nondefault": "module-foo"}) - is True - ) - - def test_validate_koji_tag_double_trouble_good(self): - """ Test that we pass on a list of tags that are good. """ - - expected = "foo" - - @module_build_service.utils.validate_koji_tag(["tag_arg1", "tag_arg2"]) - def validate_koji_tag_double_trouble(tag_arg1, tag_arg2): - return expected - - actual = validate_koji_tag_double_trouble("module-1", "module-2") - assert actual == expected - - def test_validate_koji_tag_double_trouble_bad(self): - """ Test that we fail on a list of tags that are bad. """ - - @module_build_service.utils.validate_koji_tag(["tag_arg1", "tag_arg2"]) - def validate_koji_tag_double_trouble(tag_arg1, tag_arg2): - pass - - with pytest.raises(ValidationError): - validate_koji_tag_double_trouble("module-1", "BADNEWS-2") - - def test_validate_koji_tag_is_None(self): - """ Test that we fail on a tag which is None. 
""" - - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_is_None(tag_arg): - pass - - with pytest.raises(ValidationError) as cm: - validate_koji_tag_is_None(None) - assert str(cm.value).endswith(" No value provided.") is True - - @patch( - "module_build_service.config.Config.allowed_privileged_module_names", - new_callable=mock.PropertyMock, - return_value=["testmodule"], - ) - def test_validate_koji_tag_previleged_module_name(self, conf_apmn): - @module_build_service.utils.validate_koji_tag("tag_arg") - def validate_koji_tag_priv_mod_name(self, tag_arg): - pass - - builder = mock.MagicMock() - builder.module_str = 'testmodule' - validate_koji_tag_priv_mod_name(builder, "abc") - @patch("module_build_service.scm.SCM") def test_record_component_builds_duplicate_components(self, mocked_scm): # Mock for format_mmd to get components' latest ref @@ -1005,22 +661,6 @@ class TestUtils: assert build.time_modified == test_datetime - def test_generate_koji_tag_in_nsvc_format(self): - name, stream, version, context = ("testmodule", "master", "20170816080815", "37c6c57") - - tag = module_build_service.utils.generate_koji_tag(name, stream, version, context) - - assert tag == "module-testmodule-master-20170816080815-37c6c57" - - def test_generate_koji_tag_in_hash_format(self): - name, version, context = ("testmodule", "20170816080815", "37c6c57") - stream = "this-is-a-stream-with-very-looooong-name" + "-blah" * 50 - nsvc_list = [name, stream, version, context] - - tag = module_build_service.utils.generate_koji_tag(*nsvc_list) - expected_tag = "module-1cf457d452e54dda" - assert tag == expected_tag - @patch("module_build_service.utils.submit.requests") def test_pdc_eol_check(self, requests): """ Push mock pdc responses through the eol check function. 
""" @@ -1184,68 +824,6 @@ class TestLocalBuilds: assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results") -class TestOfflineLocalBuilds: - def setup_method(self): - clean_database() - - def teardown_method(self): - clean_database() - - def test_import_fake_base_module(self): - module_build_service.utils.import_fake_base_module("platform:foo:1:000000") - module_build = models.ModuleBuild.get_build_from_nsvc( - db_session, "platform", "foo", 1, "000000") - assert module_build - - mmd = module_build.mmd() - xmd = mmd.get_xmd() - assert xmd == { - "mbs": { - "buildrequires": {}, - "commit": "ref_000000", - "koji_tag": "repofile://", - "mse": "true", - "requires": {}, - } - } - - assert set(mmd.get_profile_names()) == {"buildroot", "srpm-buildroot"} - - @patch("module_build_service.utils.general.open", create=True, new_callable=mock.mock_open) - def test_import_builds_from_local_dnf_repos(self, patched_open): - with patch("dnf.Base") as dnf_base: - repo = mock.MagicMock() - repo.repofile = "/etc/yum.repos.d/foo.repo" - mmd = load_mmd(read_staged_data("formatted_testmodule")) - repo.get_metadata_content.return_value = mmd_to_str(mmd) - base = dnf_base.return_value - base.repos = {"reponame": repo} - patched_open.return_value.readlines.return_value = ("FOO=bar", "PLATFORM_ID=platform:x") - - module_build_service.utils.import_builds_from_local_dnf_repos() - - base.read_all_repos.assert_called_once() - repo.load.assert_called_once() - repo.get_metadata_content.assert_called_once_with("modules") - - module_build = models.ModuleBuild.get_build_from_nsvc( - db_session, "testmodule", "master", 20180205135154, "9c690d0e") - assert module_build - assert module_build.koji_tag == "repofile:///etc/yum.repos.d/foo.repo" - - module_build = models.ModuleBuild.get_build_from_nsvc( - db_session, "platform", "x", 1, "000000") - assert module_build - - def test_import_builds_from_local_dnf_repos_platform_id(self): - with patch("dnf.Base"): - 
module_build_service.utils.import_builds_from_local_dnf_repos("platform:y") - - module_build = models.ModuleBuild.get_build_from_nsvc( - db_session, "platform", "y", 1, "000000") - assert module_build - - @pytest.mark.usefixtures("reuse_component_init_data") class TestUtilsModuleReuse: @@ -1310,7 +888,7 @@ class TestUtilsModuleReuse: xmd = mmd.get_xmd() xmd["mbs"]["virtual_streams"] = ["fedora"] mmd.set_xmd(xmd) - platform_f29 = module_build_service.utils.import_mmd(db_session, mmd)[0] + platform_f29 = import_mmd(db_session, mmd)[0] # Create another copy of `testmodule:master` which should be reused, because its # stream version will be higher than the previous one. Also set its buildrequires @@ -1399,7 +977,7 @@ class TestUtilsModuleReuse: xmd = mmd.get_xmd() xmd["mbs"]["virtual_streams"] = ["fedora"] mmd.set_xmd(xmd) - platform_f27 = module_build_service.utils.import_mmd(db_session, mmd)[0] + platform_f27 = import_mmd(db_session, mmd)[0] # Change the reusable testmodule:master to buildrequire platform:f27. 
latest_module = db_session.query(models.ModuleBuild).filter_by( diff --git a/tests/test_utils/test_utils_mse.py b/tests/test_utils/test_utils_mse.py index 0a225057..094b36cd 100644 --- a/tests/test_utils/test_utils_mse.py +++ b/tests/test_utils/test_utils_mse.py @@ -4,6 +4,7 @@ from mock import patch, PropertyMock import pytest import module_build_service.utils +from module_build_service.common.utils import load_mmd from module_build_service import Modulemd, models from module_build_service.db_session import db_session from module_build_service.errors import StreamAmbigous @@ -470,7 +471,7 @@ class TestUtilsModuleStreamExpansion: def test__get_base_module_mmds(self): """Ensure the correct results are returned without duplicates.""" init_data(data_size=1, multiple_stream_versions=True) - mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2.yaml")) + mmd = load_mmd(read_staged_data("testmodule_v2.yaml")) deps = mmd.get_dependencies()[0] new_deps = Modulemd.Dependencies() for stream in deps.get_runtime_streams("platform"): @@ -494,7 +495,7 @@ class TestUtilsModuleStreamExpansion: def test__get_base_module_mmds_virtual_streams(self, virtual_streams): """Ensure the correct results are returned without duplicates.""" init_data(data_size=1, multiple_stream_versions=True) - mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2")) + mmd = load_mmd(read_staged_data("testmodule_v2")) deps = mmd.get_dependencies()[0] new_deps = Modulemd.Dependencies() for stream in deps.get_runtime_streams("platform"): @@ -538,7 +539,7 @@ class TestUtilsModuleStreamExpansion: db_session.add(platform) db_session.commit() - mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2")) + mmd = load_mmd(read_staged_data("testmodule_v2")) deps = mmd.get_dependencies()[0] new_deps = Modulemd.Dependencies() for stream in deps.get_runtime_streams("platform"): diff --git a/tests/test_views/test_views.py b/tests/test_views/test_views.py index 
ac3a80a7..2ed6f75c 100644 --- a/tests/test_views/test_views.py +++ b/tests/test_views/test_views.py @@ -18,6 +18,8 @@ import pytest import re import sqlalchemy +from module_build_service.builder.utils import get_rpm_release +from module_build_service.common.utils import load_mmd, import_mmd, mmd_to_str from tests import ( init_data, clean_database, @@ -33,9 +35,6 @@ from module_build_service.models import ModuleBuild, BUILD_STATES, ComponentBuil from module_build_service import app, version import module_build_service.config as mbs_config import module_build_service.utils.submit -from module_build_service.utils.general import ( - import_mmd, mmd_to_str, load_mmd, - get_rpm_release) user = ("Homer J. Simpson", {"packager"})