Split utils/submit.py

This moves the code used by the backend and API to common/submit.py,
the code used just by the API to web/submit.py, and the code used
just by the backend to scheduler/submit.py.
This commit is contained in:
mprahl
2020-01-03 15:51:19 -05:00
parent 85e3a2c496
commit 96b0c85927
23 changed files with 1617 additions and 1608 deletions

View File

@@ -101,7 +101,7 @@ Additionally, the imports should be ordered by standard library, third-party, th
import flask
import requests
import module_build_service.utils
import module_build_service.web
from module_build_service.errors import ValidationError

View File

@@ -14,9 +14,7 @@ import platform
from module_build_service import conf, log, Modulemd
from module_build_service.common.koji import get_session
from module_build_service.common.utils import import_mmd, mmd_to_str
import module_build_service.scm
import module_build_service.utils
from module_build_service.common.utils import import_mmd, load_mmd_file, mmd_to_str
from module_build_service.builder import GenericBuilder
from module_build_service.builder.utils import (
create_local_repo_from_koji_tag,
@@ -85,6 +83,101 @@ def import_fake_base_module(nsvc):
import_mmd(db_session, mmd, False)
def load_local_builds(local_build_nsvs):
    """
    Loads previously finished local module builds from conf.mock_resultsdir
    and imports them to database.

    :param local_build_nsvs: list of NSVs separated by ':' defining the modules
        to load from the mock_resultsdir. A single NSV string is also accepted.
    :raises RuntimeError: when an NSV cannot be parsed, no matching local build
        exists, or a build's metadata does not match its directory name.
    """
    if not local_build_nsvs:
        return

    # Accept a single NSV string for convenience.
    if not isinstance(local_build_nsvs, list):
        local_build_nsvs = [local_build_nsvs]

    # Get the list of all available local module builds.
    builds = []
    try:
        for d in os.listdir(conf.mock_resultsdir):
            m = re.match("^module-(.*)-([^-]*)-([0-9]+)$", d)
            if m:
                builds.append((m.group(1), m.group(2), int(m.group(3)), d))
    except OSError:
        # No resultsdir yet - there is simply nothing to load.
        pass

    # Sort with the biggest version first
    try:
        # py27
        builds.sort(lambda a, b: -cmp(a[2], b[2]))  # noqa: F821
    except TypeError:
        # py3
        builds.sort(key=lambda a: a[2], reverse=True)

    for nsv in local_build_nsvs:
        parts = nsv.split(":")
        if len(parts) < 1 or len(parts) > 3:
            raise RuntimeError(
                'The local build "{0}" couldn\'t be parsed into NAME[:STREAM[:VERSION]]'
                .format(nsv)
            )

        name = parts[0]
        stream = parts[1] if len(parts) > 1 else None
        version = int(parts[2]) if len(parts) > 2 else None

        # Builds are sorted newest-version-first, so the first match is the
        # newest build satisfying the optional stream/version constraints.
        found_build = None
        for build in builds:
            if name != build[0]:
                continue
            if stream is not None and stream != build[1]:
                continue
            if version is not None and version != build[2]:
                continue
            found_build = build
            break

        if not found_build:
            raise RuntimeError(
                'The local build "{0}" couldn\'t be found in "{1}"'.format(
                    nsv, conf.mock_resultsdir)
            )

        # Load the modulemd metadata.
        path = os.path.join(conf.mock_resultsdir, found_build[3], "results")
        mmd = load_mmd_file(os.path.join(path, "modules.yaml"))

        # Create ModuleBuild in database.
        module = models.ModuleBuild.create(
            db_session,
            conf,
            name=mmd.get_module_name(),
            stream=mmd.get_stream_name(),
            version=str(mmd.get_version()),
            context=mmd.get_context(),
            modulemd=mmd_to_str(mmd),
            scmurl="",
            username="mbs",
            publish_msg=False,
        )

        module.koji_tag = path
        module.state = models.BUILD_STATES["ready"]
        db_session.commit()

        # Sanity-check that the parsed directory name agrees with the
        # metadata we just imported.
        if (
            found_build[0] != module.name
            or found_build[1] != module.stream
            or str(found_build[2]) != module.version
        ):
            raise RuntimeError(
                'Parsed metadata results for "{0}" don\'t match the directory name'.format(
                    found_build[3])
            )
        log.info("Loaded local module build %r", module)
def get_local_releasever():
"""
Returns the $releasever variable used in the system when expanding .repo files.

View File

@@ -0,0 +1,98 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
import shutil
import tempfile
import requests
import module_build_service.scm
from module_build_service import conf, log
from module_build_service.common.utils import load_mmd_file
from module_build_service.errors import ValidationError
def _is_eol_in_pdc(name, stream):
    """ Check PDC if the module name:stream is no longer active. """
    query = {"type": "module", "global_component": name, "name": stream}
    branches_url = conf.pdc_url + "/component-branches/"

    response = requests.get(branches_url, params=query)
    # requests.Response is falsy for 4xx/5xx status codes.
    if not response:
        raise ValidationError("Failed to talk to PDC {}{}".format(response, response.text))

    results = response.json()["results"]
    if not results:
        raise ValidationError(
            "No such module {}:{} found at {}".format(name, stream, response.request.url))

    # If the module is active, then it is not EOL and vice versa.
    return not results[0]["active"]
def fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False, mandatory_checks=True):
    """
    Check out the module repository at `url` and load its modulemd file.

    :param str url: SCM URL of the module repository.
    :param str branch: branch to check out; defaults to the SCM's default.
    :param bool allow_local_url: whether local (file://) URLs are acceptable.
    :param bool whitelist_url: when True, `url` itself is used as the SCM
        whitelist instead of conf.scmurls and scm.verify() is skipped.
    :param bool mandatory_checks: when False, return right after the EOL check
        and skip the name/stream/version validation below.
    :return: tuple of (Modulemd.ModuleStream, SCM instance)
    :raises ValidationError: if the module is marked EOL in PDC, or its
        modulemd name/stream/version conflict with the SCM repository.
    """
    td = None
    scm = None
    try:
        log.debug("Verifying modulemd")
        td = tempfile.mkdtemp()
        if whitelist_url:
            scm = module_build_service.scm.SCM(url, branch, [url], allow_local_url)
        else:
            scm = module_build_service.scm.SCM(url, branch, conf.scmurls, allow_local_url)
        scm.checkout(td)
        if not whitelist_url and mandatory_checks:
            scm.verify()
        cofn = scm.get_module_yaml()
        mmd = load_mmd_file(cofn)
    finally:
        # Best-effort cleanup of the checkout; a failure to remove the
        # temporary directory must not mask the real result/exception.
        try:
            if td is not None:
                shutil.rmtree(td)
        except Exception as e:
            log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))

    if conf.check_for_eol:
        if _is_eol_in_pdc(scm.name, scm.branch):
            raise ValidationError(
                "Module {}:{} is marked as EOL in PDC.".format(scm.name, scm.branch))

    if not mandatory_checks:
        return mmd, scm

    # If the name was set in the modulemd, make sure it matches what the scmurl
    # says it should be
    if mmd.get_module_name() and mmd.get_module_name() != scm.name:
        if not conf.allow_name_override_from_scm:
            raise ValidationError(
                'The name "{0}" that is stored in the modulemd is not valid'
                .format(mmd.get_module_name())
            )
    else:
        # Set the module name
        mmd = mmd.copy(scm.name)

    # If the stream was set in the modulemd, make sure it matches what the repo
    # branch is
    if mmd.get_stream_name() and mmd.get_stream_name() != scm.branch:
        if not conf.allow_stream_override_from_scm:
            raise ValidationError(
                'The stream "{0}" that is stored in the modulemd does not match the branch "{1}"'
                .format(mmd.get_stream_name(), scm.branch)
            )
    else:
        # Set the module stream
        mmd = mmd.copy(mmd.get_module_name(), scm.branch)

    # If the version is in the modulemd, throw an exception since the
    # version is generated by MBS from the commit time
    if mmd.get_version():
        raise ValidationError(
            'The version "{0}" is already defined in the modulemd but it shouldn\'t be since the '
            "version is generated based on the commit time".format(mmd.get_version())
        )
    else:
        mmd.set_version(int(scm.version))

    return mmd, scm

View File

@@ -11,14 +11,16 @@ import textwrap
from werkzeug.datastructures import FileStorage
from module_build_service import app, conf, create_app, db, models
from module_build_service.builder.MockModuleBuilder import import_builds_from_local_dnf_repos
from module_build_service.builder.MockModuleBuilder import (
import_builds_from_local_dnf_repos, load_local_builds
)
from module_build_service.common.utils import load_mmd_file, import_mmd
from module_build_service.utils import submit_module_build_from_yaml, load_local_builds
from module_build_service.db_session import db_session
from module_build_service.errors import StreamAmbigous
import module_build_service.messaging
import module_build_service.scheduler.consumer
import module_build_service.scheduler.local
from module_build_service.web.submit import submit_module_build_from_yaml
manager = Manager(create_app)

View File

@@ -708,7 +708,7 @@ class ModuleBuild(MBSBase):
def local_modules(cls, db_session, name=None, stream=None):
"""
Returns list of local module builds added by
utils.load_local_builds(...). When `name` or `stream` is set,
load_local_builds(...). When `name` or `stream` is set,
it is used to further limit the result set.
If conf.system is not set to "mock" or "test", returns empty

View File

@@ -198,7 +198,7 @@ class DBResolver(GenericResolver):
Returns a dictionary with keys set according the `keys` parameters and values
set to the union of all components defined in all installation profiles matching
the key in all buildrequires. If there are some modules loaded by
utils.load_local_builds(...), these local modules will be considered when returning
load_local_builds(...), these local modules will be considered when returning
the profiles.
:param mmd: Modulemd.ModuleStream instance representing the module
:param keys: list of modulemd installation profiles to include in the result

View File

@@ -279,7 +279,7 @@ class MBSResolver(KojiResolver):
set to union of all components defined in all installation
profiles matching the key using the buildrequires.
If there are some modules loaded by utils.load_local_builds(...), these
If there are some modules loaded by load_local_builds(...), these
local modules will be considered when returning the profiles.
https://pagure.io/fm-orchestrator/issue/181
@@ -406,7 +406,7 @@ class MBSResolver(KojiResolver):
Resolves the requires list of N:S or N:S:V:C to a dictionary with keys as
the module name and the values as a dictionary with keys of ref,
stream, version.
If there are some modules loaded by utils.load_local_builds(...), these
If there are some modules loaded by load_local_builds(...), these
local modules will be considered when resolving the requires. A RuntimeError
is raised on MBS lookup errors.
:param requires: a list of N:S or N:S:V:C strings
@@ -425,7 +425,7 @@ class MBSResolver(KojiResolver):
raise ValueError(
"Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)
# Try to find out module dependency in the local module builds
# added by utils.load_local_builds(...).
# added by load_local_builds(...).
local_modules = models.ModuleBuild.local_modules(
self.db_session, module_name, module_stream)
if local_modules:

View File

@@ -8,8 +8,7 @@ from module_build_service.builder.utils import get_rpm_release
from module_build_service.common.utils import mmd_to_str
from module_build_service.common.retry import retry
import module_build_service.resolver
import module_build_service.utils
from module_build_service.utils import (
from module_build_service.scheduler.submit import (
record_component_builds,
record_filtered_rpms,
record_module_build_arches
@@ -20,9 +19,9 @@ from module_build_service.errors import UnprocessableEntity, Forbidden, Validati
from module_build_service.scheduler.default_modules import (
add_default_modules, handle_collisions_with_base_module_rpms)
from module_build_service.scheduler.greenwave import greenwave
from module_build_service.utils.submit import format_mmd
from module_build_service.scheduler import events
from module_build_service.scheduler.reuse import attempt_to_reuse_all_components
from module_build_service.scheduler.submit import format_mmd
from module_build_service.scheduler.ursine import handle_stream_collision_modules
from requests.exceptions import ConnectionError

View File

@@ -0,0 +1,487 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
from datetime import datetime
import json
from multiprocessing.dummy import Pool as ThreadPool
import os
import kobo.rpmlib
from module_build_service import conf, log, models, Modulemd
from module_build_service.common.submit import fetch_mmd
from module_build_service.common.utils import to_text_type
from module_build_service.db_session import db_session
from module_build_service.errors import ValidationError, UnprocessableEntity, Forbidden
import module_build_service.scm
def get_build_arches(mmd, config):
    """
    Returns the list of architectures for which the module `mmd` should be built.

    Sources are consulted in priority order:
      1. config.base_module_arches matching a buildrequired base module stream
      2. the module's own xmd["mbs"]["koji_tag_arches"] (layered products)
      3. arches of a buildrequired base/privileged module's build
      4. the preconfigured default list (config.arches)

    :param mmd: Module MetaData
    :param config: config (module_build_service.config.Config instance)
    :return list of architectures
    """
    # Imported here to allow import of utils in GenericBuilder.
    from module_build_service.builder import GenericBuilder

    nsvc = mmd.get_nsvc()

    # At first, handle BASE_MODULE_ARCHES - this overrides any other option.
    # Find out the base modules in buildrequires section of XMD and
    # set the Koji tag arches according to it.
    if "mbs" in mmd.get_xmd():
        for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():
            ns = ":".join([req_name, req_data["stream"]])
            if ns in config.base_module_arches:
                arches = config.base_module_arches[ns]
                log.info("Setting build arches of %s to %r based on the BASE_MODULE_ARCHES." % (
                    nsvc, arches))
                return arches

    # Check whether the module contains the `koji_tag_arches`. This is used only
    # by special modules defining the layered products.
    try:
        arches = mmd.get_xmd()["mbs"]["koji_tag_arches"]
        log.info("Setting build arches of %s to %r based on the koji_tag_arches." % (
            nsvc, arches))
        return arches
    except KeyError:
        pass

    # Check the base/layered-product module this module buildrequires and try to get the
    # list of arches from there.
    try:
        buildrequires = mmd.get_xmd()["mbs"]["buildrequires"]
    except (ValueError, KeyError):
        log.warning(
            "Module {0} does not have buildrequires in its xmd".format(mmd.get_nsvc()))
        buildrequires = None

    if buildrequires:
        # Looping through all the privileged modules that are allowed to set koji tag arches
        # and the base modules to see what the koji tag arches should be. Doing it this way
        # preserves the order in the configurations.
        for module in conf.allowed_privileged_module_names + conf.base_module_names:
            module_in_xmd = buildrequires.get(module)
            if not module_in_xmd:
                continue
            module_obj = models.ModuleBuild.get_build_from_nsvc(
                db_session,
                module,
                module_in_xmd["stream"],
                module_in_xmd["version"],
                module_in_xmd["context"],
            )
            if not module_obj:
                continue

            arches = GenericBuilder.get_module_build_arches(module_obj)
            if arches:
                log.info("Setting build arches of %s to %r based on the buildrequired "
                         "module %r." % (nsvc, arches, module_obj))
                return arches

    # As a last resort, return just the preconfigured list of arches.
    arches = config.arches
    log.info("Setting build arches of %s to %r based on default ARCHES." % (nsvc, arches))
    return arches
def record_module_build_arches(mmd, build):
    """
    Finds out the list of build arches against which the ModuleBuild `build` should be built
    and records them to `build.arches`.

    :param Modulemd mmd: The MMD file associated with a ModuleBuild.
    :param ModuleBuild build: The ModuleBuild.
    """
    for arch_name in get_build_arches(mmd, conf):
        # Reuse an existing ModuleArch row when present, otherwise create one.
        arch = db_session.query(models.ModuleArch).filter_by(name=arch_name).first()
        if arch is None:
            arch = models.ModuleArch(name=arch_name)
        build.arches.append(arch)

    db_session.commit()
def record_filtered_rpms(mmd):
    """Record filtered RPMs that should not be installed into buildroot

    These RPMs are filtered:

    * Reads the mmd["xmd"]["buildrequires"] and extends it with "filtered_rpms"
      list containing the NVRs of filtered RPMs in a buildrequired module.

    :param Modulemd mmd: Modulemd that will be built next.
    :rtype: Modulemd.Module
    :return: Modulemd extended with the "filtered_rpms" in XMD section.
    """
    # Imported here to allow import of utils in GenericBuilder.
    from module_build_service.builder import GenericBuilder
    from module_build_service.resolver import GenericResolver

    resolver = GenericResolver.create(db_session, conf)
    builder = GenericBuilder.backends[conf.system]

    new_buildrequires = {}
    for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():
        # In case this is module resubmit or local build, the filtered_rpms
        # will already be there, so there is no point in generating them again.
        if "filtered_rpms" in req_data:
            new_buildrequires[req_name] = req_data
            continue

        # We can just get the first modulemd data from result right here thanks to
        # strict=True, so in case the module cannot be found, get_module_modulemds
        # raises an exception.
        req_mmd = resolver.get_module_modulemds(
            req_name, req_data["stream"], req_data["version"], req_data["context"], True)[0]

        # Find out the particular NVR of filtered packages
        filtered_rpms = []
        rpm_filter = req_mmd.get_rpm_filters()
        if rpm_filter:
            # Only built NVRs whose package name appears in the module's
            # rpm filter are recorded as filtered.
            built_nvrs = builder.get_built_rpms_in_module_build(req_mmd)
            for nvr in built_nvrs:
                parsed_nvr = kobo.rpmlib.parse_nvr(nvr)
                if parsed_nvr["name"] in rpm_filter:
                    filtered_rpms.append(nvr)
        req_data["filtered_rpms"] = filtered_rpms

        new_buildrequires[req_name] = req_data

    # Replace the old buildrequires with new ones.
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"] = new_buildrequires
    mmd.set_xmd(xmd)
    return mmd
def _scm_get_latest(pkg):
    """
    Resolve a component's SCM ref (e.g. a branch name) to a concrete commit.

    :param pkg: Modulemd component carrying a repository URL and a ref.
    :return: dict with "pkg_name", "pkg_ref" and "error" keys; on failure
        only "error" is populated.
    """
    try:
        # If the modulemd specifies that the 'f25' branch is what
        # we want to pull from, we need to resolve that f25 branch
        # to the specific commit available at the time of
        # submission (now).
        repo = pkg.get_repository()
        ref = pkg.get_ref()
        log.debug("Getting the commit hash for the ref %s on the repo %s", ref, repo)
        commit_hash = module_build_service.scm.SCM(repo).get_latest(ref)
    except Exception as exc:
        log.exception(exc)
        return {
            "error": "Failed to get the latest commit for %s#%s"
            % (pkg.get_repository(), pkg.get_ref())
        }

    return {"pkg_name": pkg.get_name(), "pkg_ref": commit_hash, "error": None}
def format_mmd(mmd, scmurl, module=None, db_session=None):
    """
    Prepares the modulemd for the MBS. This does things such as replacing the
    branches of components with commit hashes and adding metadata in the xmd
    dictionary.

    :param mmd: the Modulemd.ModuleStream object to format
    :param scmurl: the url to the modulemd
    :param module: When specified together with `session`, the time_modified
        of a module is updated regularly in case this method takes lot of time.
    :param db_session: Database session to update the `module`.
    :raises Forbidden: if a component carries a custom repository/cache and
        the configuration does not allow that.
    :raises ValidationError: if a component uses the unsupported "buildafter".
    :raises UnprocessableEntity: if resolving any component ref to a commit fails.
    """
    # Import it here, because SCM uses utils methods and fails to import
    # them because of dep-chain.
    from module_build_service.scm import SCM

    xmd = mmd.get_xmd()
    if "mbs" not in xmd:
        xmd["mbs"] = {}
    if "scmurl" not in xmd["mbs"]:
        xmd["mbs"]["scmurl"] = scmurl or ""
    if "commit" not in xmd["mbs"]:
        xmd["mbs"]["commit"] = ""

    # If module build was submitted via yaml file, there is no scmurl
    if scmurl:
        scm = SCM(scmurl)
        # We want to make sure we have the full commit hash for consistency
        if SCM.is_full_commit_hash(scm.scheme, scm.commit):
            full_scm_hash = scm.commit
        else:
            full_scm_hash = scm.get_full_commit_hash()
        xmd["mbs"]["commit"] = full_scm_hash

    if mmd.get_rpm_component_names() or mmd.get_module_component_names():
        if "rpms" not in xmd["mbs"]:
            xmd["mbs"]["rpms"] = {}
        # Add missing data in RPM components
        for pkgname in mmd.get_rpm_component_names():
            pkg = mmd.get_rpm_component(pkgname)
            # In case of resubmit of existing module which have been
            # cancelled/failed during the init state, the package
            # was maybe already handled by MBS, so skip it in this case.
            if pkgname in xmd["mbs"]["rpms"]:
                continue
            if pkg.get_repository() and not conf.rpms_allow_repository:
                raise Forbidden(
                    "Custom component repositories aren't allowed. "
                    "%r bears repository %r" % (pkgname, pkg.get_repository())
                )
            if pkg.get_cache() and not conf.rpms_allow_cache:
                raise Forbidden(
                    "Custom component caches aren't allowed. "
                    "%r bears cache %r" % (pkgname, pkg.get_cache())
                )
            if pkg.get_buildafter():
                raise ValidationError('The usage of "buildafter" is not yet supported')
            # Fill in configured defaults for anything the component omitted.
            if not pkg.get_repository():
                pkg.set_repository(conf.rpms_default_repository + pkgname)
            if not pkg.get_cache():
                pkg.set_cache(conf.rpms_default_cache + pkgname)
            if not pkg.get_ref():
                pkg.set_ref("master")
            if not pkg.get_arches():
                # No arch restriction given; default to all configured arches.
                for arch in conf.arches:
                    pkg.add_restricted_arch(arch)

        # Add missing data in included modules components
        for modname in mmd.get_module_component_names():
            mod = mmd.get_module_component(modname)
            if mod.get_repository() and not conf.modules_allow_repository:
                raise Forbidden(
                    "Custom module repositories aren't allowed. "
                    "%r bears repository %r" % (modname, mod.get_repository())
                )
            if not mod.get_repository():
                mod.set_repository(conf.modules_default_repository + modname)
            if not mod.get_ref():
                mod.set_ref("master")

        # Check that SCM URL is valid and replace potential branches in pkg refs
        # by real SCM hash and store the result to our private xmd place in modulemd.
        pool = ThreadPool(20)
        try:
            # Filter out the packages which we have already resolved in possible
            # previous runs of this method (can be caused by module build resubmition).
            pkgs_to_resolve = [
                mmd.get_rpm_component(name)
                for name in mmd.get_rpm_component_names()
                if name not in xmd["mbs"]["rpms"]
            ]
            async_result = pool.map_async(_scm_get_latest, pkgs_to_resolve)

            # For modules with lot of components, the _scm_get_latest can take a lot of time.
            # We need to bump time_modified from time to time, otherwise poller could think
            # that module is stuck in "init" state and it would send fake "init" message.
            while not async_result.ready():
                async_result.wait(60)
                if module and db_session:
                    module.time_modified = datetime.utcnow()
                    db_session.commit()
            pkg_dicts = async_result.get()
        finally:
            pool.close()

        # Collect all per-package resolution failures into a single error so
        # the submitter sees every broken component at once.
        err_msg = ""
        for pkg_dict in pkg_dicts:
            if pkg_dict["error"]:
                err_msg += pkg_dict["error"] + "\n"
            else:
                pkg_name = pkg_dict["pkg_name"]
                pkg_ref = pkg_dict["pkg_ref"]
                xmd["mbs"]["rpms"][pkg_name] = {"ref": pkg_ref}
        if err_msg:
            raise UnprocessableEntity(err_msg)

    # Set the modified xmd back to the modulemd
    mmd.set_xmd(xmd)
def merge_included_mmd(mmd, included_mmd):
    """
    Merges two modulemds. Only the metadata the `main` module needs from an
    included module is carried over - currently just xmd["mbs"]["rpms"].
    """
    incoming_mbs = included_mmd.get_xmd()["mbs"]
    if "rpms" not in incoming_mbs:
        # Nothing to merge; leave `mmd` untouched.
        return

    xmd = mmd.get_xmd()
    if "rpms" in xmd["mbs"]:
        xmd["mbs"]["rpms"].update(incoming_mbs["rpms"])
    else:
        xmd["mbs"]["rpms"] = incoming_mbs["rpms"]
    # Set the modified xmd back to the modulemd
    mmd.set_xmd(xmd)
def get_module_srpm_overrides(module):
    """
    Make necessary preparations to use any provided custom SRPMs.

    :param module: ModuleBuild object representing the module being submitted.
    :type module: :class:`models.ModuleBuild`
    :return: mapping of package names to SRPM links for all packages which
             have custom SRPM overrides specified
    :rtype: dict[str, str]
    :raises ValueError: if the SRPM list cannot be decoded or an SRPM is invalid
    :raises IOError: if a local SRPM path does not exist
    """
    overrides = {}

    if not module.srpms:
        return overrides

    # Make sure we can decode the custom SRPM list. Note: an `assert` here
    # would be stripped when running under `python -O`, so check explicitly.
    try:
        srpms = json.loads(module.srpms)
    except Exception:
        raise ValueError("Invalid srpms list encountered: {}".format(module.srpms))
    if not isinstance(srpms, list):
        raise ValueError("Invalid srpms list encountered: {}".format(module.srpms))

    for source in srpms:
        if source.startswith("cli-build/") and source.endswith(".src.rpm"):
            # This is a custom srpm that has been uploaded to koji by rpkg
            # using the package name as the basename suffixed with .src.rpm
            rpm_name = os.path.basename(source)[: -len(".src.rpm")]
        else:
            # This should be a local custom srpm path
            if not os.path.exists(source):
                raise IOError("Provided srpm is missing: {}".format(source))
            # Get package name from rpm headers
            try:
                rpm_hdr = kobo.rpmlib.get_rpm_header(source)
                rpm_name = to_text_type(kobo.rpmlib.get_header_field(rpm_hdr, "name"))
            except Exception:
                raise ValueError("Provided srpm is invalid: {}".format(source))

        if rpm_name in overrides:
            # First override for a package wins; warn about the rest.
            log.warning(
                'Encountered duplicate custom SRPM "{0}" for package {1}'
                .format(source, rpm_name)
            )
            continue

        log.debug('Using custom SRPM "{0}" for package {1}'.format(source, rpm_name))
        overrides[rpm_name] = source
    return overrides
def record_component_builds(
    mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None
):
    """
    Record the components of `mmd` as ComponentBuild rows of `module` in the
    database, recursing into included modules.

    :param mmd: Modulemd.ModuleStream whose components should be recorded;
        component refs must already be resolved by format_mmd(...).
    :param module: models.ModuleBuild the component builds belong to.
    :param int initial_batch: batch number to start from; used on recursion so
        included modules continue the global build order.
    :param previous_buildorder: buildorder of the previously processed
        component, used to decide when to increment the batch number.
    :param main_mmd: set on recursive calls to the top-level mmd; metadata of
        `mmd` is merged into it.
    :return: the batch number of the last processed component, or None when
        `mmd` has no components at all.
    :raises UnprocessableEntity: if `mmd` and `main_mmd` share components.
    :raises ValidationError: if a resubmitted component differs from the
        existing database record.
    """
    # Imported here to allow import of utils in GenericBuilder.
    from module_build_service.builder import GenericBuilder

    # When main_mmd is set, merge the metadata from this mmd to main_mmd,
    # otherwise our current mmd is main_mmd.
    if main_mmd:
        # Check for components that are in both MMDs before merging since MBS
        # currently can't handle that situation.
        main_mmd_rpms = main_mmd.get_rpm_component_names()
        mmd_rpms = mmd.get_rpm_component_names()
        duplicate_components = [
            rpm for rpm in main_mmd_rpms
            if rpm in mmd_rpms
        ]
        if duplicate_components:
            error_msg = (
                'The included module "{0}" in "{1}" have the following '
                "conflicting components: {2}".format(
                    mmd.get_module_name(), main_mmd.get_module_name(),
                    ", ".join(duplicate_components)
                )
            )
            raise UnprocessableEntity(error_msg)
        merge_included_mmd(main_mmd, mmd)
    else:
        main_mmd = mmd

    # If the modulemd yaml specifies components, then submit them for build
    rpm_components = [
        mmd.get_rpm_component(name)
        for name in mmd.get_rpm_component_names()
    ]
    module_components = [
        mmd.get_module_component(name)
        for name in mmd.get_module_component_names()
    ]
    all_components = list(rpm_components) + list(module_components)
    if not all_components:
        return

    # Get map of packages that have SRPM overrides
    srpm_overrides = get_module_srpm_overrides(module)

    rpm_weights = GenericBuilder.get_build_weights(
        [c.get_name() for c in rpm_components]
    )
    all_components.sort(key=lambda x: x.get_buildorder())
    # We do not start with batch = 0 here, because the first batch is
    # reserved for module-build-macros. First real components must be
    # planned for batch 2 and following.
    batch = initial_batch

    for component in all_components:
        # Increment the batch number when buildorder increases.
        if previous_buildorder != component.get_buildorder():
            previous_buildorder = component.get_buildorder()
            batch += 1

        # If the component is another module, we fetch its modulemd file
        # and record its components recursively with the initial_batch
        # set to our current batch, so the components of this module
        # are built in the right global order.
        if isinstance(component, Modulemd.ComponentModule):
            full_url = component.get_repository() + "?#" + component.get_ref()
            # It is OK to whitelist all URLs here, because the validity
            # of every URL have been already checked in format_mmd(...).
            included_mmd = fetch_mmd(full_url, whitelist_url=True)[0]
            format_mmd(included_mmd, module.scmurl, module, db_session)
            batch = record_component_builds(
                included_mmd, module, batch, previous_buildorder, main_mmd)
            continue

        package = component.get_name()
        if package in srpm_overrides:
            component_ref = None
            full_url = srpm_overrides[package]
            log.info('Building custom SRPM "{0}"' " for package {1}".format(full_url, package))
        else:
            component_ref = mmd.get_xmd()["mbs"]["rpms"][package]["ref"]
            full_url = component.get_repository() + "?#" + component_ref

        # Skip the ComponentBuild if it already exists in database. This can happen
        # in case of module build resubmition.
        existing_build = models.ComponentBuild.from_component_name(db_session, package, module.id)
        if existing_build:
            # Check that the existing build has the same most important attributes.
            # This should never be a problem, but it's good to be defensive here so
            # we do not mess things during resubmition.
            if (
                existing_build.batch != batch
                or existing_build.scmurl != full_url
                or existing_build.ref != component_ref
            ):
                raise ValidationError(
                    "Component build %s of module build %s (id: %d) already "
                    "exists in database, but its attributes are different from"
                    " resubmitted one." % (
                        component.get_name(), module.name, module.id)
                )
            continue

        build = models.ComponentBuild(
            module_id=module.id,
            package=package,
            format="rpms",
            scmurl=full_url,
            batch=batch,
            ref=component_ref,
            weight=rpm_weights[package],
            buildonly=component.get_buildonly()
        )
        db_session.add(build)

    return batch

View File

@@ -1,3 +0,0 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
from module_build_service.utils.submit import * # noqa

File diff suppressed because it is too large Load Diff

View File

@@ -17,8 +17,9 @@ from module_build_service.common.utils import import_mmd
from module_build_service.errors import ValidationError, Forbidden, NotFound, ProgrammingError
from module_build_service.backports import jsonify
from module_build_service.monitor import registry
from module_build_service.utils.submit import (
fetch_mmd, submit_module_build_from_scm, submit_module_build_from_yaml
from module_build_service.common.submit import fetch_mmd
from module_build_service.web.submit import (
submit_module_build_from_scm, submit_module_build_from_yaml
)
from module_build_service.web.utils import (
cors_header,

View File

@@ -0,0 +1,618 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
import copy
from datetime import datetime
import math
import os
import re
import time
from gi.repository import GLib
import requests
from module_build_service import conf, log, models, Modulemd
from module_build_service.common.submit import fetch_mmd
from module_build_service.common.utils import load_mmd, mmd_to_str, to_text_type
from module_build_service.errors import Conflict, Forbidden, ValidationError
from module_build_service.web.mse import generate_expanded_mmds
from module_build_service.web.utils import deps_to_dict
def validate_mmd(mmd):
    """Validate module metadata

    If everything is ok, just keep quiet, otherwise error is raised for
    specific problem.

    :param mmd: modulemd object representing module metadata.
    :type mmd: Modulemd.Module
    :raises Forbidden: if metadata contains module repository but it is not
        allowed.
    :raise ValidationError: if the xmd has the "mbs" key set.
    """
    for comp_name in mmd.get_module_component_names():
        component = mmd.get_module_component(comp_name)
        repository = component.get_repository()
        if repository and not conf.modules_allow_repository:
            raise Forbidden(
                "Custom module repositories aren't allowed. "
                "%r bears repository %r" % (comp_name, repository)
            )

    name = mmd.get_module_name()
    xmd = mmd.get_xmd()
    if "mbs" in xmd:
        # Only explicitly privileged modules may ship an "mbs" xmd section,
        # and even then only with a small set of permitted keys.
        if name not in conf.allowed_privileged_module_names:
            raise ValidationError('The "mbs" xmd field is reserved for MBS')

        permitted_keys = ("disttag_marking", "koji_tag_arches")
        for key in xmd["mbs"]:
            if key not in permitted_keys:
                raise ValidationError('The "mbs" xmd field is reserved for MBS')

    if name in conf.base_module_names:
        raise ValidationError(
            'You cannot build a module named "{}" since it is a base module'.format(name))
def get_prefixed_version(mmd):
    """
    Return the prefixed version of the module based on the buildrequired base module stream.

    The stream version of the first buildrequired base module (e.g. platform
    "prefix1.2.0" => 010200) is prepended to the module's own version. When no
    base module is buildrequired, the prefix cannot be determined, or the
    result would exceed a 64-bit unsigned integer, the unprefixed version is
    returned unchanged.

    :param mmd: the Modulemd.ModuleStream object to format
    :return: the prefixed version
    :rtype: int
    """
    xmd = mmd.get_xmd()
    version = mmd.get_version()

    base_module_stream = None
    for base_module in conf.base_module_names:
        try:
            base_module_stream = xmd["mbs"]["buildrequires"].get(base_module, {}).get("stream")
            if base_module_stream:
                # Break after finding the first base module that is buildrequired
                break
        except KeyError:
            # xmd["mbs"]["buildrequires"] is missing entirely.
            log.warning("The module's mmd is missing information in the xmd section")
            return version
    else:
        # for/else: the loop exhausted every base module name without a
        # break, i.e. no base module is buildrequired.
        log.warning(
            "This module does not buildrequire a base module ({0})".format(
                " or ".join(conf.base_module_names)
            )
        )
        return version

    # The platform version (e.g. prefix1.2.0 => 010200)
    version_prefix = models.ModuleBuild.get_stream_version(base_module_stream, right_pad=False)

    if version_prefix is None:
        log.warning(
            'The "{0}" stream "{1}" couldn\'t be used to prefix the module\'s '
            "version".format(base_module, base_module_stream)
        )
        return version

    # Strip the stream suffix because Modulemd requires version to be an integer
    new_version = int(str(int(math.floor(version_prefix))) + str(version))
    if new_version > GLib.MAXUINT64:
        log.warning(
            'The "{0}" stream "{1}" caused the module\'s version prefix to be '
            "too long".format(base_module, base_module_stream)
        )
        return version
    return new_version
def submit_module_build_from_yaml(
    db_session, username, handle, params, stream=None, skiptests=False
):
    """
    Submit a module build from an uploaded modulemd YAML file.

    :param db_session: database session.
    :param str username: submitter's username.
    :param handle: file-like object with the modulemd YAML; its ``filename``
        attribute, when present, provides a fallback module name.
    :param dict params: API parameters forwarded to submit_module_build.
    :param str stream: optional stream name overriding the modulemd's stream.
    :param bool skiptests: when True, inject an rpm macro disabling %check.
    :return: result of submit_module_build.
    :raises ValidationError: when no module name can be determined.
    """
    mmd = load_mmd(to_text_type(handle.read()))

    # The upload's file name serves as a fallback module name.
    def_name = None
    if hasattr(handle, "filename"):
        def_name = str(os.path.splitext(os.path.basename(handle.filename))[0])
    elif not mmd.get_module_name():
        raise ValidationError(
            "The module's name was not present in the modulemd file. Please use the "
            '"module_name" parameter'
        )

    now = datetime.utcfromtimestamp(int(time.time()))
    def_version = int(now.strftime("%Y%m%d%H%M%S"))

    module_name = mmd.get_module_name() or def_name
    module_stream = stream or mmd.get_stream_name() or "master"
    if (module_name, module_stream) != (mmd.get_module_name(), mmd.get_stream_name()):
        # This is how you set the name and stream in the modulemd
        mmd = mmd.copy(module_name, module_stream)

    mmd.set_version(mmd.get_version() or def_version)

    if skiptests:
        buildopts = mmd.get_buildopts() or Modulemd.Buildopts()
        existing_macros = buildopts.get_rpm_macros() or ""
        buildopts.set_rpm_macros(existing_macros + "\n\n%__spec_check_pre exit 0\n")
        mmd.set_buildopts(buildopts)

    return submit_module_build(db_session, username, mmd, params)
_url_check_re = re.compile(r"^[^:/]+:.*$")
def submit_module_build_from_scm(db_session, username, params, allow_local_url=False):
    """
    Submit a module build from a source control URL.

    :param db_session: SQLAlchemy session object.
    :param str username: username of the build's owner.
    :param dict params: the API parameters passed in by the user; must contain
        "scmurl" and "branch".
    :param bool allow_local_url: when True, a plain filesystem path is accepted
        as the SCM URL and converted to a file:// URL (used for local builds).
    :rtype: list with ModuleBuild
    :return: list with submitted module builds.
    """
    url, branch = params["scmurl"], params["branch"]

    # Translate local paths into file:// URL
    if allow_local_url and not _url_check_re.match(url):
        log.info("'{}' is not a valid URL, assuming local path".format(url))
        url = "file://" + os.path.abspath(url)

    mmd, scm = fetch_mmd(url, branch, allow_local_url)
    return submit_module_build(db_session, username, mmd, params)
def _apply_dep_overrides(mmd, params):
    """
    Apply the dependency override parameters (if specified) on the input modulemd.

    :param Modulemd.ModuleStream mmd: the modulemd to apply the overrides on
    :param dict params: the API parameters passed in by the user
    :raises ValidationError: if one of the overrides doesn't apply
    """
    # Shallow-copy the user-supplied overrides so removals below don't mutate `params`.
    dep_overrides = {
        "buildrequires": copy.copy(params.get("buildrequire_overrides", {})),
        "requires": copy.copy(params.get("require_overrides", {})),
    }

    # Parse the module's branch to determine if it should override the stream of the buildrequired
    # module defined in conf.br_stream_override_module
    branch_search = None
    if params.get("branch") and conf.br_stream_override_module and conf.br_stream_override_regexes:
        # Only parse the branch for a buildrequire override if the user didn't manually specify an
        # override for the module specified in conf.br_stream_override_module
        if not dep_overrides["buildrequires"].get(conf.br_stream_override_module):
            branch_search = None
            # First matching regex wins; the for/else logs only when none matched.
            for regex in conf.br_stream_override_regexes:
                branch_search = re.search(regex, params["branch"])
                if branch_search:
                    log.debug(
                        "The stream override regex `%s` matched the branch %s",
                        regex,
                        params["branch"],
                    )
                    break
            else:
                log.debug('No stream override regexes matched the branch "%s"', params["branch"])

    # If a stream was parsed from the branch, then add it as a stream override for the module
    # specified in conf.br_stream_override_module
    if branch_search:
        # Concatenate all the groups that are not None together to get the desired stream.
        # This approach is taken in case there are sections to ignore.
        # For instance, if we need to parse `el8.0.0` from `rhel-8.0.0`.
        parsed_stream = "".join(group for group in branch_search.groups() if group)
        if parsed_stream:
            dep_overrides["buildrequires"][conf.br_stream_override_module] = [parsed_stream]
            log.info(
                'The buildrequired stream of "%s" was overriden with "%s" based on the branch "%s"',
                conf.br_stream_override_module, parsed_stream, params["branch"],
            )
        else:
            # `regex` is the loop variable from above; branch_search being truthy guarantees the
            # loop ran and broke on this regex, so it is defined here.
            log.warning(
                'The regex `%s` only matched empty capture groups on the branch "%s". The regex is '
                " invalid and should be rewritten.",
                regex, params["branch"],
            )

    # Track which overrides actually matched a dependency so leftover (inapplicable) ones can be
    # reported as a validation error at the end.
    unused_dep_overrides = {
        "buildrequires": set(dep_overrides["buildrequires"].keys()),
        "requires": set(dep_overrides["requires"].keys()),
    }

    deps = mmd.get_dependencies()
    for dep in deps:
        overridden = False
        # Streams can't be replaced in-place, so rebuild each Dependencies object as a copy with
        # the overridden streams substituted, then swap it in below.
        new_dep = Modulemd.Dependencies()
        for dep_type, overrides in dep_overrides.items():
            if dep_type == "buildrequires":
                mmd_dep_type = "buildtime"
            else:
                mmd_dep_type = "runtime"
            # Get the existing streams
            reqs = deps_to_dict(dep, mmd_dep_type)
            # Get the method to add a new stream for this dependency type
            # (e.g. add_buildtime_stream)
            add_func = getattr(new_dep, "add_{}_stream".format(mmd_dep_type))
            add_empty_func = getattr(
                new_dep, "set_empty_{}_dependencies_for_module".format(mmd_dep_type))
            for name, streams in reqs.items():
                if name in dep_overrides[dep_type]:
                    streams_to_add = dep_overrides[dep_type][name]
                    unused_dep_overrides[dep_type].remove(name)
                    overridden = True
                else:
                    streams_to_add = reqs[name]

                if not streams_to_add:
                    add_empty_func(name)
                else:
                    for stream in streams_to_add:
                        add_func(name, stream)
        if overridden:
            # Set the overridden streams
            mmd.remove_dependencies(dep)
            mmd.add_dependencies(new_dep)

    for dep_type in unused_dep_overrides.keys():
        # If a stream override was applied from parsing the branch and it wasn't applicable,
        # just ignore it
        if branch_search and conf.br_stream_override_module in unused_dep_overrides[dep_type]:
            unused_dep_overrides[dep_type].remove(conf.br_stream_override_module)
        if unused_dep_overrides[dep_type]:
            raise ValidationError(
                "The {} overrides for the following modules aren't applicable: {}".format(
                    dep_type[:-1], ", ".join(sorted(unused_dep_overrides[dep_type])))
            )
def _modify_buildtime_streams(db_session, mmd, new_streams_func):
    """
    Modify buildtime streams using the input new_streams_func.

    :param db_session: SQLAlchemy session object.
    :param Modulemd.ModuleStream mmd: the modulemd to apply the overrides on
    :param function new_streams_func: a function that takes the parameters
        (db_session, module_name, module_streams), and returns the streams that
        should be set on the buildtime dependency.
    """
    deps = mmd.get_dependencies()
    for dep in deps:
        overridden = False
        brs = deps_to_dict(dep, "buildtime")
        # There is no way to replace streams, so create a new Dependencies object that will end up
        # being a copy, but with the streams replaced if a virtual stream is detected
        new_dep = Modulemd.Dependencies()
        for name, streams in brs.items():
            new_streams = new_streams_func(db_session, name, streams)
            if streams != new_streams:
                overridden = True

            if not new_streams:
                new_dep.set_empty_buildtime_dependencies_for_module(name)
            else:
                for stream in new_streams:
                    new_dep.add_buildtime_stream(name, stream)
        if overridden:
            # Copy the runtime streams as is
            reqs = deps_to_dict(dep, "runtime")
            for name, streams in reqs.items():
                if not streams:
                    new_dep.set_empty_runtime_dependencies_for_module(name)
                else:
                    for stream in streams:
                        new_dep.add_runtime_stream(name, stream)

            # Replace the old Dependencies object with the new one with the overrides
            mmd.remove_dependencies(dep)
            mmd.add_dependencies(new_dep)
def resolve_base_module_virtual_streams(db_session, name, streams):
    """
    Resolve any base module virtual streams and return a copy of `streams` with the resolved values.

    :param db_session: SQLAlchemy session object.
    :param str name: the module name
    :param list streams: the streams to resolve
    :return: the resolved streams
    :rtype: list
    """
    # Only base modules can carry virtual streams. Check this *before* creating a
    # resolver: the original code constructed the resolver first, doing wasted
    # work for every non-base-module dependency passed through this function.
    if name not in conf.base_module_names:
        return streams

    # Imported here to avoid a circular import at module load time.
    from module_build_service.resolver import GenericResolver

    resolver = GenericResolver.create(db_session, conf)

    new_streams = copy.deepcopy(streams)
    for i, stream in enumerate(streams):
        # Ignore streams that start with a minus sign, since those are handled in the
        # MSE code
        if stream.startswith("-"):
            continue

        # Check if the base module stream is available
        log.debug('Checking to see if the base module "%s:%s" is available', name, stream)
        if resolver.get_module_count(name=name, stream=stream) > 0:
            continue

        # If the base module stream is not available, check if there's a virtual stream
        log.debug(
            'Checking to see if there is a base module "%s" with the virtual stream "%s"',
            name, stream,
        )
        base_module_mmd = resolver.get_latest_with_virtual_stream(
            name=name, virtual_stream=stream
        )
        if not base_module_mmd:
            # If there isn't this base module stream or virtual stream available, skip it,
            # and let the dep solving code deal with it like it normally would
            log.warning(
                'There is no base module "%s" with stream/virtual stream "%s"',
                name, stream,
            )
            continue

        latest_stream = base_module_mmd.get_stream_name()
        log.info(
            'Replacing the buildrequire "%s:%s" with "%s:%s", since "%s" is a virtual '
            "stream",
            name, stream, name, latest_stream, stream
        )
        new_streams[i] = latest_stream

    return new_streams
def _process_support_streams(db_session, mmd, params):
    """
    Check if any buildrequired base modules require a support stream suffix.

    This checks the Red Hat Product Pages to see if the buildrequired base module stream has been
    released, if yes, then add the appropriate stream suffix.

    :param db_session: SQLAlchemy session object.
    :param Modulemd.ModuleStream mmd: the modulemd to apply the overrides on
    :param dict params: the API parameters passed in by the user
    """
    config_msg = (
        'Skipping the release date checks for adding a stream suffix since "%s" '
        "is not configured"
    )
    # Both Product Pages settings must be configured for this feature to do anything.
    if not conf.product_pages_url:
        log.debug(config_msg, "product_pages_url")
        return
    elif not conf.product_pages_module_streams:
        log.debug(config_msg, "product_pages_module_streams")
        return

    buildrequire_overrides = params.get("buildrequire_overrides", {})

    def new_streams_func(db_session, name, streams):
        # Closure passed to _modify_buildtime_streams: returns a copy of `streams`
        # with released base-module streams suffixed (e.g. "8.0.0" -> "8.0.0-z").
        if name not in conf.base_module_names:
            log.debug("The module %s is not a base module. Skipping the release date check.", name)
            return streams
        elif name in buildrequire_overrides:
            log.debug(
                "The module %s is a buildrequire override. Skipping the release date check.", name)
            return streams

        new_streams = copy.deepcopy(streams)
        for i, stream in enumerate(streams):
            # Find the first configured regex that matches this stream; its configured
            # values describe the suffix and the Product Pages release name templates.
            for regex, values in conf.product_pages_module_streams.items():
                if re.match(regex, stream):
                    log.debug(
                        'The regex `%s` from the configuration "product_pages_module_streams" '
                        "matched the stream %s",
                        regex, stream,
                    )
                    stream_suffix, pp_release_template, pp_major_release_template = values
                    break
            else:
                log.debug(
                    'No regexes in the configuration "product_pages_module_streams" matched the '
                    "stream %s. Skipping the release date check for this stream.",
                    stream,
                )
                continue

            if stream.endswith(stream_suffix):
                log.debug(
                    'The stream %s already contains the stream suffix of "%s". Skipping the '
                    "release date check.",
                    stream, stream_suffix
                )
                continue

            stream_version = models.ModuleBuild.get_stream_version(stream)
            if not stream_version:
                log.debug("A stream version couldn't be parsed from %s", stream)
                continue

            # Convert the stream_version float to an int to make the math below deal with only
            # integers
            stream_version_int = int(stream_version)
            # For example 80000 => 8
            x = stream_version_int // 10000
            # For example 80100 => 1
            y = (stream_version_int - x * 10000) // 100
            # For example 80104 => 4
            z = stream_version_int - x * 10000 - y * 100

            # Check if the stream version is x.0.0
            if stream_version_int % 10000 == 0 and pp_major_release_template:
                # For example, el8.0.0 => rhel-8-0
                pp_release = pp_major_release_template.format(x=x, y=y, z=z)
            else:
                # For example el8.0.1 => rhel-8-0.1
                pp_release = pp_release_template.format(x=x, y=y, z=z)

            url = "{}/api/v7/releases/{}/?fields=ga_date".format(
                conf.product_pages_url.rstrip("/"), pp_release)

            try:
                pp_rv = requests.get(url, timeout=15)
                pp_json = pp_rv.json()
            # Catch requests failures and JSON parsing errors
            except (requests.exceptions.RequestException, ValueError):
                log.exception(
                    "The query to the Product Pages at %s failed. Assuming it is not yet released.",
                    url,
                )
                continue

            ga_date = pp_json.get("ga_date")
            if not ga_date:
                log.debug("A release date for the release %s could not be determined", pp_release)
                continue

            # NOTE(review): `>=` treats a release whose GA date is *today* as not yet
            # released — confirm that is the intended cutoff.
            if datetime.strptime(ga_date, "%Y-%m-%d").date() >= datetime.utcnow().date():
                log.debug(
                    "The release %s hasn't been released yet. Not adding a stream suffix.",
                    ga_date
                )
                continue

            new_stream = stream + stream_suffix
            log.info(
                'Replacing the buildrequire "%s:%s" with "%s:%s", since the stream is released',
                name, stream, name, new_stream
            )
            new_streams[i] = new_stream

        return new_streams

    _modify_buildtime_streams(db_session, mmd, new_streams_func)
def submit_module_build(db_session, username, mmd, params):
    """
    Submits new module build.

    :param db_session: SQLAlchemy session object.
    :param str username: Username of the build's owner.
    :param Modulemd.ModuleStream mmd: Modulemd defining the build.
    :param dict params: the API parameters passed in by the user
    :rtype: list with ModuleBuild
    :return: List with submitted module builds.
    :raises ValidationError: when the modulemd is invalid, no dependency
        combination can be satisfied, the rebuild strategy is changed on
        resume, or a non-scratch build is requested from a scratch-only branch.
    :raises Conflict: when every expanded module build already exists and
        nothing would be rebuilt.
    """
    log.debug(
        "Submitted %s module build for %s:%s:%s",
        ("scratch" if params.get("scratch", False) else "normal"),
        mmd.get_module_name(),
        mmd.get_stream_name(),
        mmd.get_version(),
    )
    validate_mmd(mmd)

    raise_if_stream_ambigous = False
    default_streams = {}
    # For local builds, we want the user to choose the exact stream using the default_streams
    # in case there are multiple streams to choose from and raise an exception otherwise.
    if "local_build" in params:
        raise_if_stream_ambigous = True
    # Get the default_streams if set.
    if "default_streams" in params:
        default_streams = params["default_streams"]

    # Apply user overrides, resolve virtual base-module streams, and add release
    # stream suffixes before expanding the modulemd into concrete builds.
    _apply_dep_overrides(mmd, params)
    _modify_buildtime_streams(db_session, mmd, resolve_base_module_virtual_streams)
    _process_support_streams(db_session, mmd, params)

    # Module stream expansion: one mmd per satisfiable buildrequire combination.
    mmds = generate_expanded_mmds(db_session, mmd, raise_if_stream_ambigous, default_streams)
    if not mmds:
        raise ValidationError(
            "No dependency combination was satisfied. Please verify the "
            "buildrequires in your modulemd have previously been built."
        )
    modules = []

    # True if all module builds are skipped so MBS will actually not rebuild
    # anything. To keep the backward compatibility, we need to raise an exception
    # later in the end of this method.
    all_modules_skipped = True

    for mmd in mmds:
        # Prefix the version of the modulemd based on the base module it buildrequires
        version = get_prefixed_version(mmd)
        mmd.set_version(version)
        nsvc = mmd.get_nsvc()

        log.debug("Checking whether module build already exists: %s.", nsvc)
        module = models.ModuleBuild.get_build_from_nsvc(db_session, *nsvc.split(":"))
        if module and not params.get("scratch", False):
            # An existing non-scratch build: either skip it (not failed) or resume it.
            if module.state != models.BUILD_STATES["failed"]:
                log.info(
                    "Skipping rebuild of %s, only rebuild of modules in failed state is allowed.",
                    nsvc,
                )
                modules.append(module)
                continue

            rebuild_strategy = params.get("rebuild_strategy")
            if rebuild_strategy and module.rebuild_strategy != rebuild_strategy:
                raise ValidationError(
                    'You cannot change the module\'s "rebuild_strategy" when '
                    "resuming a module build"
                )

            log.debug("Resuming existing module build %r" % module)
            # Reset all component builds that didn't complete
            for component in module.component_builds:
                if not component.is_waiting_for_build and not component.is_completed:
                    component.state = None
                    component.state_reason = None
                    db_session.add(component)
            module.username = username
            prev_state = module.previous_non_failed_state
            if prev_state == models.BUILD_STATES["init"]:
                transition_to = models.BUILD_STATES["init"]
            else:
                transition_to = models.BUILD_STATES["wait"]
                module.batch = 0
            module.transition(db_session, conf, transition_to, "Resubmitted by %s" % username)
            log.info("Resumed existing module build in previous state %s" % module.state)
        else:
            # make NSVC unique for every scratch build
            context_suffix = ""
            if params.get("scratch", False):
                log.debug("Checking for existing scratch module builds by NSVC")
                scrmods = models.ModuleBuild.get_scratch_builds_from_nsvc(
                    db_session, *nsvc.split(":"))
                scrmod_contexts = [scrmod.context for scrmod in scrmods]
                log.debug(
                    "Found %d previous scratch module build context(s): %s",
                    len(scrmods), ",".join(scrmod_contexts),
                )
                # append incrementing counter to context
                context_suffix = "_" + str(len(scrmods) + 1)
                mmd.set_context(mmd.get_context() + context_suffix)
            else:
                # In case the branch is defined, check whether user is allowed to submit
                # non-scratch build from this branch. Note that the branch is always defined
                # for official builds from SCM, because it is requested in views.py.
                branch = params.get("branch")
                if branch:
                    for regex in conf.scratch_build_only_branches:
                        branch_search = re.search(regex, branch)
                        if branch_search:
                            raise ValidationError(
                                "Only scratch module builds can be built from this branch."
                            )

            log.debug("Creating new module build")
            module = models.ModuleBuild.create(
                db_session,
                conf,
                name=mmd.get_module_name(),
                stream=mmd.get_stream_name(),
                version=str(mmd.get_version()),
                modulemd=mmd_to_str(mmd),
                scmurl=params.get("scmurl"),
                username=username,
                rebuild_strategy=params.get("rebuild_strategy"),
                reused_module_id=params.get("reuse_components_from"),
                scratch=params.get("scratch"),
                srpms=params.get("srpms"),
            )
            # Derive the contexts from the modulemd, then re-apply the scratch
            # counter suffix so the stored context stays unique.
            module.build_context, module.runtime_context, module.context, \
                module.build_context_no_bms = module.contexts_from_mmd(module.modulemd)
            module.context += context_suffix

        all_modules_skipped = False
        db_session.add(module)
        db_session.commit()
        modules.append(module)
        log.info('The user "%s" submitted the build "%s"', username, nsvc)

    if all_modules_skipped:
        err_msg = (
            "Module (state=%s) already exists. Only a new build, resubmission of "
            "a failed build or build against new buildrequirements is "
            "allowed." % module.state
        )
        log.error(err_msg)
        raise Conflict(err_msg)

    return modules

View File

@@ -18,7 +18,6 @@ from module_build_service.common.utils import load_mmd, import_mmd
import module_build_service.messaging
import module_build_service.scheduler.consumer
import module_build_service.scheduler.handlers.repos
import module_build_service.utils
from module_build_service.errors import Forbidden
from module_build_service import app, models, conf, build_logs, log
from module_build_service.db_session import db_session
@@ -37,6 +36,7 @@ import json
import itertools
from module_build_service.builder import GenericBuilder
from module_build_service.builder.MockModuleBuilder import load_local_builds
from module_build_service.builder.utils import validate_koji_tag
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
from tests import clean_database, read_staged_data, staged_data_filename
@@ -672,7 +672,7 @@ class TestBuild(BaseTestBuild):
new_callable=PropertyMock,
return_value=True,
)
@patch("module_build_service.utils.submit._is_eol_in_pdc", return_value=True)
@patch("module_build_service.common.submit._is_eol_in_pdc", return_value=True)
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_eol_module(
@@ -1906,7 +1906,7 @@ class TestLocalBuild(BaseTestBuild):
"""
Tests local module build dependency.
"""
module_build_service.utils.load_local_builds(["platform"])
load_local_builds(["platform"])
FakeSCM(
mocked_scm,
"testmodule",

View File

@@ -5,18 +5,23 @@ import tempfile
import shutil
from textwrap import dedent
import mock
import kobo.rpmlib
import koji
import mock
import pytest
from module_build_service import conf, models
from module_build_service.common.utils import load_mmd, mmd_to_str
from module_build_service.db_session import db_session
from module_build_service.models import ModuleBuild, ComponentBuild
from module_build_service.builder.MockModuleBuilder import (
import_fake_base_module, import_builds_from_local_dnf_repos, MockModuleBuilder,
import_fake_base_module,
import_builds_from_local_dnf_repos,
load_local_builds,
MockModuleBuilder,
)
from tests import clean_database, make_module_in_db, read_staged_data
from module_build_service.scheduler import events
from tests import clean_database, make_module_in_db, read_staged_data, staged_data_filename
class TestMockModuleBuilder:
@@ -300,3 +305,73 @@ class TestOfflineLocalBuilds:
module_build = models.ModuleBuild.get_build_from_nsvc(
db_session, "platform", "y", 1, "000000")
assert module_build
@mock.patch(
"module_build_service.config.Config.mock_resultsdir",
new_callable=mock.PropertyMock,
return_value=staged_data_filename("local_builds")
)
@mock.patch(
"module_build_service.config.Config.system", new_callable=mock.PropertyMock, return_value="mock"
)
class TestLocalBuilds:
def setup_method(self):
clean_database()
events.scheduler.reset()
def teardown_method(self):
clean_database()
events.scheduler.reset()
def test_load_local_builds_name(self, conf_system, conf_resultsdir):
load_local_builds("testmodule")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080816/results")
def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir):
load_local_builds("testmodule:master")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080816/results")
def test_load_local_builds_name_stream_non_existing(
self, conf_system, conf_resultsdir
):
with pytest.raises(RuntimeError):
load_local_builds("testmodule:x")
models.ModuleBuild.local_modules(db_session)
def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir):
load_local_builds("testmodule:master:20170816080815")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080815/results")
def test_load_local_builds_name_stream_version_non_existing(
self, conf_system, conf_resultsdir
):
with pytest.raises(RuntimeError):
load_local_builds("testmodule:master:123")
models.ModuleBuild.local_modules(db_session)
def test_load_local_builds_platform(self, conf_system, conf_resultsdir):
load_local_builds("platform")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")
def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir):
load_local_builds("platform:f28")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

View File

@@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
import mock
from module_build_service.common.submit import _is_eol_in_pdc
@mock.patch("module_build_service.common.submit.requests")
def test_pdc_eol_check(requests):
""" Push mock pdc responses through the eol check function. """
response = mock.Mock()
response.json.return_value = {
"results": [{
"id": 347907,
"global_component": "mariadb",
"name": "10.1",
"slas": [{"id": 694207, "sla": "security_fixes", "eol": "2019-12-01"}],
"type": "module",
"active": True,
"critical_path": False,
}]
}
requests.get.return_value = response
is_eol = _is_eol_in_pdc("mariadb", "10.1")
assert not is_eol
response.json.return_value["results"][0]["active"] = False
is_eol = _is_eol_in_pdc("mariadb", "10.1")
assert is_eol

View File

@@ -7,11 +7,12 @@ from mock import patch, PropertyMock
import pytest
import module_build_service.resolver as mbs_resolver
from module_build_service import conf, models, utils, Modulemd
from module_build_service import conf, models, Modulemd
from module_build_service.common.utils import import_mmd, load_mmd, mmd_to_str
from module_build_service.models import ModuleBuild
from module_build_service.errors import UnprocessableEntity
from module_build_service.db_session import db_session
from module_build_service.builder.MockModuleBuilder import load_local_builds
import tests
@@ -139,7 +140,7 @@ class TestDBModule:
"""
Tests that it returns the requires of the buildrequires recursively
"""
utils.load_local_builds(["platform", "parent", "child", "testmodule"])
load_local_builds(["platform", "parent", "child", "testmodule"])
build = models.ModuleBuild.local_modules(db_session, "child", "master")
resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db")
@@ -260,7 +261,7 @@ class TestDBModule:
"""
Test that profiles get resolved recursively on local builds
"""
utils.load_local_builds(["platform"])
load_local_builds(["platform"])
mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()
resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")
result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))

View File

@@ -3,9 +3,9 @@
from mock import patch, PropertyMock, Mock, call
from module_build_service import app, conf
from module_build_service.builder.MockModuleBuilder import load_local_builds
from module_build_service.common.utils import load_mmd, mmd_to_str
import module_build_service.resolver as mbs_resolver
import module_build_service.utils
from module_build_service.db_session import db_session
import module_build_service.models
import tests
@@ -328,7 +328,7 @@ class TestMBSModule:
self, local_builds, conf_system, formatted_testmodule_mmd
):
tests.clean_database()
module_build_service.utils.load_local_builds(["platform"])
load_local_builds(["platform"])
resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")
result = resolver.resolve_profiles(
@@ -450,7 +450,7 @@ class TestMBSModule:
):
tests.clean_database()
with app.app_context():
module_build_service.utils.load_local_builds(["testmodule"])
load_local_builds(["testmodule"])
resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="mbs")
result = resolver.get_buildrequired_modulemds(

View File

@@ -42,7 +42,7 @@ class TestModuleInit:
@patch(
"module_build_service.scheduler.handlers.modules.handle_collisions_with_base_module_rpms"
)
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
@patch("module_build_service.scheduler.submit.get_build_arches", return_value=["x86_64"])
def init_basic(self, db_session, get_build_arches, hcwbmr, rscm, mocked_scm, built_rpms):
FakeSCM(
mocked_scm,
@@ -102,7 +102,7 @@ class TestModuleInit:
assert len(mmd_to_str(old_mmd)) == len(mmd_to_str(new_mmd))
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
@patch("module_build_service.scheduler.submit.get_build_arches", return_value=["x86_64"])
def test_init_scm_not_available(self, get_build_arches, mocked_scm):
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
@@ -123,7 +123,7 @@ class TestModuleInit:
return_value=True,
)
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
@patch("module_build_service.scheduler.submit.get_build_arches", return_value=["x86_64"])
def test_init_includedmodule(
self, get_build_arches, mocked_scm, mocked_mod_allow_repo
):
@@ -159,7 +159,7 @@ class TestModuleInit:
@patch("module_build_service.models.ModuleBuild.from_module_event")
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
@patch("module_build_service.scheduler.submit.get_build_arches", return_value=["x86_64"])
def test_init_when_get_latest_raises(
self, get_build_arches, mocked_scm, mocked_from_module_event):
FakeSCM(

View File

@@ -1,124 +1,44 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
import io
import tempfile
import hashlib
from os import path, mkdir
from shutil import copyfile, rmtree
from datetime import datetime
from werkzeug.datastructures import FileStorage
from mock import patch
import mock
import pytest
from module_build_service.common.utils import load_mmd, load_mmd_file, mmd_to_str
import module_build_service.utils
from module_build_service.db_session import db_session
import module_build_service.scm
from module_build_service import app, models, conf
from module_build_service.errors import ValidationError, UnprocessableEntity
from module_build_service.utils.submit import format_mmd
from module_build_service.errors import UnprocessableEntity
import module_build_service.scheduler.handlers.components
from module_build_service.scheduler.submit import (
get_build_arches, format_mmd, record_component_builds, record_module_build_arches
)
from tests import (
clean_database,
init_data,
read_staged_data,
staged_data_filename,
scheduler_init_data,
make_module_in_db,
make_module,
read_staged_data, staged_data_filename)
import mock
import pytest
import module_build_service.scheduler.handlers.components
from module_build_service.db_session import db_session
from module_build_service.scheduler import events
BASE_DIR = path.abspath(path.dirname(__file__))
)
class FakeSCM(object):
def __init__(self, mocked_scm, name, mmd_filename, commit=None):
self.mocked_scm = mocked_scm
self.name = name
self.commit = commit
self.mmd_filename = mmd_filename
self.sourcedir = None
self.mocked_scm.return_value.checkout = self.checkout
self.mocked_scm.return_value.name = self.name
self.mocked_scm.return_value.branch = "master"
self.mocked_scm.return_value.get_latest = self.get_latest
self.mocked_scm.return_value.commit = self.commit
self.mocked_scm.return_value.repository_root = "https://src.stg.fedoraproject.org/modules/"
self.mocked_scm.return_value.sourcedir = self.sourcedir
self.mocked_scm.return_value.get_module_yaml = self.get_module_yaml
self.mocked_scm.return_value.is_full_commit_hash.return_value = commit and len(commit) == 40
self.mocked_scm.return_value.get_full_commit_hash.return_value = self.get_full_commit_hash
def checkout(self, temp_dir):
self.sourcedir = path.join(temp_dir, self.name)
mkdir(self.sourcedir)
copyfile(staged_data_filename(self.mmd_filename), self.get_module_yaml())
return self.sourcedir
def get_latest(self, ref="master"):
return self.commit if self.commit else ref
def get_module_yaml(self):
return path.join(self.sourcedir, self.name + ".yaml")
def get_full_commit_hash(self, commit_hash=None):
if not commit_hash:
commit_hash = self.commit
sha1_hash = hashlib.sha1("random").hexdigest()
return commit_hash + sha1_hash[len(commit_hash):]
class TestUtils:
class TestSubmit:
def setup_method(self, test_method):
clean_database()
def teardown_method(self, test_method):
clean_database()
@patch("module_build_service.utils.submit.submit_module_build")
def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit):
"""
Tests local module build from a yaml file with the skiptests option
Args:
mock_submit (MagickMock): mocked function submit_module_build, which we then
inspect if it was called with correct arguments
"""
module_dir = tempfile.mkdtemp()
module = models.ModuleBuild.get_by_id(db_session, 3)
mmd = module.mmd()
modulemd_yaml = mmd_to_str(mmd)
modulemd_file_path = path.join(module_dir, "testmodule.yaml")
username = "test"
stream = "dev"
with io.open(modulemd_file_path, "w", encoding="utf-8") as fd:
fd.write(modulemd_yaml)
with open(modulemd_file_path, "rb") as fd:
handle = FileStorage(fd)
module_build_service.utils.submit_module_build_from_yaml(
db_session, username, handle, {}, stream=stream, skiptests=True)
mock_submit_args = mock_submit.call_args[0]
username_arg = mock_submit_args[1]
mmd_arg = mock_submit_args[2]
assert mmd_arg.get_stream_name() == stream
assert "\n\n%__spec_check_pre exit 0\n" in mmd_arg.get_buildopts().get_rpm_macros()
assert username_arg == username
rmtree(module_dir)
@patch("koji.ClientSession")
@mock.patch("koji.ClientSession")
def test_get_build_arches(self, ClientSession):
session = ClientSession.return_value
session.getTag.return_value = {"arches": "ppc64le"}
mmd = load_mmd(read_staged_data("formatted_testmodule"))
r = module_build_service.utils.get_build_arches(mmd, conf)
r = get_build_arches(mmd, conf)
assert r == ["ppc64le"]
@patch("koji.ClientSession")
@mock.patch("koji.ClientSession")
def test_get_build_arches_no_arch_set(self, ClientSession):
"""
When no architecture is set in Koji tag, fallback to conf.arches.
@@ -126,10 +46,10 @@ class TestUtils:
session = ClientSession.return_value
session.getTag.return_value = {"arches": ""}
mmd = load_mmd(read_staged_data("formatted_testmodule"))
r = module_build_service.utils.get_build_arches(mmd, conf)
r = get_build_arches(mmd, conf)
assert set(r) == set(conf.arches)
@patch(
@mock.patch(
"module_build_service.config.Config.allowed_privileged_module_names",
new_callable=mock.PropertyMock,
return_value=["testmodule"],
@@ -140,10 +60,10 @@ class TestUtils:
xmd["mbs"]["koji_tag_arches"] = ["ppc64", "ppc64le"]
mmd.set_xmd(xmd)
r = module_build_service.utils.get_build_arches(mmd, conf)
r = get_build_arches(mmd, conf)
assert r == ["ppc64", "ppc64le"]
@patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})
@mock.patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})
def test_get_build_arches_base_module_override(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
xmd = mmd.get_xmd()
@@ -152,16 +72,16 @@ class TestUtils:
xmd["mbs"] = mbs_options
mmd.set_xmd(xmd)
r = module_build_service.utils.get_build_arches(mmd, conf)
r = get_build_arches(mmd, conf)
assert r == ["x86_64", "i686"]
@patch("module_build_service.utils.submit.get_build_arches")
@mock.patch("module_build_service.scheduler.submit.get_build_arches")
def test_record_module_build_arches(self, get_build_arches):
get_build_arches.return_value = ["x86_64", "i686"]
scheduler_init_data(1)
build = models.ModuleBuild.get_by_id(db_session, 2)
build.arches = []
module_build_service.utils.record_module_build_arches(build.mmd(), build)
record_module_build_arches(build.mmd(), build)
arches = {arch.name for arch in build.arches}
assert arches == set(get_build_arches.return_value)
@@ -176,7 +96,7 @@ class TestUtils:
None,
],
)
@patch("module_build_service.scm.SCM")
@mock.patch("module_build_service.scm.SCM")
def test_format_mmd(self, mocked_scm, scmurl):
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
# For all the RPMs in testmodule, get_latest is called
@@ -198,7 +118,7 @@ class TestUtils:
# Modify the component branches so we can identify them later on
mmd.get_rpm_component("perl-Tangerine").set_ref("f28")
mmd.get_rpm_component("tangerine").set_ref("f27")
module_build_service.utils.format_mmd(mmd, scmurl)
format_mmd(mmd, scmurl)
# Make sure that original refs are not changed.
mmd_pkg_refs = [
@@ -226,7 +146,7 @@ class TestUtils:
mmd_xmd = mmd.get_xmd()
assert mmd_xmd == xmd
@patch("module_build_service.scm.SCM")
@mock.patch("module_build_service.scm.SCM")
def test_record_component_builds_duplicate_components(self, mocked_scm):
# Mock for format_mmd to get components' latest ref
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
@@ -265,12 +185,11 @@ class TestUtils:
)
format_mmd(mmd, module_build.scmurl)
with pytest.raises(UnprocessableEntity) as e:
module_build_service.utils.record_component_builds(
mmd, module_build, main_mmd=module_build.mmd())
record_component_builds(mmd, module_build, main_mmd=module_build.mmd())
assert str(e.value) == error_msg
@patch("module_build_service.scm.SCM")
@mock.patch("module_build_service.scm.SCM")
def test_record_component_builds_set_weight(self, mocked_scm):
# Mock for format_mmd to get components' latest ref
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
@@ -302,7 +221,7 @@ class TestUtils:
db_session.commit()
format_mmd(mmd, module_build.scmurl)
module_build_service.utils.record_component_builds(mmd, module_build)
record_component_builds(mmd, module_build)
db_session.commit()
assert module_build.state == models.BUILD_STATES["init"]
@@ -310,7 +229,7 @@ class TestUtils:
for c in module_build.component_builds:
assert c.weight == 1.5
@patch("module_build_service.scm.SCM")
@mock.patch("module_build_service.scm.SCM")
def test_record_component_builds_component_exists_already(self, mocked_scm):
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
mocked_scm.return_value.get_latest.side_effect = [
@@ -347,7 +266,7 @@ class TestUtils:
db_session.commit()
format_mmd(mmd, module_build.scmurl)
module_build_service.utils.record_component_builds(mmd, module_build)
record_component_builds(mmd, module_build)
db_session.commit()
mmd = original_mmd.copy("testmodule", "master")
@@ -357,9 +276,9 @@ class TestUtils:
ValidationError,
match=r"Component build .+ of module build .+ already exists in database"):
format_mmd(mmd, module_build.scmurl)
module_build_service.utils.record_component_builds(mmd, module_build)
record_component_builds(mmd, module_build)
@patch("module_build_service.scm.SCM")
@mock.patch("module_build_service.scm.SCM")
def test_format_mmd_arches(self, mocked_scm):
with app.app_context():
clean_database()
@@ -377,7 +296,7 @@ class TestUtils:
test_archs = ["powerpc", "i486"]
mmd1 = load_mmd_file(testmodule_mmd_path)
module_build_service.utils.format_mmd(mmd1, None)
format_mmd(mmd1, None)
for pkg_name in mmd1.get_rpm_component_names():
pkg = mmd1.get_rpm_component(pkg_name)
@@ -391,14 +310,14 @@ class TestUtils:
for arch in test_archs:
pkg.add_restricted_arch(arch)
module_build_service.utils.format_mmd(mmd2, None)
format_mmd(mmd2, None)
for pkg_name in mmd2.get_rpm_component_names():
pkg = mmd2.get_rpm_component(pkg_name)
assert set(pkg.get_arches()) == set(test_archs)
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.ThreadPool")
@mock.patch("module_build_service.scm.SCM")
@mock.patch("module_build_service.scheduler.submit.ThreadPool")
def test_format_mmd_update_time_modified(self, tp, mocked_scm):
init_data()
build = models.ModuleBuild.get_by_id(db_session, 2)
@@ -411,170 +330,8 @@ class TestUtils:
mmd = load_mmd(read_staged_data("testmodule"))
with patch("module_build_service.utils.submit.datetime") as dt:
with mock.patch("module_build_service.scheduler.submit.datetime") as dt:
dt.utcnow.return_value = test_datetime
module_build_service.utils.format_mmd(mmd, None, build, db_session)
format_mmd(mmd, None, build, db_session)
assert build.time_modified == test_datetime
@patch("module_build_service.utils.submit.requests")
def test_pdc_eol_check(self, requests):
""" Push mock pdc responses through the eol check function. """
response = mock.Mock()
response.json.return_value = {
"results": [{
"id": 347907,
"global_component": "mariadb",
"name": "10.1",
"slas": [{"id": 694207, "sla": "security_fixes", "eol": "2019-12-01"}],
"type": "module",
"active": True,
"critical_path": False,
}]
}
requests.get.return_value = response
is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1")
assert not is_eol
response.json.return_value["results"][0]["active"] = False
is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1")
assert is_eol
def test_get_prefixed_version_f28(self):
scheduler_init_data(1)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
v = module_build_service.utils.submit.get_prefixed_version(build_one.mmd())
assert v == 2820180205135154
def test_get_prefixed_version_fl701(self):
scheduler_init_data(1)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
mmd = build_one.mmd()
xmd = mmd.get_xmd()
xmd["mbs"]["buildrequires"]["platform"]["stream"] = "fl7.0.1-beta"
mmd.set_xmd(xmd)
v = module_build_service.utils.submit.get_prefixed_version(mmd)
assert v == 7000120180205135154
@patch("module_build_service.utils.submit.generate_expanded_mmds")
def test_submit_build_new_mse_build(self, generate_expanded_mmds):
"""
Tests that finished build can be resubmitted in case the resubmitted
build adds new MSE build (it means there are new expanded
buildrequires).
"""
build = make_module_in_db("foo:stream:0:c1")
assert build.state == models.BUILD_STATES["ready"]
mmd1 = build.mmd()
mmd2 = build.mmd()
mmd2.set_context("c2")
generate_expanded_mmds.return_value = [mmd1, mmd2]
# Create a copy of mmd1 without xmd.mbs, since that will cause validate_mmd to fail
mmd1_copy = mmd1.copy()
mmd1_copy.set_xmd({})
builds = module_build_service.utils.submit_module_build(db_session, "foo", mmd1_copy, {})
ret = {b.mmd().get_context(): b.state for b in builds}
assert ret == {"c1": models.BUILD_STATES["ready"], "c2": models.BUILD_STATES["init"]}
assert builds[0].siblings(db_session) == [builds[1].id]
assert builds[1].siblings(db_session) == [builds[0].id]
@patch("module_build_service.utils.submit.generate_expanded_mmds")
@patch(
"module_build_service.config.Config.scratch_build_only_branches",
new_callable=mock.PropertyMock,
return_value=["^private-.*"],
)
def test_submit_build_scratch_build_only_branches(self, cfg, generate_expanded_mmds):
"""
Tests the "scratch_build_only_branches" config option.
"""
mmd = make_module("foo:stream:0:c1")
generate_expanded_mmds.return_value = [mmd]
# Create a copy of mmd1 without xmd.mbs, since that will cause validate_mmd to fail
mmd_copy = mmd.copy()
mmd_copy.set_xmd({})
with pytest.raises(ValidationError,
match="Only scratch module builds can be built from this branch."):
module_build_service.utils.submit_module_build(
db_session, "foo", mmd_copy, {"branch": "private-foo"})
module_build_service.utils.submit_module_build(
db_session, "foo", mmd_copy, {"branch": "otherbranch"})
@patch(
"module_build_service.config.Config.mock_resultsdir",
new_callable=mock.PropertyMock,
return_value=staged_data_filename("local_builds")
)
@patch(
"module_build_service.config.Config.system", new_callable=mock.PropertyMock, return_value="mock"
)
class TestLocalBuilds:
def setup_method(self):
clean_database()
events.scheduler.reset()
def teardown_method(self):
clean_database()
events.scheduler.reset()
def test_load_local_builds_name(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("testmodule")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080816/results")
def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("testmodule:master")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080816/results")
def test_load_local_builds_name_stream_non_existing(
self, conf_system, conf_resultsdir
):
with pytest.raises(RuntimeError):
module_build_service.utils.load_local_builds("testmodule:x")
models.ModuleBuild.local_modules(db_session)
def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("testmodule:master:20170816080815")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080815/results")
def test_load_local_builds_name_stream_version_non_existing(
self, conf_system, conf_resultsdir
):
with pytest.raises(RuntimeError):
module_build_service.utils.load_local_builds("testmodule:master:123")
models.ModuleBuild.local_modules(db_session)
def test_load_local_builds_platform(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("platform")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")
def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("platform:f28")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")

View File

@@ -34,7 +34,7 @@ from module_build_service.errors import UnprocessableEntity
from module_build_service.models import ModuleBuild, BUILD_STATES, ComponentBuild
from module_build_service import app, version
import module_build_service.config as mbs_config
import module_build_service.utils.submit
import module_build_service.web.submit
user = ("Homer J. Simpson", {"packager"})
@@ -973,7 +973,7 @@ class TestViews:
assert data["state_trace"][0]["state"] == 0
assert data["tasks"] == {}
assert data["siblings"] == []
module_build_service.utils.load_mmd(data["modulemd"])
load_mmd(data["modulemd"])
# Make sure the buildrequires entry was created
module = ModuleBuild.get_by_id(db_session, 8)
@@ -1581,7 +1581,7 @@ class TestViews:
rv = self.client.post(post_url, data=json.dumps(json_input))
data = json.loads(rv.data)
mmd = module_build_service.utils.load_mmd(data[0]["modulemd"])
mmd = load_mmd(data[0]["modulemd"])
assert len(mmd.get_dependencies()) == 1
dep = mmd.get_dependencies()[0]
assert set(dep.get_buildtime_streams("platform")) == expected_br
@@ -2120,7 +2120,7 @@ class TestViews:
assert data["state_trace"][0]["state"] == 0
assert data["tasks"] == {}
assert data["siblings"] == []
module_build_service.utils.load_mmd(data["modulemd"])
load_mmd(data["modulemd"])
# Make sure the buildrequires entry was created
module = ModuleBuild.get_by_id(db_session, 8)
@@ -2216,7 +2216,7 @@ class TestViews:
assert data["state_trace"][0]["state"] == 0
assert data["tasks"] == {}
assert data["siblings"] == []
module_build_service.utils.load_mmd(data["modulemd"])
load_mmd(data["modulemd"])
# Make sure the buildrequires entry was created
module = ModuleBuild.get_by_id(db_session, 8)
@@ -2334,7 +2334,7 @@ class TestViews:
data = json.loads(rv.data)
assert rv.status_code == 201
mmd = module_build_service.utils.load_mmd(data[0]["modulemd"])
mmd = load_mmd(data[0]["modulemd"])
assert len(mmd.get_dependencies()) == 1
dep = mmd.get_dependencies()[0]
if platform_override:
@@ -2382,7 +2382,7 @@ class TestViews:
data = json.loads(rv.data)
assert rv.status_code == 201
mmd = module_build_service.utils.load_mmd(data[0]["modulemd"])
mmd = load_mmd(data[0]["modulemd"])
assert len(mmd.get_dependencies()) == 1
dep = mmd.get_dependencies()[0]
# The buildrequire_override value should take precedence over the stream override from
@@ -2450,7 +2450,7 @@ class TestViews:
post_url, data=json.dumps({"branch": "product1.2", "scmurl": scm_url}))
assert rv.status_code == 201
data = json.loads(rv.data)[0]
mmd = module_build_service.utils.load_mmd(data["modulemd"])
mmd = load_mmd(data["modulemd"])
assert mmd.get_xmd()["mbs"]["disttag_marking"] == "product12"
@patch("module_build_service.auth.get_user", return_value=user)
@@ -2592,7 +2592,7 @@ class TestViews:
),
)
@patch.object(
module_build_service.utils.submit,
module_build_service.web.submit,
"datetime",
new_callable=partial(Mock, wraps=datetime),
)

View File

@@ -0,0 +1,131 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
import io
from os import path
from shutil import rmtree
import tempfile
import mock
import pytest
from werkzeug.datastructures import FileStorage
from module_build_service import models
from module_build_service.common.utils import mmd_to_str
from module_build_service.db_session import db_session
from module_build_service.errors import ValidationError
from module_build_service.web.submit import (
get_prefixed_version, submit_module_build, submit_module_build_from_yaml
)
from tests import (
clean_database,
scheduler_init_data,
make_module_in_db,
make_module,
)
class TestSubmit:
    """Tests for the platform-stream version prefixing done at submit time."""

    def setup_method(self, test_method):
        # Every test starts from a pristine database.
        clean_database()

    def test_get_prefixed_version_f28(self):
        """A plain numeric platform stream (f28) produces a short version prefix."""
        scheduler_init_data(1)
        module = models.ModuleBuild.get_by_id(db_session, 2)
        assert get_prefixed_version(module.mmd()) == 2820180205135154

    def test_get_prefixed_version_fl701(self):
        """A dotted platform stream (fl7.0.1-beta) produces an expanded version prefix."""
        scheduler_init_data(1)
        module = models.ModuleBuild.get_by_id(db_session, 2)
        mmd = module.mmd()
        xmd = mmd.get_xmd()
        # Override the buildrequired platform stream before computing the version.
        xmd["mbs"]["buildrequires"]["platform"]["stream"] = "fl7.0.1-beta"
        mmd.set_xmd(xmd)
        assert get_prefixed_version(mmd) == 7000120180205135154
@pytest.mark.usefixtures("reuse_component_init_data")
class TestUtilsComponentReuse:
    """Tests for submit_module_build and its YAML file entry point."""

    @mock.patch("module_build_service.web.submit.submit_module_build")
    def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit):
        """
        Tests local module build from a yaml file with the skiptests option

        Args:
            mock_submit (MagicMock): mocked function submit_module_build, which we then
                inspect if it was called with correct arguments
        """
        module_dir = tempfile.mkdtemp()

        module = models.ModuleBuild.get_by_id(db_session, 3)
        mmd = module.mmd()
        modulemd_yaml = mmd_to_str(mmd)
        modulemd_file_path = path.join(module_dir, "testmodule.yaml")

        username = "test"
        stream = "dev"

        with io.open(modulemd_file_path, "w", encoding="utf-8") as fd:
            fd.write(modulemd_yaml)

        with open(modulemd_file_path, "rb") as fd:
            handle = FileStorage(fd)
            submit_module_build_from_yaml(
                db_session, username, handle, {}, stream=stream, skiptests=True)
            # Positional args passed to the mocked submit_module_build:
            # (db_session, username, mmd, params).
            mock_submit_args = mock_submit.call_args[0]
            username_arg = mock_submit_args[1]
            mmd_arg = mock_submit_args[2]
            assert mmd_arg.get_stream_name() == stream
            # skiptests=True is expected to inject an rpm macro disabling %check.
            assert "\n\n%__spec_check_pre exit 0\n" in mmd_arg.get_buildopts().get_rpm_macros()
            assert username_arg == username
        rmtree(module_dir)

    @mock.patch("module_build_service.web.submit.generate_expanded_mmds")
    def test_submit_build_new_mse_build(self, generate_expanded_mmds):
        """
        Tests that finished build can be resubmitted in case the resubmitted
        build adds new MSE build (it means there are new expanded
        buildrequires).
        """
        build = make_module_in_db("foo:stream:0:c1")
        assert build.state == models.BUILD_STATES["ready"]

        mmd1 = build.mmd()
        mmd2 = build.mmd()
        mmd2.set_context("c2")

        generate_expanded_mmds.return_value = [mmd1, mmd2]
        # Create a copy of mmd1 without xmd.mbs, since that will cause validate_mmd to fail
        mmd1_copy = mmd1.copy()
        mmd1_copy.set_xmd({})

        builds = submit_module_build(db_session, "foo", mmd1_copy, {})
        ret = {b.mmd().get_context(): b.state for b in builds}
        # The already-finished c1 build stays ready; only the new c2 context is scheduled.
        assert ret == {"c1": models.BUILD_STATES["ready"], "c2": models.BUILD_STATES["init"]}

        assert builds[0].siblings(db_session) == [builds[1].id]
        assert builds[1].siblings(db_session) == [builds[0].id]

    @mock.patch("module_build_service.web.submit.generate_expanded_mmds")
    @mock.patch(
        "module_build_service.config.Config.scratch_build_only_branches",
        new_callable=mock.PropertyMock,
        return_value=["^private-.*"],
    )
    def test_submit_build_scratch_build_only_branches(self, cfg, generate_expanded_mmds):
        """
        Tests the "scratch_build_only_branches" config option.
        """
        mmd = make_module("foo:stream:0:c1")
        generate_expanded_mmds.return_value = [mmd]
        # Create a copy of mmd1 without xmd.mbs, since that will cause validate_mmd to fail
        mmd_copy = mmd.copy()
        mmd_copy.set_xmd({})

        # A branch matching a scratch-only pattern must be rejected for a regular build.
        with pytest.raises(
            ValidationError,
            match="Only scratch module builds can be built from this branch.",
        ):
            submit_module_build(db_session, "foo", mmd_copy, {"branch": "private-foo"})

        # A non-matching branch is accepted and submits normally.
        submit_module_build(db_session, "foo", mmd_copy, {"branch": "otherbranch"})