Support local build of modules with MSE.

This commit is contained in:
Jan Kaluza
2018-03-21 15:53:11 +01:00
committed by mprahl
parent 604a336649
commit 9503e4624e
9 changed files with 399 additions and 424 deletions

View File

@@ -51,6 +51,10 @@ class ProgrammingError(ValueError):
pass
class StreamAmbigous(ValueError):
pass
def json_error(status, error, message):
response = jsonify(
{'status': status,

View File

@@ -36,6 +36,7 @@ from module_build_service.utils import (
submit_module_build_from_yaml,
load_local_builds, load_mmd, import_mmd
)
from module_build_service.errors import StreamAmbigous
import module_build_service.messaging
import module_build_service.scheduler.consumer
@@ -103,7 +104,9 @@ def import_module(mmd_file):
@manager.option('--file', action='store', dest="yaml_file")
@manager.option('--skiptests', action='store_true', dest="skiptests")
@manager.option('-l', '--add-local-build', action='append', default=None, dest='local_build_nsvs')
def build_module_locally(local_build_nsvs=None, yaml_file=None, stream=None, skiptests=False):
@manager.option('-s', '--set-stream', action='append', default=[], dest='default_streams')
def build_module_locally(local_build_nsvs=None, yaml_file=None, stream=None, skiptests=False,
default_streams=None):
""" Performs local module build using Mock
"""
if 'SERVER_NAME' not in app.config or not app.config['SERVER_NAME']:
@@ -126,16 +129,31 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, stream=None, ski
db.create_all()
load_local_builds(local_build_nsvs)
optional_params = {}
optional_params["local_build"] = True
optional_params["default_streams"] = {}
for ns in default_streams:
name, stream = ns.split(":")
optional_params["default_streams"][name] = stream
username = getpass.getuser()
if yaml_file and yaml_file.endswith(".yaml"):
yaml_file_path = os.path.abspath(yaml_file)
with open(yaml_file_path) as fd:
filename = os.path.basename(yaml_file)
handle = FileStorage(fd)
handle.filename = filename
submit_module_build_from_yaml(username, handle, str(stream), skiptests)
else:
if not yaml_file or not yaml_file.endswith(".yaml"):
raise IOError("Provided modulemd file is not a yaml file.")
yaml_file_path = os.path.abspath(yaml_file)
with open(yaml_file_path) as fd:
filename = os.path.basename(yaml_file)
handle = FileStorage(fd)
handle.filename = filename
try:
submit_module_build_from_yaml(
username, handle, str(stream), skiptests, optional_params)
except StreamAmbigous as e:
logging.error(str(e))
logging.error(
"Use '-s module_name:module_stream' to choose the stream")
return
stop = module_build_service.scheduler.make_simple_stop_condition(db.session)
# Run the consumer until stop_condition returns True

View File

@@ -38,6 +38,35 @@ class DBResolver(GenericResolver):
def __init__(self, config):
self.config = config
def get_module_modulemds(self, name, stream, version=None, context=None, strict=False):
    """
    Gets the module modulemds from the resolver (the MBS database).

    :param name: a string of the module's name
    :param stream: a string of the module's stream
    :param version: a string or int of the module's version. When None, latest version will
        be returned.
    :param context: a string of the module's context. When None, all contexts will
        be returned.
    :kwarg strict: Normally this function returns [] if no module can be
        found. If strict=True, then a ValueError is raised.
    :return: List of Modulemd metadata instances matching the query
    """
    with models.make_session(self.config) as session:
        if version and context:
            # Exact NSVC lookup; get_build_from_nsvc returns None on a miss,
            # so guard it instead of building a [None] list that would crash
            # on build.mmd() below.
            build = models.ModuleBuild.get_build_from_nsvc(
                session, name, stream, version, context)
            builds = [build] if build else []
        elif not version and not context:
            builds = models.ModuleBuild.get_last_builds_in_stream(
                session, name, stream)
        else:
            # Bug fix: the original raised `NotImplemented(...)`, but
            # NotImplemented is a sentinel value, not an exception class —
            # calling it raises a confusing TypeError. NotImplementedError
            # is the proper exception for an unsupported combination.
            raise NotImplementedError(
                "This combination of name/stream/version/context is not implemented")

        if not builds and strict:
            raise ValueError("Cannot find any module build for %s:%s "
                             "in MBS database" % (name, stream))
        return [build.mmd() for build in builds]
def get_module_tag(self, name, stream, version, context, strict=False):
"""
Gets the module tag from the resolver. Since the resolver is the DB, it is just generated

View File

@@ -28,15 +28,13 @@
import pdc_client
from module_build_service import db
from module_build_service import models, Modulemd
from module_build_service import models
from module_build_service.errors import UnprocessableEntity
from module_build_service.resolver.base import GenericResolver
import inspect
import pprint
import logging
import six
import copy
import kobo.rpmlib
log = logging.getLogger()
@@ -69,107 +67,29 @@ class PDCResolver(GenericResolver):
insecure=self.config.pdc_insecure,
)
def _get_variant_dict(self, data):
def _query_from_nsvc(self, name, stream, version=None, context=None, active=None):
"""
:param data: one of following
pdc variant_dict {'variant_id': value, 'variant_version': value, }
module dict {'name': value, 'version': value }
modulemd
Generates dict with PDC query.
:return final list of module_info which pass repoclosure
:param str name: Name of the module to query.
:param str stream: Stream of the module to query.
:param str version/int: Version of the module to query.
:param str context: Context of the module to query.
:param bool active: Include "active" in a query.
"""
def is_module_dict(data):
if not isinstance(data, dict):
return False
query = {
"variant_id": name,
"variant_version": stream,
}
if active is not None:
query["active"] = active
if version is not None:
query["variant_release"] = str(version)
if context is not None:
query["variant_context"] = context
return query
for attr in ('name', 'version'):
if attr not in data.keys():
return False
return True
def is_variant_dict(data):
if not isinstance(data, dict):
return False
if ('variant_id' not in data or
('variant_stream' not in data and
'variant_version' not in data)):
return False
return True
def is_modulemd(data):
return isinstance(data, Modulemd.Module)
def is_module_str(data):
return isinstance(data, six.string_types)
result = None
if is_module_str(data):
result = self._variant_dict_from_str(data)
elif is_modulemd(data):
result = {
'variant_id': data.get_name(),
'variant_release': data.get_version(),
'variant_version': data.get_stream()
}
elif is_variant_dict(data):
result = data.copy()
# This is a transitionary thing until we've ported PDC away from the old nomenclature
if 'variant_version' not in result and 'variant_stream' in result:
result['variant_version'] = result['variant_stream']
del result['variant_stream']
# ensure that variant_type is in result
if 'variant_type' not in result.keys():
result['variant_type'] = 'module'
elif is_module_dict(data):
result = {'variant_id': data['name'], 'variant_version': data['version']}
if data.get('context'):
result['variant_context'] = data['context']
if 'release' in data:
result['variant_release'] = data['release']
if 'active' in data:
result['active'] = data['active']
if not result:
raise RuntimeError("Couldn't get variant_dict from %s" % data)
return result
def _variant_dict_from_str(self, module_str):
"""
:param module_str: a string to match in PDC
:return module_info dict
Example minimal module_info:
{
'variant_id': module_name,
'variant_version': module_version,
'variant_type': 'module'
}
"""
log.debug("variant_dict_from_str(%r)" % module_str)
# best match due several filters not being provided such as variant type ...
module_info = {}
release_start = module_str.rfind('-')
version_start = module_str.rfind('-', 0, release_start)
module_info['variant_release'] = module_str[release_start + 1:]
module_info['variant_version'] = module_str[version_start + 1:release_start]
module_info['variant_id'] = module_str[:version_start]
module_info['variant_type'] = 'module'
return module_info
def _get_module(self, module_info, strict=False):
def _get_modules(self, name, stream, version=None, context=None, active=None, strict=False):
"""
:param module_info: pdc variant_dict, str, mmd or module dict
:param strict: Normally this function returns None if no module can be
@@ -177,20 +97,7 @@ class PDCResolver(GenericResolver):
:return final list of module_info which pass repoclosure
"""
log.debug("get_module(%r, strict=%r)" % (module_info, strict))
variant_dict = self._get_variant_dict(module_info)
query = dict(
variant_id=variant_dict['variant_id'],
variant_version=variant_dict['variant_version'],
)
if variant_dict.get('variant_release'):
query['variant_release'] = variant_dict['variant_release']
if module_info.get('active'):
query['active'] = module_info['active']
if module_info.get('context'):
query['variant_context'] = module_info['context']
query = self._query_from_nsvc(name, stream, version, context, active)
# TODO: So far sorting on Fedora prod PDC instance is broken and it sorts
# only by variant_uid by default. Once the sorting is fixed, we can start
@@ -203,7 +110,7 @@ class PDCResolver(GenericResolver):
retval = self.session['unreleasedvariants']._(page_size=1, **query)
except Exception as ex:
log.debug("error during PDC lookup: %r" % ex)
raise RuntimeError("Error during PDC lookup for module %s" % module_info["name"])
raise RuntimeError("Error during PDC lookup for module %s" % name)
# Error handling
if not retval or len(retval["results"]) == 0:
@@ -212,16 +119,31 @@ class PDCResolver(GenericResolver):
else:
return None
# Jump to last page to latest module release.
if retval['count'] != 1:
query['page'] = retval['count']
retval = self.session['unreleasedvariants']._(page_size=1, **query)
# Get all the contexts of given module in case there is just NS or NSV input.
if not version or not context:
# If the version is not set, we have to jump to last page to get the latest
# version in PDC.
if not version:
query['page'] = retval['count']
retval = self.session['unreleasedvariants']._(page_size=1, **query)
del query['page']
query["variant_release"] = retval["results"][0]["variant_release"]
retval = self.session['unreleasedvariants']._(page_size=-1, **query)
return retval
results = retval["results"]
assert len(results) <= 1, pprint.pformat(retval)
return results[0]
# Error handling
if len(results) == 0:
if strict:
raise UnprocessableEntity("Failed to find module in PDC %r" % query)
else:
return None
return results
def get_module_tag(self, name, stream, version, context, strict=False):
def _get_module(self, name, stream, version=None, context=None, active=None, strict=False):
    """
    Return a single PDC module dict matching name/stream (and optionally
    version/context), delegating the query to self._get_modules().

    :param str name: Name of the module to query.
    :param str stream: Stream of the module to query.
    :param version: Version of the module to query, or None for the latest.
    :param str context: Context of the module to query, or None.
    :param bool active: Passed through to the PDC query's "active" filter.
    :kwarg strict: Passed through; when True a lookup miss raises instead of
        returning None.
    :return: the first module dict returned by _get_modules().
    """
    # NOTE(review): when strict=False and nothing matches, _get_modules()
    # returns None, so the [0] here would raise TypeError — presumably all
    # callers needing a miss to be survivable pass strict=True; confirm.
    return self._get_modules(name, stream, version, context, active, strict)[0]
def get_module_tag(self, name, stream, version=None, context=None, strict=False):
"""
:param name: a module's name
:param stream: a module's stream
@@ -231,115 +153,41 @@ class PDCResolver(GenericResolver):
found. If strict=True, then an UnprocessableEntity is raised.
:return: koji tag string
"""
module_info = {
'name': name,
'version': stream,
'release': str(version),
'context': context
}
return self._get_module(module_info, strict=strict)['koji_tag']
return self._get_module(
name, stream, version, context, active=True, strict=strict)['koji_tag']
def _get_module_modulemd(self, module_info, strict=False):
def get_module_modulemds(self, name, stream, version=None, context=None, strict=False):
"""
:param module_info: list of module_info dicts
:param strict: Normally this function returns None if no module can be
found. If strict=True, then an UnprocessableEntity is raised.
:return: ModuleMetadata instance
Gets the module modulemds from the resolver.
:param name: a string of the module's name
:param stream: a string of the module's stream
:param version: a string or int of the module's version. When None, latest version will
be returned.
:param context: a string of the module's context. When None, all contexts will
be returned.
:kwarg strict: Normally this function returns [] if no module can be
found. If strict=True, then a UnprocessableEntity is raised.
:return: List of Modulemd metadata instances matching the query
"""
yaml = None
module = self._get_module(module_info, strict=strict)
if module:
yaml = module['modulemd']
modules = self._get_modules(name, stream, version, context, active=True, strict=strict)
if not modules:
return []
if not yaml:
if strict:
raise UnprocessableEntity(
"Failed to find modulemd entry in PDC for %r" % module_info)
else:
return None
mmds = []
for module in modules:
if module:
yaml = module['modulemd']
return self.extract_modulemd(yaml, strict=strict)
if not yaml:
if strict:
raise UnprocessableEntity(
"Failed to find modulemd entry in PDC for %r" % module)
else:
return None
def _get_recursively_required_modules(self, info, modules=None,
strict=False):
"""
:param info: pdc variant_dict, str, mmd or module dict
:param modules: Used by recursion only, list of modules found by previous
iteration of this method.
:param strict: Normally this function returns empty list if no module can
be found. If strict=True, then an UnprocessableEntity is raised.
Returns list of modules by recursively querying PDC based on a "requires"
list of an input module represented by `info`. The returned list
therefore contains all modules the input module "requires".
If there are some modules loaded by utils.load_local_builds(...), these
local modules will be used instead of querying PDC for the particular
module found in local module builds database.
The returned list contains only "modulemd" and "koji_tag" fields returned
by PDC, because other fields are not known for local builds.
"""
modules = modules or []
variant_dict = self._get_variant_dict(info)
local_modules = models.ModuleBuild.local_modules(
db.session, variant_dict["variant_id"],
variant_dict['variant_version'])
if local_modules:
local_module = local_modules[0]
log.info("Using local module %r as a dependency.",
local_module)
mmd = local_module.mmd()
module_info = {}
module_info["modulemd"] = local_module.modulemd
module_info["koji_tag"] = local_module.koji_tag
else:
module_info = self._get_module(variant_dict, strict)
module_info = {k: v for k, v in module_info.items()
if k in ["modulemd", "koji_tag"]}
module_info = {
'modulemd': module_info['modulemd'],
'koji_tag': module_info['koji_tag']
}
yaml = module_info['modulemd']
mmd = self.extract_modulemd(yaml)
# Check if we have examined this koji_tag already - no need to do
# it again...
if module_info in modules:
return modules
modules.append(module_info)
# We want to use the same stream as the one used in the time this
# module was built. But we still should fallback to plain mmd.requires
# in case this module depends on some older module for which we did
# not populate mmd.xmd['mbs']['requires'].
mbs_xmd = mmd.get_xmd().get('mbs')
if 'requires' in mbs_xmd.keys():
requires = {name: data['stream'] for name, data in mbs_xmd['requires'].items()}
else:
# Since MBS doesn't support v2 modulemds submitted by a user, we will
# always only have one stream per require. That way it's safe to just take the first
# element of the list.
# TODO: Change this once module stream expansion is implemented
requires = {
name: deps.get()[0]
for name, deps in mmd.get_dependencies()[0].get_requires().items()}
for name, stream in requires.items():
modified_dep = {
'name': name,
'version': stream,
# Only return details about module builds that finished
'active': True,
}
modules = self._get_recursively_required_modules(
modified_dep, modules, strict)
return modules
mmds.append(self.extract_modulemd(yaml, strict=strict))
return mmds
def resolve_profiles(self, mmd, keys):
"""
@@ -373,12 +221,9 @@ class PDCResolver(GenericResolver):
continue
# Find the dep in the built modules in PDC
module_info = {
'variant_id': module_name,
'variant_stream': module_info['stream'],
'variant_release': module_info['version']}
modules = self._get_recursively_required_modules(
module_info, strict=True)
modules = self._get_modules(
module_name, module_info['stream'], module_info['version'],
module_info['context'], strict=True)
for module in modules:
yaml = module['modulemd']
@@ -418,13 +263,8 @@ class PDCResolver(GenericResolver):
if mmd:
queried_mmd = mmd
else:
module_info = {
'name': name,
'version': stream,
'release': str(version),
'context': context
}
queried_module = self._get_module(module_info, strict=strict)
queried_module = self._get_module(
name, stream, version, context, strict=strict)
yaml = queried_module['modulemd']
queried_mmd = self.extract_modulemd(yaml, strict=strict)
@@ -432,20 +272,14 @@ class PDCResolver(GenericResolver):
'buildrequires' not in queried_mmd.get_xmd()['mbs'].keys()):
raise RuntimeError(
'The module "{0!r}" did not contain its modulemd or did not have '
'its xmd attribute filled out in PDC'.format(module_info))
'its xmd attribute filled out in PDC'.format(queried_mmd))
buildrequires = queried_mmd.get_xmd()['mbs']['buildrequires']
# Queue up the next tier of deps that we should look at..
for name, details in buildrequires.items():
modified_dep = {
'name': name,
'version': details['stream'],
'release': details['version'],
# Only return details about module builds that finished
'active': True,
}
modules = self._get_recursively_required_modules(
modified_dep, strict=strict)
modules = self._get_modules(
name, details['stream'], details['version'],
details['context'], active=True, strict=True)
for m in modules:
if m["koji_tag"] in module_tags:
continue
@@ -474,8 +308,18 @@ class PDCResolver(GenericResolver):
Raises RuntimeError on PDC lookup error.
"""
new_requires = copy.deepcopy(requires)
for module_name, module_stream in requires.items():
new_requires = {}
for nsvc in requires:
nsvc_splitted = nsvc.split(":")
if len(nsvc_splitted) == 2:
module_name, module_stream = nsvc_splitted
module_version = None
module_context = None
elif len(nsvc_splitted) == 4:
module_name, module_stream, module_version, module_context = nsvc_splitted
else:
raise ValueError(
"Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)
# Try to find out module dependency in the local module builds
# added by utils.load_local_builds(...).
local_modules = models.ModuleBuild.local_modules(
@@ -494,16 +338,12 @@ class PDCResolver(GenericResolver):
}
continue
# Assumes that module_stream is the stream and not the commit hash
module_info = {
'name': module_name,
'version': module_stream,
'active': True}
commit_hash = None
version = None
filtered_rpms = []
module = self._get_module(module_info, strict=True)
module = self._get_module(
module_name, module_stream, module_version,
module_context, active=True, strict=True)
if module.get('modulemd'):
mmd = self.extract_modulemd(module['modulemd'])
if mmd.get_xmd().get('mbs') and 'commit' in mmd.get_xmd()['mbs'].keys():
@@ -531,6 +371,7 @@ class PDCResolver(GenericResolver):
'ref': commit_hash,
'stream': module_stream,
'version': str(version),
'context': module["variant_context"],
'filtered_rpms': filtered_rpms,
}
else:

View File

@@ -94,6 +94,10 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
raise ValueError('Invalid modulemd')
return mmd
@abstractmethod
def get_module_modulemds(self, name, stream, version=None, context=None, strict=False):
    """
    Abstract interface: return the list of Modulemd metadata instances for
    the module identified by name/stream (optionally narrowed by version and
    context). Concrete resolvers (DB, PDC) must override this; when
    strict=True an empty result should raise instead of returning [].
    """
    raise NotImplementedError()
@abstractmethod
def get_module_tag(self, name, stream, version, context, strict=False):
raise NotImplementedError()

View File

@@ -40,7 +40,7 @@ from sqlalchemy.sql.sqltypes import Boolean as sqlalchemy_boolean
from module_build_service import log, models, Modulemd, api_version
from module_build_service.errors import (ValidationError, UnprocessableEntity,
ProgrammingError, NotFound)
ProgrammingError, NotFound, StreamAmbigous)
from module_build_service import conf, db
from module_build_service.errors import (Forbidden, Conflict)
import module_build_service.messaging
@@ -970,15 +970,27 @@ def submit_module_build_from_scm(username, url, branch, allow_local_url=False,
return submit_module_build(username, url, mmd, scm, optional_params)
def generate_expanded_mmds(session, mmd):
def generate_expanded_mmds(session, mmd, raise_if_stream_ambigous=False, default_streams=None):
"""
Returns list with MMDs with buildrequires and requires set according
to module stream expansion rules. These module metadata can be directly
built using MBS.
:param session: SQLAlchemy DB session.
:param Modulemd.Module mmd: Modulemd metadata with original unexpanded module.
:param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
there are multiple streams for some dependency of module and the module name is not
defined in `default_streams`, so it is not clear which stream should be used.
:param dict default_streams: Dict in {module_name: module_stream, ...} format defining
the default stream to choose for module in case when there are multiple streams to
choose from.
"""
if not session:
session = db.session
if not default_streams:
default_streams = {}
# Create local copy of mmd, because we will expand its dependencies,
# which would change the module.
# TODO: Use copy method once its in released libmodulemd:
@@ -989,13 +1001,13 @@ def generate_expanded_mmds(session, mmd):
current_mmd.set_context(None)
# Expands the MSE streams. This mainly handles '-' prefix in MSE streams.
expand_mse_streams(session, current_mmd)
expand_mse_streams(session, current_mmd, default_streams, raise_if_stream_ambigous)
# Get the list of all MMDs which this module can be possibly built against
# and add them to MMDResolver.
mmd_resolver = MMDResolver()
mmds_for_resolving = get_mmds_required_by_module_recursively(
session, current_mmd)
session, current_mmd, default_streams, raise_if_stream_ambigous)
for m in mmds_for_resolving:
mmd_resolver.add_modules(m)
@@ -1102,10 +1114,41 @@ def generate_expanded_mmds(session, mmd):
def submit_module_build(username, url, mmd, scm, optional_params=None):
"""
Submits new module build.
:param str username: Username of the build's owner.
:param str url: SCM URL of submitted build.
:param Modulemd.Module mmd: Modulemd defining the build.
:param scm.SCM scm: SCM class representing the cloned git repo.
:param dict optional_params: Dict with optional params for a build:
- "local_build" (bool): The module is being built locally (the MBS is
not running in infra, but on local developer's machine).
- "default_streams" (dict): Dict with name:stream mapping defining the stream
to choose for given module name if multiple streams are available to choose from.
- Any optional ModuleBuild class field (str).
:rtype: list with ModuleBuild
:return: List with submitted module builds.
"""
import koji # Placed here to avoid py2/py3 conflicts...
# For local builds, we want the user to choose the exact stream using the default_streams
# in case there are multiple streams to choose from and raise an exception otherwise.
if optional_params and "local_build" in optional_params:
raise_if_stream_ambigous = True
del optional_params["local_build"]
else:
raise_if_stream_ambigous = False
# Get the default_streams if set.
if optional_params and "default_streams" in optional_params:
default_streams = optional_params["default_streams"]
del optional_params["default_streams"]
else:
default_streams = {}
validate_mmd(mmd)
mmds = generate_expanded_mmds(db.session, mmd)
mmds = generate_expanded_mmds(db.session, mmd, raise_if_stream_ambigous, default_streams)
modules = []
for mmd in mmds:
@@ -1521,11 +1564,22 @@ def get_reusable_component(session, module, component_name,
return reusable_component
def _expand_mse_streams(session, name, streams):
def _expand_mse_streams(session, name, streams, default_streams, raise_if_stream_ambigous):
"""
Helper method for `expand_mse_stream()` expanding single name:[streams].
Returns list of expanded streams.
:param session: SQLAlchemy DB session.
:param str name: Name of the module which will be expanded.
:param list streams: List of streams to expand.
:param dict default_streams: Dict in {module_name: module_stream, ...} format defining
the default stream to choose for module in case when there are multiple streams to
choose from.
:param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
there are multiple streams for some dependency of module and the module name is not
defined in `default_streams`, so it is not clear which stream should be used.
"""
default_streams = default_streams or {}
# Stream can be prefixed with '-' sign to define that this stream should
# not appear in a resulting list of streams. There can be two situations:
# a) all streams have '-' prefix. In this case, we treat list of streams
@@ -1536,9 +1590,15 @@ def _expand_mse_streams(session, name, streams):
# '-' prefix to the list of valid streams.
streams_is_blacklist = all(stream.startswith("-") for stream in streams.get())
if streams_is_blacklist or len(streams.get()) == 0:
builds = models.ModuleBuild.get_last_build_in_all_streams(
session, name)
expanded_streams = [build.stream for build in builds]
if name in default_streams:
expanded_streams = [default_streams[name]]
elif raise_if_stream_ambigous:
raise StreamAmbigous(
"There are multiple streams to choose from for module %s." % name)
else:
builds = models.ModuleBuild.get_last_build_in_all_streams(
session, name)
expanded_streams = [build.stream for build in builds]
else:
expanded_streams = []
for stream in streams.get():
@@ -1547,30 +1607,50 @@ def _expand_mse_streams(session, name, streams):
expanded_streams.remove(stream[1:])
else:
expanded_streams.append(stream)
if len(expanded_streams) > 1:
if name in default_streams:
expanded_streams = [default_streams[name]]
elif raise_if_stream_ambigous:
raise StreamAmbigous("There are multiple streams %r to choose from for module %s."
% (expanded_streams, name))
return expanded_streams
def expand_mse_streams(session, mmd):
def expand_mse_streams(session, mmd, default_streams=None, raise_if_stream_ambigous=False):
"""
Expands streams in both buildrequires/requires sections of MMD.
:param session: SQLAlchemy DB session.
:param Modulemd.Module mmd: Modulemd metadata with original unexpanded module.
:param dict default_streams: Dict in {module_name: module_stream, ...} format defining
the default stream to choose for module in case when there are multiple streams to
choose from.
:param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
there are multiple streams for some dependency of module and the module name is not
defined in `default_streams`, so it is not clear which stream should be used.
"""
for deps in mmd.get_dependencies():
expanded = {}
for name, streams in deps.get_requires().items():
streams_set = Modulemd.SimpleSet()
streams_set.set(_expand_mse_streams(session, name, streams))
streams_set.set(_expand_mse_streams(
session, name, streams, default_streams, raise_if_stream_ambigous))
expanded[name] = streams_set
deps.set_requires(expanded)
expanded = {}
for name, streams in deps.get_buildrequires().items():
streams_set = Modulemd.SimpleSet()
streams_set.set(_expand_mse_streams(session, name, streams))
streams_set.set(_expand_mse_streams(
session, name, streams, default_streams, raise_if_stream_ambigous))
expanded[name] = streams_set
deps.set_buildrequires(expanded)
def _get_mmds_from_requires(session, requires, mmds, recursive=False):
def _get_mmds_from_requires(session, requires, mmds, recursive=False,
default_streams=None, raise_if_stream_ambigous=False):
"""
Helper method for get_mmds_required_by_module_recursively returning
the list of module metadata objects defined by `requires` dict.
@@ -1580,12 +1660,28 @@ def _get_mmds_from_requires(session, requires, mmds, recursive=False):
:param mmds: Dictionary with already handled name:streams as a keys and lists
of resulting mmds as values.
:param recursive: If True, the requires are checked recursively.
:param dict default_streams: Dict in {module_name: module_stream, ...} format defining
the default stream to choose for module in case when there are multiple streams to
choose from.
:param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
there are multiple streams for some dependency of module and the module name is not
defined in `default_streams`, so it is not clear which stream should be used.
:return: Dict with name:stream as a key and list with mmds as value.
"""
default_streams = default_streams or {}
# To be able to call itself recursively, we need to store list of mmds
# we have added to global mmds list in this particular call.
added_mmds = {}
resolver = module_build_service.resolver.GenericResolver.create(conf)
for name, streams in requires.items():
streams_to_try = streams.get()
if name in default_streams:
streams_to_try = [default_streams[name]]
elif len(streams_to_try) > 1 and raise_if_stream_ambigous:
raise StreamAmbigous("There are multiple streams %r to choose from for module %s."
% (streams_to_try, name))
# For each valid stream, find the last build in a stream and also all
# its contexts and add mmds of these builds to `mmds` and `added_mmds`.
# Of course only do that if we have not done that already in some
@@ -1595,14 +1691,8 @@ def _get_mmds_from_requires(session, requires, mmds, recursive=False):
if ns in mmds:
continue
builds = models.ModuleBuild.get_last_builds_in_stream(
session, name, stream)
if not builds:
raise ValueError("Cannot find any module build for %s:%s "
"in MBS database" % (name, stream))
else:
mmds[ns] = [build.mmd() for build in builds]
added_mmds[ns] = mmds[ns]
mmds[ns] = resolver.get_module_modulemds(name, stream, strict=True)
added_mmds[ns] = mmds[ns]
# Get the requires recursively.
if recursive:
@@ -1614,7 +1704,8 @@ def _get_mmds_from_requires(session, requires, mmds, recursive=False):
return mmds
def get_mmds_required_by_module_recursively(session, mmd):
def get_mmds_required_by_module_recursively(
session, mmd, default_streams=None, raise_if_stream_ambigous=False):
"""
Returns the list of Module metadata objects of all modules required while
building the module defined by `mmd` module metadata. This presumes the
@@ -1628,6 +1719,12 @@ def get_mmds_required_by_module_recursively(session, mmd):
recursively all the "requires" and finds the latest version of each
required module and also all contexts of these latest versions.
:param dict default_streams: Dict in {module_name: module_stream, ...} format defining
the default stream to choose for module in case when there are multiple streams to
choose from.
:param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
there are multiple streams for some dependency of module and the module name is not
defined in `default_streams`, so it is not clear which stream should be used.
:rtype: list of Modulemd metadata
:return: List of all modulemd metadata of all modules required to build
the module `mmd`.
@@ -1640,13 +1737,17 @@ def get_mmds_required_by_module_recursively(session, mmd):
# At first get all the buildrequires of the module of interest.
for deps in mmd.get_dependencies():
mmds = _get_mmds_from_requires(session, deps.get_buildrequires(), mmds)
mmds = _get_mmds_from_requires(
session, deps.get_buildrequires(), mmds, False, default_streams,
raise_if_stream_ambigous)
# Now get the requires of buildrequires recursively.
for mmd_key in list(mmds.keys()):
for mmd in mmds[mmd_key]:
for deps in mmd.get_dependencies():
mmds = _get_mmds_from_requires(session, deps.get_requires(), mmds, True)
mmds = _get_mmds_from_requires(
session, deps.get_requires(), mmds, True, default_streams,
raise_if_stream_ambigous)
# Make single list from dict of lists.
res = []

View File

@@ -44,6 +44,12 @@ _mmd2 = Modulemd.Module().new_from_file(
_mmd2.upgrade()
TESTMODULE_MODULEMD = _mmd2.dumps()
_mmd3 = Modulemd.Module().new_from_file(
os.path.join(STAGED_DATA_DIR, 'formatted_testmodule.yaml'))
_mmd3.upgrade()
_mmd3.set_context("c2c572ed")
TESTMODULE_MODULEMD_SECOND_CONTEXT = _mmd3.dumps()
class MockPDCFilterAPI(pdc_client.test_helpers.MockAPI):
""" A modified pdc_client.test_helpers.MockAPI that supports basic filtering on GET requests
@@ -57,7 +63,7 @@ class MockPDCFilterAPI(pdc_client.test_helpers.MockAPI):
page_size = filters.get('page_size', 20)
# End of code taken from pdc_client/test_helpers.py
if not (isinstance(data, list) and page_size > 0):
if not isinstance(data, list):
return data
# Keep track of indexes to pop since we can't pop them during the loop
@@ -88,6 +94,9 @@ class MockPDCFilterAPI(pdc_client.test_helpers.MockAPI):
for index in sorted(indexes_to_pop, reverse=True):
rv_data.pop(index)
if page_size <= 0:
return rv_data
# Code taken from pdc_client/test_helpers.py
page = filters.get('page', 1)
pages = int(math.ceil(float(len(rv_data)) / page_size))
@@ -176,6 +185,29 @@ def pdc_module_active(pdc_module_inactive):
return pdc_module_active
@pytest.fixture(scope='function')
def pdc_module_active_two_contexts(pdc_module_active):
# Rename it for clarity
pdc_module_active_two_contexts = pdc_module_active
pdc_module_active_two_contexts.endpoints['unreleasedvariants']['GET'][-1].update({
'active': True,
'rpms': [
'tangerine-0:0.22-6.module+0+814cfa39.noarch.rpm',
'tangerine-0:0.22-6.module+0+814cfa39.src.rpm',
'perl-Tangerine-0:0.22-2.module+0+814cfa39.noarch.rpm',
'perl-Tangerine-0:0.22-2.module+0+814cfa39.src.rpm',
'perl-List-Compare-0:0.53-8.module+0+814cfa39.noarch.rpm',
'perl-List-Compare-0:0.53-8.module+0+814cfa39.src.rpm'
]
})
pdc_module_active_two_contexts.endpoints['unreleasedvariants']['GET'].append(
dict(pdc_module_active.endpoints['unreleasedvariants']['GET'][-1]))
pdc_module_active_two_contexts.endpoints['unreleasedvariants']['GET'][-1].update({
"variant_context": "c2c572ed",
"modulemd": TESTMODULE_MODULEMD_SECOND_CONTEXT})
return pdc_module_active_two_contexts
@pytest.fixture(scope='function')
def pdc_module_reuse(pdc_module_active):
# Rename it for clarity

View File

@@ -21,7 +21,6 @@
# Written by Ralph Bean <rbean@redhat.com>
import os
import copy
from mock import patch, PropertyMock
import pytest
@@ -29,7 +28,7 @@ import pytest
import module_build_service.resolver as mbs_resolver
import module_build_service.utils
import module_build_service.models
from module_build_service import app, db
from module_build_service import app
from module_build_service import glib, Modulemd
import tests
@@ -39,32 +38,21 @@ base_dir = os.path.join(os.path.dirname(__file__), "..")
class TestPDCModule:
def test_get_variant_dict_module_dict_active(self):
"""
Tests that "active" is honored by get_variant_dict(...).
"""
dep = {
'name': "platform",
'version': "master",
'active': True,
}
expected = {
'active': True,
'variant_id': 'platform',
'variant_version': 'master'
}
def test_get_module_modulemds_nsvc(self, pdc_module_active_two_contexts):
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
variant_dict = resolver._get_variant_dict(dep)
assert variant_dict == expected
ret = resolver.get_module_modulemds('testmodule', 'master', '20180205135154', 'c2c572ec')
nsvcs = set(m.dup_nsvc() for m in ret)
expected = set(["testmodule:master:125a91f56532:c2c572ec"])
assert nsvcs == expected
def test_get_module_simple_as_dict(self, pdc_module_active):
query = {'name': 'testmodule', 'version': 'master'}
@pytest.mark.parametrize('kwargs', [{'version': '20180205135154'}, {}])
def test_get_module_modulemds_partial(self, pdc_module_active_two_contexts, kwargs):
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
result = resolver._get_module(query)
assert result['variant_name'] == 'testmodule'
assert result['variant_version'] == 'master'
assert 'build_deps' in result
ret = resolver.get_module_modulemds('testmodule', 'master', **kwargs)
nsvcs = set(m.dup_nsvc() for m in ret)
expected = set(["testmodule:master:125a91f56532:c2c572ec",
"testmodule:master:125a91f56532:c2c572ed"])
assert nsvcs == expected
@pytest.mark.parametrize('empty_buildrequires', [False, True])
def test_get_module_build_dependencies(self, pdc_module_active, empty_buildrequires):
@@ -90,74 +78,6 @@ class TestPDCModule:
'testmodule', 'master', '20180205135154', 'c2c572ec').keys()
assert set(result) == expected
def test_get_module_build_dependencies_recursive(self, pdc_module_active):
"""
Tests that we return just direct build-time dependencies of testmodule.
"""
# Add testmodule2 that requires testmodule
pdc_module_active.endpoints['unreleasedvariants']['GET'].append(
copy.deepcopy(pdc_module_active.endpoints['unreleasedvariants']['GET'][-1]))
pdc_item = pdc_module_active.endpoints['unreleasedvariants']['GET'][-1]
mmd = Modulemd.Module().new_from_string(pdc_item['modulemd'])
mmd.set_name('testmodule2')
mmd.set_version(20180123171545)
requires = mmd.get_dependencies()[0].get_requires()
requires['testmodule'] = Modulemd.SimpleSet()
requires['testmodule'].add('master')
mmd.get_dependencies()[0].set_requires(requires)
xmd = glib.from_variant_dict(mmd.get_xmd())
xmd['mbs']['requires']['testmodule'] = {
'filtered_rpms': [],
'ref': '620ec77321b2ea7b0d67d82992dda3e1d67055b4',
'stream': 'master',
'version': '20180205135154'
}
mmd.set_xmd(glib.dict_values(xmd))
pdc_item.update({
'variant_id': 'testmodule2',
'variant_name': 'testmodule2',
'variant_release': str(mmd.get_version()),
'koji_tag': 'module-ae2adf69caf0e1b6',
'modulemd': mmd.dumps()
})
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
result = resolver.get_module_build_dependencies(
'testmodule2', 'master', '20180123171545', 'c2c572ec').keys()
assert set(result) == set(['module-f28-build'])
@patch("module_build_service.config.Config.system",
new_callable=PropertyMock, return_value="test")
@patch("module_build_service.config.Config.mock_resultsdir",
new_callable=PropertyMock,
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
def test_get_module_build_dependencies_recursive_requires(
self, resultdir, conf_system):
"""
Tests that we return Requires of Buildrequires of a module
recursively.
"""
with app.app_context():
module_build_service.utils.load_local_builds(
["platform", "parent", "child", "testmodule"])
build = module_build_service.models.ModuleBuild.local_modules(
db.session, "child", "master")
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()
local_path = os.path.join(base_dir, 'staged_data', "local_builds")
expected = [
os.path.join(
local_path,
'module-platform-f28-3/results'),
os.path.join(
local_path,
'module-parent-master-20170816080815/results'),
]
assert set(result) == set(expected)
def test_resolve_profiles(self, pdc_module_active):
yaml_path = os.path.join(
base_dir, 'staged_data', 'formatted_testmodule.yaml')
@@ -185,6 +105,7 @@ class TestPDCModule:
new_callable=PropertyMock,
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
def test_resolve_profiles_local_module(self, local_builds, conf_system):
tests.clean_database()
with app.app_context():
module_build_service.utils.load_local_builds(['platform'])

View File

@@ -26,6 +26,7 @@ import pytest
import module_build_service.utils
from module_build_service import models, glib, Modulemd
from module_build_service.errors import StreamAmbigous
from tests import (db, clean_database)
@@ -161,53 +162,77 @@ class TestUtilsModuleStreamExpansion:
contexts = set([mmd.get_context() for mmd in mmds])
assert set(['3031e5a5', '6d10e00e']) == contexts
@pytest.mark.parametrize('requires,build_requires,expected_xmd,expected_buildrequires', [
({"gtk": ["1", "2"]}, {"gtk": ["1", "2"]},
set([
frozenset(['platform:f28:0:c10', 'gtk:2:0:c4']),
frozenset(['platform:f28:0:c10', 'gtk:1:0:c2'])
]),
set([
frozenset(['gtk:1']),
frozenset(['gtk:2']),
])),
@pytest.mark.parametrize(
'requires,build_requires,stream_ambigous,expected_xmd,expected_buildrequires', [
({"gtk": ["1", "2"]}, {"gtk": ["1", "2"]}, True,
set([
frozenset(['platform:f28:0:c10', 'gtk:2:0:c4']),
frozenset(['platform:f28:0:c10', 'gtk:1:0:c2'])
]),
set([
frozenset(['gtk:1']),
frozenset(['gtk:2']),
])),
({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], "foo": ["1"]},
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], "foo": ["1"]}, False,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]},
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["1"], "foo": ["1"], "platform": ["f28"]}, False,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
({"gtk": ["-2"], "foo": ["-2"]}, {"gtk": ["-2"], "foo": ["-2"]},
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
({"gtk": ["-2"], "foo": ["-2"]}, {"gtk": ["-2"], "foo": ["-2"]}, True,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["-1", "1"], "foo": ["-2", "1"]},
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
])
def test_generate_expanded_mmds_buildrequires(self, requires, build_requires,
expected_xmd, expected_buildrequires):
({"gtk": ["1"], "foo": ["1"]}, {"gtk": ["-1", "1"], "foo": ["-2", "1"]}, False,
set([
frozenset(['foo:1:0:c2', 'gtk:1:0:c2', 'platform:f28:0:c10'])
]),
set([
frozenset(['foo:1', 'gtk:1'])
])),
])
def test_generate_expanded_mmds_buildrequires(
self, requires, build_requires, stream_ambigous, expected_xmd,
expected_buildrequires):
self._generate_default_modules()
module_build = self._make_module("app:1:0:c1", requires, build_requires)
# Check that generate_expanded_mmds raises an exception if stream is ambigous
# and also that it does not raise an exception otherwise.
if stream_ambigous:
with pytest.raises(StreamAmbigous):
module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd(), raise_if_stream_ambigous=True)
else:
module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd(), raise_if_stream_ambigous=True)
# Check that if stream is ambigous and we define the stream, it does not raise
# an exception.
if stream_ambigous:
default_streams = {}
for ns in list(expected_buildrequires)[0]:
name, stream = ns.split(":")
default_streams[name] = stream
module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd(), raise_if_stream_ambigous=True,
default_streams=default_streams)
mmds = module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd())