Fix issues from review. Drop xmd['mbs']['requires']. Do not allow building MSE builds against non-MSE builds.

This commit is contained in:
Jan Kaluza
2018-03-13 11:25:18 +01:00
committed by mprahl
parent 31ddbe51a5
commit 2482e47c07
12 changed files with 122 additions and 172 deletions

View File

@@ -262,10 +262,7 @@ class ModuleBuild(MBSBase):
return query.all()
@staticmethod
def get_last_builds_in_stream(session, name, stream):
"""
Returns the latest builds in "ready" state for given name:stream.
"""
def _get_last_builds_in_stream_query(session, name, stream):
# Prepare the subquery to find out all unique name:stream records.
subq = session.query(
func.max(sqlalchemy.cast(ModuleBuild.version, db.BigInteger)).label("maxversion")
@@ -277,15 +274,30 @@ class ModuleBuild(MBSBase):
ModuleBuild.name == name,
ModuleBuild.stream == stream,
sqlalchemy.cast(ModuleBuild.version, db.BigInteger) == subq.c.maxversion))
return query.all()
return query
@staticmethod
def get_build_from_nsvc(session, name, stream, version, context):
def get_last_builds_in_stream(session, name, stream):
"""
Returns the latest builds in "ready" state for given name:stream.
"""
# Prepare the subquery to find out all unique name:stream records.
return ModuleBuild._get_last_builds_in_stream_query(session, name, stream).all()
@staticmethod
def get_last_build_in_stream(session, name, stream):
"""
Returns the latest build in "ready" state for given name:stream.
"""
return ModuleBuild._get_last_builds_in_stream_query(session, name, stream).first()
@staticmethod
def get_build_from_nsvc(session, name, stream, version, context, **kwargs):
#TODO: Rewrite this to use self.context when we add it.
builds = session.query(ModuleBuild).filter_by(
name=name, stream=stream, version=version).all()
name=name, stream=stream, version=version, **kwargs).all()
for build in builds:
print build.name, build.stream, build.version, build.context
if build.context == context:
return build
return None
@@ -338,14 +350,31 @@ class ModuleBuild(MBSBase):
raise ValueError("Invalid modulemd")
mbs_xmd = mmd.get_xmd().get('mbs', {})
rv = []
for property_name in ['buildrequires', 'requires']:
# We have to use keys because GLib.Variant doesn't support `in` directly.
if property_name not in mbs_xmd.keys():
raise ValueError('The module\'s modulemd hasn\'t been formatted by MBS')
mmd_formatted_property = {
dep: info['ref'] for dep, info in mbs_xmd[property_name].items()}
property_json = json.dumps(OrderedDict(sorted(mmd_formatted_property.items())))
rv.append(hashlib.sha1(property_json.encode('utf-8')).hexdigest())
# Get the buildrequires from the XMD section, because it contains
# all the buildrequires as we resolved them using dependency resolver.
# We have to use keys because GLib.Variant doesn't support `in` directly.
if "buildrequires" not in mbs_xmd.keys():
raise ValueError('The module\'s modulemd hasn\'t been formatted by MBS')
mmd_formatted_buildrequires = {
dep: info['ref'] for dep, info in mbs_xmd["buildrequires"].items()}
property_json = json.dumps(OrderedDict(sorted(mmd_formatted_buildrequires.items())))
rv.append(hashlib.sha1(property_json).hexdigest())
# Get the requires from the real "dependencies" section in MMD.
mmd_requires = {}
for deps in mmd.get_dependencies():
for name, streams in deps.get_requires().items():
if name not in mmd_requires:
mmd_requires[name] = set()
mmd_requires[name] = mmd_requires[name].union(streams.get())
# Sort the streams for each module name and also sort the module names.
mmd_requires = {
dep: sorted(list(streams)) for dep, streams in mmd_requires.items()}
property_json = json.dumps(OrderedDict(sorted(mmd_requires.items())))
rv.append(hashlib.sha1(property_json.encode('utf-8')).hexdigest())
return tuple(rv)
@staticmethod

View File

@@ -53,62 +53,6 @@ class DBResolver(GenericResolver):
tag_str = '.'.join([name, stream, str(version), context])
return 'module-{0}'.format(hashlib.sha1(tag_str).hexdigest()[:16])
def _get_recursively_required_modules(self, build, session, modules=None, strict=False):
"""
Returns a dictionary of modulemds by recursively querying the DB based on the
dependencies of the input module. The returned dictionary is a key of koji_tag
and value of Modulemd object. Note that if there are some modules loaded by
utils.load_local_builds(...), these local modules will be used instead of generically
querying the DB.
:param build: models.ModuleBuild object of the module to resolve
:param modules: dictionary of koji_tag:modulemd found by previous iteration
of this method. Used by recursion only.
:param session: SQLAlchemy database session to query from
:param strict: Normally this function returns an empty dictionary if no module can
be found. If strict=True, then an UnprocessableEntity is raised instead.
:return: a dictionary
"""
modules = modules or {}
koji_tag = build.koji_tag
mmd = build.mmd()
# Check if it's already been examined
if koji_tag in modules:
return modules
modules.update({build.koji_tag: mmd})
# We want to use the same stream as the one used in the time this
# module was built. But we still should fallback to plain mmd.requires
# in case this module depends on some older module for which we did
# not populate mmd.xmd['mbs']['requires'].
mbs_xmd = mmd.get_xmd().get('mbs')
if 'requires' in mbs_xmd.keys():
requires = {name: data['stream'] for name, data in mbs_xmd['requires'].items()}
else:
# Since MBS doesn't support v2 modulemds submitted by a user, we will
# always only have one stream per require. That way it's safe to just take the first
# element of the list.
# TODO: Change this once module stream expansion is implemented
requires = {
name: deps.get()[0]
for name, deps in mmd.get_dependencies()[0].get_requires().items()}
for name, stream in requires.items():
local_modules = models.ModuleBuild.local_modules(session, name, stream)
if local_modules:
dep = local_modules[0]
else:
dep = models.ModuleBuild.get_last_builds_in_stream(session, name, stream)
if dep:
dep = dep[0]
if dep:
modules = self._get_recursively_required_modules(dep, session, modules, strict)
elif strict:
raise UnprocessableEntity(
'The module {0}:{1} was not found'.format(name, stream))
return modules
def resolve_profiles(self, mmd, keys):
"""
Returns a dictionary with keys set according the `keys` parameters and values
@@ -136,19 +80,19 @@ class DBResolver(GenericResolver):
results[key] |= set(dep_mmd.get_profiles()[key].get_rpms().get())
continue
build = session.query(models.ModuleBuild).filter_by(
name=module_name, stream=module_info['stream'],
version=module_info['version'], state=models.BUILD_STATES['ready']).first()
build = models.ModuleBuild.get_build_from_nsvc(
session, module_name, module_info['stream'], module_info['version'],
module_info['context'], state=models.BUILD_STATES['ready'])
if not build:
raise UnprocessableEntity('The module {}:{}:{} was not found'.format(
module_name, module_info['stream'], module_info['version']))
raise UnprocessableEntity('The module {}:{}:{}:{} was not found'.format(
module_name, module_info['stream'], module_info['version'],
module_info['context']))
dep_mmd = build.mmd()
modules = self._get_recursively_required_modules(build, session, strict=True)
for name, dep_mmd in modules.items():
# Take note of what rpms are in this dep's profile
for key in keys:
if key in dep_mmd.get_profiles().keys():
results[key] |= set(dep_mmd.get_profiles()[key].get_rpms().get())
# Take note of what rpms are in this dep's profile
for key in keys:
if key in dep_mmd.get_profiles().keys():
results[key] |= set(dep_mmd.get_profiles()[key].get_rpms().get())
# Return the union of all rpms in all profiles of the given keys
return results
@@ -184,13 +128,8 @@ class DBResolver(GenericResolver):
mmd.get_name(), mmd.get_stream(), str(mmd.get_version()),
mmd.get_context() or '00000000'])
else:
build = None
for _build in session.query(models.ModuleBuild).filter_by(
name=name, stream=stream, version=version).all():
# Figure out how to query by context directly
if _build.context == context:
build = _build
break
build = models.ModuleBuild.get_build_from_nsvc(
session, name, stream, version, context)
if not build:
raise UnprocessableEntity('The module {} was not found'.format(
':'.join([name, stream, version, context])))
@@ -208,18 +147,15 @@ class DBResolver(GenericResolver):
build = session.query(models.ModuleBuild).filter_by(
name=br_name, stream=details['stream'], version=details['version'],
state=models.BUILD_STATES['ready']).first()
if not build:
raise UnprocessableEntity('The module {} was not found'.format(
':'.join([br_name, details['stream'], details['version']])))
module_tags.update(
self._get_recursively_required_modules(build, session, strict=strict))
module_tags[build.koji_tag] = build.mmd()
return module_tags
def resolve_requires(self, requires):
"""
Resolves the requires dictionary to a dictionary with keys as the module name and the
values as a dictionary with keys of ref, stream, version, filtered_rpms.
Resolves the requires list of N:S or N:S:V:C to a dictionary with keys as
the module name and the values as a dictionary with keys of ref,
stream, version, filtered_rpms.
If there are some modules loaded by utils.load_local_builds(...), these
local modules will be considered when resolving the requires. A RuntimeError
is raised on DB lookup errors.
@@ -228,12 +164,17 @@ class DBResolver(GenericResolver):
"""
new_requires = {}
with models.make_session(self.config) as session:
for module_name, module_stream in requires.items():
if ":" in module_stream:
module_stream, module_version, module_context = module_stream.split(":")
else:
for nsvc in requires:
nsvc_splitted = nsvc.split(":")
if len(nsvc_splitted) == 2:
module_name, module_stream = nsvc_splitted
module_version = None
module_context = None
elif len(nsvc_splitted) == 4:
module_name, module_stream, module_version, module_context = nsvc_splitted
else:
raise ValueError(
"Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)
local_modules = models.ModuleBuild.local_modules(
session, module_name, module_stream)
@@ -252,19 +193,14 @@ class DBResolver(GenericResolver):
continue
if module_version is None or module_context is None:
build = models.ModuleBuild.get_last_builds_in_stream(
build = models.ModuleBuild.get_last_build_in_stream(
session, module_name, module_stream)
if build:
build = build[0]
if not build:
raise UnprocessableEntity('The module {}:{} was not found'.format(
module_name, module_stream))
else:
build = models.ModuleBuild.get_build_from_nsvc(
session, module_name, module_stream, module_version, module_context)
if not build:
raise UnprocessableEntity('The module {}:{}:{}:{} was not found'.format(
module_name, module_stream, module_version, module_context))
if not build:
raise UnprocessableEntity('The module {} was not found'.format(nsvc))
commit_hash = None
filtered_rpms = []
@@ -277,6 +213,11 @@ class DBResolver(GenericResolver):
'The module "{0}" didn\'t contain a commit hash in its xmd'
.format(module_name))
if "mse" not in mbs_xmd.keys():
raise RuntimeError(
'The module "{}" is not built using Module Stream Expansion. '
'Please rebuild this module first'.format(nsvc))
# Find out the particular NVR of filtered packages
rpm_filter = mmd.get_rpm_filter()
if rpm_filter and rpm_filter.get():

View File

@@ -781,18 +781,6 @@ def format_mmd(mmd, scmurl, session=None):
else:
xmd['mbs']['commit'] = scm.get_latest()
resolver = module_build_service.resolver.GenericResolver.create(conf)
# Resolve buildrequires and requires
# Reformat the input for resolve_requires to match the old modulemd format
dep_obj = mmd.get_dependencies()[0]
if 'buildrequires' not in xmd['mbs']:
br_dict = {br: br_list.get()[0] for br, br_list in dep_obj.get_buildrequires().items()}
xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_dict)
if 'requires' not in xmd['mbs']:
req_dict = {req: req_list.get()[0] for req, req_list in dep_obj.get_requires().items()}
xmd['mbs']['requires'] = resolver.resolve_requires(req_dict)
if mmd.get_rpm_components() or mmd.get_module_components():
if 'rpms' not in xmd['mbs']:
xmd['mbs']['rpms'] = {}
@@ -993,7 +981,7 @@ def generate_expanded_mmds(session, mmd):
if not session:
session = db.session
# Create local copy of mmd, because we will have expand its dependencies,
# Create local copy of mmd, because we will expand its dependencies,
# which would change the module.
# TODO: Use copy method once its in released libmodulemd:
# https://github.com/fedora-modularity/libmodulemd/pull/20
@@ -1040,28 +1028,28 @@ def generate_expanded_mmds(session, mmd):
# We don't want to depend on ourselves, so store the NSVC of the current_mmd
# to be able to ignore it later.
self_nsvc = None
self_nsvca = None
# Dict to store name:stream pairs from nsvc, so we are able to access it
# Dict to store name:stream pairs from nsvca, so we are able to access it
# easily later.
req_name_stream = {}
# Get the values for dependencies_id, self_nsvc and req_name_stream variables.
for nsvc in requires:
req_name, req_stream, _ = nsvc.split(":", 2)
# Get the values for dependencies_id, self_nsvca and req_name_stream variables.
for nsvca in requires:
req_name, req_stream, _ = nsvca.split(":", 2)
if req_name == current_mmd.get_name() and req_stream == current_mmd.get_stream():
dependencies_id = int(nsvc.split(":")[3])
self_nsvc = nsvc
dependencies_id = int(nsvca.split(":")[3])
self_nsvca = nsvca
continue
req_name_stream[req_name] = req_stream
if dependencies_id is None or self_nsvc is None:
if dependencies_id is None or self_nsvca is None:
raise RuntimeError(
"%s:%s not found in requires %r" % (current_mmd.get_name(), current_mmd.get_stream(), requires))
# The name:[streams, ...] pairs do not have to be the same in both
# buildrequires/requires. In case they are the same, we replace the streams
# in requires section with a single stream against which we will build this MMD.
# In case they are not the same, we have to keep the streams as they in requires
# In case they are not the same, we have to keep the streams as they are in requires
# section. We always replace stream(s) for build-requirement with the one we
# will build this MMD against.
new_dep = Modulemd.Dependencies()
@@ -1083,26 +1071,25 @@ def generate_expanded_mmds(session, mmd):
mmd_copy.set_dependencies((new_dep, ))
# The Modulemd.Dependencies() stores only streams, but to really build this
# module, we need NSVC of buildrequires. We will get it using the
# module_build_service.resolver.GenericResolver.resolve_requires, so prepare
# dict in {N: SVC, ...} format as an input for this method.
br_dict = {}
for nsvc in requires:
if nsvc == self_nsvc:
# module, we need NSVC of buildrequires, so we have to store this data in XMD.
# We also need additional data like for example list of filtered_rpms. We will
# get them using module_build_service.resolver.GenericResolver.resolve_requires,
# so prepare a list with NSVCs of buildrequires as an input for this method.
br_list = []
for nsvca in requires:
if nsvca == self_nsvca:
continue
req_name, req_stream, req_version, req_context, req_arch = nsvc.split(":")
br_dict[req_name] = ":".join([req_stream, req_version, req_context])
# Remove the arch from nsvca
nsvc = ":".join(nsvca.split(":")[:-1])
br_list.append(nsvc)
# The same for runtime requires, which we need to compute runtime context.
r_dict = {req: req_list.get()[0] for req, req_list in new_dep.get_requires().items()}
# Resolve the requires/buildrequires and store the result in XMD.
# Resolve the buildrequires and store the result in XMD.
if 'mbs' not in xmd:
xmd['mbs'] = {}
resolver = module_build_service.resolver.GenericResolver.create(conf)
xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_dict)
xmd['mbs']['requires'] = resolver.resolve_requires(r_dict)
xmd['mbs']['buildrequires'] = resolver.resolve_requires(br_list)
xmd['mbs']['mse'] = "true"
mmd_copy.set_xmd(glib.dict_values(xmd))
# Now we have all the info to actually compute context of this module.
@@ -1122,11 +1109,13 @@ def submit_module_build(username, url, mmd, scm, optional_params=None):
mmds = generate_expanded_mmds(db.session, mmd)
for mmd in mmds:
log.debug('Checking whether module build already exists: %s.',
":".join([mmd.get_name(), mmd.get_stream(),
str(mmd.get_version()), mmd.get_context()]))
module = models.ModuleBuild.get_build_from_nsvc(
db.session, mmd.get_name(), mmd.get_stream(), str(mmd.get_version()),
mmd.get_context())
if module:
log.debug('Checking whether module build already exist.')
if module.state != models.BUILD_STATES['failed']:
err_msg = ('Module (state=%s) already exists. Only a new build or resubmission of '
'a failed build is allowed.' % module.state)

View File

@@ -40,6 +40,7 @@ data:
ref: virtual
stream: f28
version: '3'
context: '00000000'
commit: 620ec77321b2ea7b0d67d82992dda3e1d67055b4
requires:
platform:
@@ -50,4 +51,5 @@ data:
rpms:
ed:
ref: 51f529a5cde2b843ed9c7870689d707eaab3a9d1
mse: true
scmurl: https://src.fedoraproject.org/modules/testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4

View File

@@ -27,6 +27,7 @@ data:
ref: virtual
stream: f28
version: '3'
context: '00000000'
commit: 620ec77321b2ea7b0d67d82992dda3e1d67055b4
requires:
platform:
@@ -43,6 +44,7 @@ data:
ref: 61f529a5cde2b843ed9c7870689d707eaab3a9d1
mksh2:
ref: d7df9926cbe7bd8ffbd0ed35108814d7e037d1e3
mse: true
scmurl: https://src.fedoraproject.org/modules/testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4
components:
rpms:

View File

@@ -27,6 +27,7 @@ data:
ref: virtual
stream: f28
version: '3'
context: '00000000'
commit: 65a7721ee4eff44d2a63fb8f3a8da6e944ab7f4d
requires:
platform:
@@ -41,6 +42,7 @@ data:
ref: 7e96446223f1ad84a26c7cf23d6591cd9f6326c6
tangerine:
ref: c0f9a7dbd8cf823a2bdc19eeeed20d22b0aa52bf
mse: true
scmurl: https://src.fedoraproject.org/modules/testmodule.git?#65a7721ee4eff44d2a63fb8f3a8da6e944ab7f4d
profiles:
default:

View File

@@ -22,3 +22,4 @@ data:
buildrequires: {}
commit: virtual
requires: {}
mse: true

View File

@@ -878,7 +878,7 @@ class TestBuild:
build_one.stream = 'master'
build_one.version = 20180205135154
build_one.build_context = 'return_runtime_context'
build_one.runtime_context = 'c2c572ec'
build_one.runtime_context = 'c7b355af'
build_one.state = models.BUILD_STATES['failed']
current_dir = os.path.dirname(__file__)
formatted_testmodule_yml_path = os.path.join(
@@ -1005,7 +1005,7 @@ class TestBuild:
build_one.stream = 'master'
build_one.version = 20180205135154
build_one.build_context = 'return_runtime_context'
build_one.runtime_context = 'c2c572ec'
build_one.runtime_context = 'c7b355af'
build_one.state = models.BUILD_STATES['failed']
current_dir = os.path.dirname(__file__)
formatted_testmodule_yml_path = os.path.join(

View File

@@ -73,8 +73,8 @@ class TestModels:
build.modulemd = mmd.dumps()
build.build_context, build.runtime_context = ModuleBuild.contexts_from_mmd(build.modulemd)
assert build.build_context == 'f6e2aeec7576196241b9afa0b6b22acf2b6873d7'
assert build.runtime_context == '1739827b08388842fc90ccc0b6070c59b7d856fc'
assert build.context == 'e7a3d35e'
assert build.runtime_context == 'bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c'
assert build.context == 'f1a17afd'
class TestModelsGetStreamsContexts:

View File

@@ -116,9 +116,6 @@ class TestDBModule:
local_path = os.path.join(base_dir, 'staged_data', "local_builds")
expected = [
os.path.join(
local_path,
'module-platform-f28-3/results'),
os.path.join(
local_path,
'module-parent-master-20170816080815/results'),

View File

@@ -281,20 +281,6 @@ class TestUtils:
xmd = {
'mbs': {
'commit': '',
'buildrequires': {
'platform': {
'ref': 'virtual',
'stream': 'f28',
'version': '3',
'context': '00000000',
'filtered_rpms': []}},
'requires': {
'platform': {
'version': '3',
'ref': 'virtual',
'stream': 'f28',
'context': '00000000',
'filtered_rpms': []}},
'rpms': {
'perl-List-Compare': {'ref': 'fbed359411a1baa08d4a88e0d12d426fbf8f602c'},
'perl-Tangerine': {'ref': '4ceea43add2366d8b8c5a622a2fb563b625b9abf'},

View File

@@ -93,6 +93,7 @@ class TestUtilsModuleStreamExpansion:
"buildrequires": [],
"requires": [],
"commit": "ref_%s" % context,
"mse": "true",
}
}
deps_list = []
@@ -273,12 +274,12 @@ class TestUtilsModuleStreamExpansion:
requires_per_mmd = set()
for mmd in mmds:
assert len(mmd.get_dependencies()) == 1
requires = set()
mmd_requires = set()
dep = mmd.get_dependencies()[0]
for req_name, req_streams in dep.get_requires().items():
for req_stream in req_streams.get():
requires.add(":".join([req_name, req_stream]))
requires_per_mmd.add(frozenset(requires))
mmd_requires.add(":".join([req_name, req_stream]))
requires_per_mmd.add(frozenset(mmd_requires))
assert requires_per_mmd == expected