Mirror of https://pagure.io/fm-orchestrator.git, synced 2026-02-14 02:25:02 +08:00
Removed PDC dependency from MBS and replaced it with MBS prod instance.

Signed-off-by: Martin Curlej <mcurlej@redhat.com>

Removed pdc from comments.
Signed-off-by: Martin Curlej <mcurlej@redhat.com>

Adding missing files.
Signed-off-by: Martin Curlej <mcurlej@redhat.com>

Updated PR according to review.
Signed-off-by: Martin Curlej <mcurlej@redhat.com>

Local modules builds now
Signed-off-by: Martin Curlej <mcurlej@redhat.com>

Removed copr from config
Signed-off-by: Martin Curlej <mcurlej@redhat.com>

Fixed bugs
Signed-off-by: Martin Curlej <mcurlej@redhat.com>
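For orientation, the change swaps pdc_client lookups for plain HTTP queries against an MBS instance. A minimal sketch of that lookup style, using only the URL default and query parameters that appear in the MBSResolver diff below (the module name and stream are illustrative):

import requests

# Default of the new 'mbs_url' option; any MBS instance URL works here.
MBS_URL = "https://mbs.fedoraproject.org/module-build-service/1/module-builds/"

session = requests.Session()
session.mount(MBS_URL, requests.adapters.HTTPAdapter(max_retries=3))

params = {
    "name": "testmodule",   # illustrative name/stream
    "stream": "master",
    "state": "ready",
    "verbose": True,
    "order_desc_by": "version",
    "page": 1,
    "per_page": 10,
}

modules = []
while True:
    res = session.get(MBS_URL, params=params)
    res.raise_for_status()
    data = res.json()
    modules += data["items"]
    if not data["meta"]["next"]:
        break
    params["page"] += 1

print("Found %d module builds" % len(modules))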
@@ -1 +0,0 @@
These are some utilities to help with bootstrapping a new MBS instance.
@@ -1,20 +0,0 @@
import os
import sys

import pdc_client

servername, token, variant_uid = sys.argv[-3], sys.argv[-2], sys.argv[-1]

if os.path.basename(__file__) in (servername, token, variant_uid,):
    raise ValueError("Provide a PDC server name defined in /etc/pdc.d/ and a token")

print("Connecting to PDC server %r with token %r" % (servername, token))
pdc = pdc_client.PDCClient(servername, token=token)

print("Querying for %r to see if it is inactive" % variant_uid)
obj = pdc['unreleasedvariants'][variant_uid]()
assert obj['active'] is False, obj['active']

print("Submitting PATCH to activate.")
pdc['unreleasedvariants'][variant_uid] += {'variant_uid': variant_uid, 'active': True}
print("Done.")
@@ -1,37 +0,0 @@
import os
import sys

from six.moves import input
import pdc_client

servername, token, variant_uid, new_tag = \
    sys.argv[-4], sys.argv[-3], sys.argv[-2], sys.argv[-1]

if os.path.basename(__file__) in (servername, token, variant_uid,):
    raise ValueError("Provide a PDC server name defined in "
                     "/etc/pdc.d/ and a token")

print("Connecting to PDC server %r with token %r" % (servername, token))
pdc = pdc_client.PDCClient(servername, token=token)

print("Querying for %r to see what tag it has today" % variant_uid)
obj = pdc['unreleasedvariants'][variant_uid]()
answer = input("Change koji_tag for %r from %r to %r? [y/N]" % (
    variant_uid, obj['koji_tag'], new_tag))
if not answer.lower() in ('y', 'yes'):
    print("Exiting, taking no action.")
    sys.exit(0)

print("Submitting PATCH to new_tag.")
# Do it this way once we fix that ugly PATCH bug.
# pdc['unreleasedvariants'][variant_uid] += {
#     'variant_uid': variant_uid,
#     'koji_tag': new_tag,
# }
try:
    # This way works, but it *always* throws a TypeError.
    pdc['unreleasedvariants/'] += {variant_uid: {'koji_tag': new_tag}}
except TypeError:
    pass

print("Done.")
@@ -1,20 +0,0 @@
import os
import sys

import pdc_client

servername, token, variant_uid = sys.argv[-3], sys.argv[-2], sys.argv[-1]

if os.path.basename(__file__) in (servername, token, variant_uid,):
    raise ValueError("Provide a PDC server name defined in /etc/pdc.d/ and a token")

print("Connecting to PDC server %r with token %r" % (servername, token))
pdc = pdc_client.PDCClient(servername, token=token)

print("Querying for %r to see if it is active" % variant_uid)
obj = pdc['unreleasedvariants'][variant_uid]()
assert obj['active'], obj['active']

print("Submitting PATCH to deactivate.")
pdc['unreleasedvariants'][variant_uid] += {'variant_uid': variant_uid, 'active': False}
print("Done.")
@@ -1,16 +0,0 @@
import os
import sys

import pdc_client

servername, token, variant_uid = sys.argv[-3], sys.argv[-2], sys.argv[-1]

if os.path.basename(__file__) in (servername, token, variant_uid,):
    raise ValueError("Provide a PDC server name defined in /etc/pdc.d/ and a token")

print("Connecting to PDC server %r with token %r" % (servername, token))
pdc = pdc_client.PDCClient(servername, token=token)

print("Submitting DELETE.")
del pdc['unreleasedvariants'][variant_uid]
print("Done.")
@@ -1,22 +0,0 @@
import json
import os
import sys

import pdc_client

servername, token = sys.argv[-2], sys.argv[-1]

if os.path.basename(__file__) in (servername, token,):
    raise ValueError("Provide a PDC server name defined in /etc/pdc.d/ and a token")

filename = 'base-runtime-master-3.json'
print("Reading %s" % filename)
with open(filename, 'r') as f:
    entry = json.loads(f.read())

print("Connecting to PDC server %r with token %r" % (servername, token))
pdc = pdc_client.PDCClient(servername, token=token)

print("Submitting POST.")
pdc['unreleasedvariants']._(entry)
print("Done.")
@@ -132,6 +132,7 @@ class LocalBuildConfiguration(BaseConfiguration):
|
||||
ARCH_FALLBACK = 'x86_64'
|
||||
|
||||
ALLOW_CUSTOM_SCMURLS = True
|
||||
RESOLVER = 'mbs'
|
||||
|
||||
|
||||
class DevConfiguration(LocalBuildConfiguration):
|
||||
|
||||
@@ -17,7 +17,6 @@ RUN yum -y install \
|
||||
kobo \
|
||||
kobo-rpmlib \
|
||||
libmodulemd \
|
||||
pdc-client \
|
||||
python-backports-ssl_match_hostname \
|
||||
python-dogpile-cache \
|
||||
python-enum34 \
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
"""Generic component build functions."""
|
||||
|
||||
# TODO: Query the PDC to find what modules satisfy the build dependencies and
|
||||
# TODO: Query the MBS to find what modules satisfy the build dependencies and
|
||||
# their tag names.
|
||||
# TODO: Ensure the RPM %dist tag is set according to the policy.
|
||||
|
||||
@@ -196,7 +196,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
|
||||
necessarily have to directly install artifacts (e.g. koji), just make
|
||||
them available.
|
||||
|
||||
E.g. the koji implementation of the call uses PDC to get koji_tag
|
||||
E.g. the koji implementation of the call uses MBS to get koji_tag
|
||||
associated with each module dep and adds the tag to $module-build tag
|
||||
inheritance.
|
||||
"""
|
||||
@@ -307,7 +307,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
|
||||
mmd = module.mmd()
|
||||
resolver = module_build_service.resolver.GenericResolver.create(conf)
|
||||
|
||||
# Resolve default buildroot groups using the PDC, but only for
|
||||
# Resolve default buildroot groups using the MBS, but only for
|
||||
# non-local modules.
|
||||
groups = resolver.resolve_profiles(
|
||||
mmd, ('buildroot', 'srpm-buildroot'))
|
||||
|
||||
@@ -38,8 +38,8 @@ from module_build_service import logger
SUPPORTED_STRATEGIES = ['changed-and-after', 'only-changed', 'all']

SUPPORTED_RESOLVERS = {
    'pdc': {'builders': ['koji', 'mock']},
    'db': {'builders': ['koji', 'mock']}
    'mbs': {'builders': ['mock']},
    'db': {'builders': ['koji', 'mock', 'copr']}
}

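The mapping above ties each resolver backend to the builders it can serve. A hypothetical helper that validates a configured pair against it might look like this (a sketch only, not the actual MBS check):

def validate_resolver_builder(resolver, builder):
    """Raise ValueError if the resolver/builder combination is unsupported."""
    if resolver not in SUPPORTED_RESOLVERS:
        raise ValueError("Unsupported resolver %r" % resolver)
    if builder not in SUPPORTED_RESOLVERS[resolver]['builders']:
        raise ValueError("Builder %r cannot be used with the %r resolver"
                         % (builder, resolver))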
@@ -155,18 +155,10 @@ class Config(object):
        'type': Path,
        'default': '~/modulebuild/cache',
        'desc': 'Cache directory'},
    'pdc_url': {
    'mbs_url': {
        'type': str,
        'default': '',
        'desc': 'PDC URL.'},
    'pdc_insecure': {
        'type': bool,
        'default': False,
        'desc': 'Allow insecure connection to PDC.'},
    'pdc_develop': {
        'type': bool,
        'default': False,
        'desc': 'PDC Development mode, basically noauth.'},
        'default': 'https://mbs.fedoraproject.org/module-build-service/1/module-builds/',
        'desc': 'MBS instance url for MBSResolver'},
    'koji_config': {
        'type': str,
        'default': None,
@@ -435,7 +427,7 @@ class Config(object):
        'the "garbage" state.')},
    'resolver': {
        'type': str,
        'default': 'pdc',
        'default': 'db',
        'desc': 'Where to look up for modules. Note that this can (and '
                'probably will) be builder-specific.'},
}

@@ -112,6 +112,10 @@ def build_module_locally(local_build_nsvs=None, yaml_file=None, stream=None, ski
    if 'SERVER_NAME' not in app.config or not app.config['SERVER_NAME']:
        app.config["SERVER_NAME"] = 'localhost'

    if app.config['RESOLVER'] == 'db':
        raise ValueError("Please set RESOLVER to 'mbs' in your "
                         "configuration for local builds.")

    with app.app_context():
        conf.set_item("system", "mock")

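LocalBuildConfiguration already sets RESOLVER = 'mbs' (see the config hunk near the top of the diff); a custom local configuration satisfying this check could be as small as the following sketch (class name and attribute values are illustrative):

class MyLocalConfiguration(LocalBuildConfiguration):
    RESOLVER = 'mbs'
    # Point the MBSResolver at whichever MBS instance should answer the queries.
    MBS_URL = 'https://mbs.fedoraproject.org/module-build-service/1/module-builds/'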
@@ -152,7 +152,7 @@ class DBResolver(GenericResolver):
|
||||
if not xmd_mbs or 'buildrequires' not in xmd_mbs.keys():
|
||||
raise RuntimeError(
|
||||
'The module {} did not contain its modulemd or did not have '
|
||||
'its xmd attribute filled out in PDC'.format(nsvc))
|
||||
'its xmd attribute filled out in MBS'.format(nsvc))
|
||||
|
||||
buildrequires = xmd_mbs['buildrequires']
|
||||
for br_name, details in buildrequires.items():
|
||||
|
||||
@@ -21,126 +21,94 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
#
|
||||
# Written by Lubos Kocman <lkocman@redhat.com>
|
||||
# Filip Valder <fvalder@redhat.com>
|
||||
# Written by Martin Curlej <mcurlej@redhat.com>
|
||||
|
||||
"""PDC handler functions."""
|
||||
"""MBS handler functions."""
|
||||
|
||||
import logging
|
||||
import kobo.rpmlib
|
||||
import requests
|
||||
|
||||
import pdc_client
|
||||
from module_build_service import db
|
||||
from module_build_service import models
|
||||
from module_build_service.errors import UnprocessableEntity
|
||||
from module_build_service.resolver.base import GenericResolver
|
||||
|
||||
import inspect
|
||||
import logging
|
||||
import kobo.rpmlib
|
||||
log = logging.getLogger()
|
||||
|
||||
|
||||
class PDCResolver(GenericResolver):
|
||||
class MBSResolver(GenericResolver):
|
||||
|
||||
backend = "pdc"
|
||||
backend = "mbs"
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
self.session = self._get_client_session(self.config)
|
||||
self.mbs_prod_url = config.mbs_url
|
||||
self.session = requests.Session()
|
||||
adapter = requests.adapters.HTTPAdapter(max_retries=3)
|
||||
self.session.mount(self.mbs_prod_url, adapter)
|
||||
|
||||
def _get_client_session(self, config):
|
||||
def _query_from_nsvc(self, name, stream, version=None, context=None, state="ready"):
|
||||
"""
|
||||
:param config: instance of module_build_service.config.Config
|
||||
:return pdc_client.PDCClient instance
|
||||
"""
|
||||
if 'ssl_verify' in inspect.getargspec(pdc_client.PDCClient.__init__).args:
|
||||
# New API
|
||||
return pdc_client.PDCClient(
|
||||
server=self.config.pdc_url,
|
||||
develop=self.config.pdc_develop,
|
||||
ssl_verify=not self.config.pdc_insecure,
|
||||
)
|
||||
else:
|
||||
# Old API
|
||||
return pdc_client.PDCClient(
|
||||
server=self.config.pdc_url,
|
||||
develop=self.config.pdc_develop,
|
||||
insecure=self.config.pdc_insecure,
|
||||
)
|
||||
|
||||
def _query_from_nsvc(self, name, stream, version=None, context=None, active=None):
|
||||
"""
|
||||
Generates dict with PDC query.
|
||||
Generates dict with MBS params query.
|
||||
|
||||
:param str name: Name of the module to query.
|
||||
:param str stream: Stream of the module to query.
|
||||
:param str version/int: Version of the module to query.
|
||||
:param str context: Context of the module to query.
|
||||
:param bool active: Include "active" in a query.
|
||||
"""
|
||||
query = {
|
||||
"variant_id": name,
|
||||
"variant_version": stream,
|
||||
"name": name,
|
||||
"stream": stream,
|
||||
"state": state,
|
||||
"verbose": True,
|
||||
"order_desc_by": "version"
|
||||
}
|
||||
if active is not None:
|
||||
query["active"] = active
|
||||
if version is not None:
|
||||
query["variant_release"] = str(version)
|
||||
query["version"] = str(version)
|
||||
if context is not None:
|
||||
query["variant_context"] = context
|
||||
query["context"] = context
|
||||
return query
|
||||
|
||||
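As a concrete example, _query_from_nsvc('testmodule', 'master', '20180205135154', '9c690d0e') builds the following params dict; it matches the expected_query values used by the new tests in tests/test_resolver/test_mbs.py further down, minus the page/per_page keys that _get_modules adds:

{
    "name": "testmodule",
    "stream": "master",
    "version": "20180205135154",
    "context": "9c690d0e",
    "state": "ready",
    "verbose": True,
    "order_desc_by": "version",
}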
def _get_modules(self, name, stream, version=None, context=None, active=None, strict=False):
|
||||
def _get_modules(self, name, stream, version=None, context=None, state="ready", strict=False):
|
||||
"""
|
||||
:param module_info: pdc variant_dict, str, mmd or module dict
|
||||
:param module_info: str, mmd or module dict
|
||||
:param strict: Normally this function returns None if no module can be
|
||||
found. If strict=True, then an UnprocessableEntity is raised.
|
||||
|
||||
:return final list of module_info which pass repoclosure
|
||||
"""
|
||||
query = self._query_from_nsvc(name, stream, version, context, active)
|
||||
query = self._query_from_nsvc(name, stream, version, context, state)
|
||||
query["page"] = 1
|
||||
query["per_page"] = 10
|
||||
modules = []
|
||||
|
||||
# TODO: So far sorting on Fedora prod PDC instance is broken and it sorts
|
||||
# only by variant_uid by default. Once the sorting is fixed, we can start
|
||||
# using '-variant_release' ordering and just get the first variant from
|
||||
# there. But in the meantime, we have to get the first variant with
|
||||
# page_size set to 1 to find out how many variants (pages) are there in
|
||||
# results set and jump to last one in another query. The last one is always
|
||||
# the latest one (the one with the highest version).
|
||||
try:
|
||||
retval = self.session['unreleasedvariants']._(page_size=1, **query)
|
||||
except Exception as ex:
|
||||
log.debug("error during PDC lookup: %r" % ex)
|
||||
raise RuntimeError("Error during PDC lookup for module %s" % name)
|
||||
while True:
|
||||
res = self.session.get(self.mbs_prod_url, params=query)
|
||||
if not res.ok:
|
||||
raise RuntimeError("Failed to query MBS with query %r returned HTTP status %s" %
|
||||
(query, res.status_code))
|
||||
break
|
||||
|
||||
data = res.json()
|
||||
modules_per_page = data["items"]
|
||||
modules += modules_per_page
|
||||
|
||||
if not data["meta"]["next"]:
|
||||
break
|
||||
|
||||
query["page"] += 1
|
||||
|
||||
# Error handling
|
||||
if not retval or len(retval["results"]) == 0:
|
||||
if not modules:
|
||||
if strict:
|
||||
raise UnprocessableEntity("Failed to find module in PDC %r" % query)
|
||||
raise UnprocessableEntity("Failed to find module in MBS %r" % query)
|
||||
else:
|
||||
return None
|
||||
|
||||
# Get all the contexts of given module in case there is just NS or NSV input.
|
||||
if not version or not context:
|
||||
# If the version is not set, we have to jump to last page to get the latest
|
||||
# version in PDC.
|
||||
if not version:
|
||||
query['page'] = retval['count']
|
||||
retval = self.session['unreleasedvariants']._(page_size=1, **query)
|
||||
del query['page']
|
||||
query["variant_release"] = retval["results"][0]["variant_release"]
|
||||
retval = self.session['unreleasedvariants']._(page_size=-1, **query)
|
||||
return retval
|
||||
return modules
|
||||
|
||||
results = retval["results"]
|
||||
# Error handling
|
||||
if len(results) == 0:
|
||||
if strict:
|
||||
raise UnprocessableEntity("Failed to find module in PDC %r" % query)
|
||||
else:
|
||||
return None
|
||||
return results
|
||||
|
||||
def _get_module(self, name, stream, version=None, context=None, active=None, strict=False):
|
||||
return self._get_modules(name, stream, version, context, active, strict)[0]
|
||||
def _get_module(self, name, stream, version, context, state="ready", strict=False):
|
||||
return self._get_modules(name, stream, version, context, state, strict)[0]
|
||||
|
||||
def get_module_modulemds(self, name, stream, version=None, context=None, strict=False):
|
||||
"""
|
||||
@@ -161,7 +129,7 @@ class PDCResolver(GenericResolver):
|
||||
if local_modules:
|
||||
return [m.mmd() for m in local_modules]
|
||||
|
||||
modules = self._get_modules(name, stream, version, context, active=True, strict=strict)
|
||||
modules = self._get_modules(name, stream, version, context, strict=strict)
|
||||
if not modules:
|
||||
return []
|
||||
|
||||
@@ -173,7 +141,7 @@ class PDCResolver(GenericResolver):
|
||||
if not yaml:
|
||||
if strict:
|
||||
raise UnprocessableEntity(
|
||||
"Failed to find modulemd entry in PDC for %r" % module)
|
||||
"Failed to find modulemd entry in MBS for %r" % module)
|
||||
else:
|
||||
return None
|
||||
|
||||
@@ -211,7 +179,7 @@ class PDCResolver(GenericResolver):
|
||||
results[key] |= set(dep_mmd.get_profiles()[key].get_rpms().get())
|
||||
continue
|
||||
|
||||
# Find the dep in the built modules in PDC
|
||||
# Find the dep in the built modules in MBS
|
||||
modules = self._get_modules(
|
||||
module_name, module_info['stream'], module_info['version'],
|
||||
module_info['context'], strict=True)
|
||||
@@ -234,11 +202,12 @@ class PDCResolver(GenericResolver):
|
||||
:param stream: a module's stream (required if mmd is not set)
|
||||
:param version: a module's version (required if mmd is not set)
|
||||
:param context: a module's context (required if mmd is not set)
|
||||
:param mmd: uses the mmd instead of the name, stream, version to query PDC
|
||||
:param mmd: uses the mmd instead of the name, stream, version
|
||||
:param strict: Normally this function returns None if no module can be
|
||||
found. If strict=True, then an UnprocessableEntity is raised.
|
||||
:return dict with koji_tag as a key and ModuleMetadata object as value.
|
||||
"""
|
||||
|
||||
if mmd:
|
||||
log.debug("get_module_build_dependencies(mmd=%r strict=%r)" % (mmd, strict))
|
||||
elif any(x is None for x in [name, stream, version, context]):
|
||||
@@ -263,7 +232,7 @@ class PDCResolver(GenericResolver):
|
||||
'buildrequires' not in queried_mmd.get_xmd()['mbs'].keys()):
|
||||
raise RuntimeError(
|
||||
'The module "{0!r}" did not contain its modulemd or did not have '
|
||||
'its xmd attribute filled out in PDC'.format(queried_mmd))
|
||||
'its xmd attribute filled out in MBS'.format(queried_mmd))
|
||||
|
||||
buildrequires = queried_mmd.get_xmd()['mbs']['buildrequires']
|
||||
# Queue up the next tier of deps that we should look at..
|
||||
@@ -275,9 +244,11 @@ class PDCResolver(GenericResolver):
|
||||
module_tags[m.koji_tag] = m.mmd()
|
||||
continue
|
||||
|
||||
if "context" not in details:
|
||||
details["context"] = "00000000"
|
||||
modules = self._get_modules(
|
||||
name, details['stream'], details['version'],
|
||||
details['context'], active=True, strict=True)
|
||||
details['context'], strict=True)
|
||||
for m in modules:
|
||||
if m["koji_tag"] in module_tags:
|
||||
continue
|
||||
@@ -304,7 +275,7 @@ class PDCResolver(GenericResolver):
|
||||
If there are some modules loaded by utils.load_local_builds(...), these
|
||||
local modules will be considered when resolving the requires.
|
||||
|
||||
Raises RuntimeError on PDC lookup error.
|
||||
Raises RuntimeError on MBS lookup error.
|
||||
"""
|
||||
new_requires = {}
|
||||
for nsvc in requires:
|
||||
@@ -342,7 +313,7 @@ class PDCResolver(GenericResolver):
|
||||
filtered_rpms = []
|
||||
module = self._get_module(
|
||||
module_name, module_stream, module_version,
|
||||
module_context, active=True, strict=True)
|
||||
module_context, strict=True)
|
||||
if module.get('modulemd'):
|
||||
mmd = self.extract_modulemd(module['modulemd'])
|
||||
if mmd.get_xmd().get('mbs') and 'commit' in mmd.get_xmd()['mbs'].keys():
|
||||
@@ -362,20 +333,20 @@ class PDCResolver(GenericResolver):
|
||||
continue
|
||||
filtered_rpms.append(nvr)
|
||||
|
||||
if module.get('variant_release'):
|
||||
version = module['variant_release']
|
||||
if module.get('version'):
|
||||
version = module['version']
|
||||
|
||||
if version and commit_hash:
|
||||
new_requires[module_name] = {
|
||||
'ref': commit_hash,
|
||||
'stream': module_stream,
|
||||
'version': str(version),
|
||||
'context': module["variant_context"],
|
||||
'context': module["context"],
|
||||
'filtered_rpms': filtered_rpms,
|
||||
}
|
||||
else:
|
||||
raise RuntimeError(
|
||||
'The module "{0}" didn\'t contain either a commit hash or a'
|
||||
' version in PDC'.format(module_name))
|
||||
' version in MBS'.format(module_name))
|
||||
|
||||
return new_requires
|
||||
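resolve_requires therefore returns entries shaped like the following (values borrowed from the test fixtures elsewhere in this commit, purely for illustration):

{
    'testmodule': {
        'ref': 'ff1ea79fc952143efeed1851aa0aa006559239ba',
        'stream': 'master',
        'version': '20170219191323',
        'context': '7c29193d',
        'filtered_rpms': [],
    }
}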
@@ -25,7 +25,8 @@ import pkg_resources
|
||||
|
||||
from module_build_service.resolver.base import GenericResolver
|
||||
|
||||
|
||||
# NOTE: if you are adding a new resolver to MBS, please note that you also have to add
# the new resolver to your setup.py and update your egg-info
|
||||
for entrypoint in pkg_resources.iter_entry_points('mbs.resolver_backends'):
|
||||
GenericResolver.register_backend_class(entrypoint.load())
|
||||
|
||||
|
||||
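Concretely, registering a backend means an entry under the 'mbs.resolver_backends' group in setup.py, e.g. (mirroring the setup.py hunk further down):

entry_points={
    'mbs.resolver_backends': [
        'mbs = module_build_service.resolver.MBSResolver:MBSResolver',
        'db = module_build_service.resolver.DBResolver:DBResolver',
    ],
},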
@@ -47,7 +47,7 @@ class GenericResolver(six.with_metaclass(ABCMeta)):
|
||||
@classmethod
|
||||
def create(cls, config, backend=None, **extra):
|
||||
"""
|
||||
:param backend: a string representing resolver e.g. 'pdc'
|
||||
:param backend: a string representing resolver e.g. 'db'
|
||||
|
||||
Any additional arguments are optional extras which can be passed along
|
||||
and are implementation-dependent.
|
||||
|
||||
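Typical usage, as exercised by the new tests below (a short sketch):

resolver = GenericResolver.create(conf, backend='mbs')
mmds = resolver.get_module_modulemds('testmodule', 'master', '20180205135154', '9c690d0e')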
@@ -93,7 +93,7 @@ def failed(config, session, msg):
|
||||
else:
|
||||
# Do not overwrite state_reason set by Frontend if any.
|
||||
if not build.state_reason:
|
||||
reason = "Missing koji tag. Assuming previously failed module lookup in PDC."
|
||||
reason = "Missing koji tag. Assuming previously failed module lookup."
|
||||
log.error(reason)
|
||||
build.transition(config, state="failed", state_reason=reason)
|
||||
session.commit()
|
||||
@@ -222,12 +222,12 @@ def wait(config, session, msg):
|
||||
if conf.system not in ['koji', 'test']:
|
||||
# In case of non-koji backend, we want to get the dependencies
|
||||
# of the local module build based on ModuleMetadata, because the
|
||||
# local build is not stored in PDC and therefore we cannot query
|
||||
# it using the `query` as for Koji below.
|
||||
# local build is not stored in the external MBS and therefore we
|
||||
# cannot query it using the `query` as for Koji below.
|
||||
dependencies = resolver.get_module_build_dependencies(
|
||||
mmd=build.mmd(), strict=True).keys()
|
||||
|
||||
# We also don't want to get the tag name from the PDC, but just
|
||||
# We also don't want to get the tag name from the MBS, but just
|
||||
# generate it locally instead.
|
||||
tag = '-'.join(['module', build.name, build.stream, build.version])
|
||||
else:
|
||||
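For example, for a local build of testmodule/master/20180205135154 the locally generated tag would simply be:

'-'.join(['module', 'testmodule', 'master', '20180205135154'])
# -> 'module-testmodule-master-20180205135154'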
@@ -257,7 +257,7 @@ def wait(config, session, msg):
|
||||
try:
|
||||
dependencies, tag, cg_build_koji_tag = _get_deps_and_tag()
|
||||
except ValueError:
|
||||
reason = "Failed to get module info from PDC. Max retries reached."
|
||||
reason = "Failed to get module info from MBS. Max retries reached."
|
||||
log.exception(reason)
|
||||
build.transition(config, state="failed", state_reason=reason)
|
||||
session.commit()
|
||||
|
||||
@@ -224,9 +224,8 @@ def create_dogpile_key_generator_func(skip_first_n_args=0):
|
||||
ModuleBuild.__str__() output, which contains also batch and other data
|
||||
which changes during the build of a module.
|
||||
- it is able to skip first N arguments of a cached method. This is useful
|
||||
when the db.session or PDCClient instance is part of cached method call,
|
||||
and the caching should work no matter what session instance is passed
|
||||
to cached method argument.
|
||||
when the db.session is part of cached method call, and the caching should
|
||||
work no matter what session instance is passed to cached method argument.
|
||||
"""
|
||||
def key_generator(namespace, fn):
|
||||
fname = fn.__name__
|
||||
|
||||
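A simplified sketch of such a key generator (illustrative only; the real create_dogpile_key_generator_func also treats ModuleBuild arguments specially):

def create_key_generator(skip_first_n_args=0):
    def key_generator(namespace, fn):
        fname = fn.__name__

        def generate_key(*args, **kwargs):
            # Skip e.g. the db.session argument so the cache key stays stable
            # no matter which session instance is passed in.
            return fname + "_" + "_".join(str(arg) for arg in args[skip_first_n_args:])

        return generate_key
    return key_generator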
@@ -433,7 +433,7 @@ def _fetch_mmd(url, branch=None, allow_local_url=False, whitelist_url=False):
|
||||
mmd.set_stream(str(scm.branch))
|
||||
|
||||
# If the version is in the modulemd, throw an exception since the version
|
||||
# is generated by pdc-updater
|
||||
# since the version is generated by MBS
|
||||
if mmd.get_version():
|
||||
raise ValidationError('The version "{0}" is already defined in the '
|
||||
'modulemd but it shouldn\'t be since the version '
|
||||
|
||||
@@ -13,7 +13,6 @@ koji
|
||||
ldap3
|
||||
moksha.hub
|
||||
munch
|
||||
pdc-client
|
||||
pyOpenSSL
|
||||
pygobject
|
||||
requests
|
||||
|
||||
setup.py | 2
@@ -55,7 +55,7 @@ setup(name='module-build-service',
|
||||
'mock = module_build_service.builder.MockModuleBuilder:MockModuleBuilder',
|
||||
],
|
||||
'mbs.resolver_backends': [
|
||||
'pdc = module_build_service.resolver.PDCResolver:PDCResolver',
|
||||
'mbs = module_build_service.resolver.MBSResolver:MBSResolver',
|
||||
'db = module_build_service.resolver.DBResolver:DBResolver',
|
||||
],
|
||||
},
|
||||
|
||||
@@ -20,15 +20,10 @@
|
||||
#
|
||||
# Written by Matt Prahl <mprahl@redhat.com>
|
||||
import os
|
||||
import math
|
||||
import copy
|
||||
|
||||
import six
|
||||
import pytest
|
||||
import mock
|
||||
import pdc_client.test_helpers
|
||||
|
||||
from module_build_service import glib, Modulemd
|
||||
from module_build_service import Modulemd
|
||||
|
||||
|
||||
BASE_DIR = os.path.dirname(__file__)
|
||||
@@ -51,197 +46,21 @@ _mmd3.set_context("c2c572ed")
|
||||
TESTMODULE_MODULEMD_SECOND_CONTEXT = _mmd3.dumps()
|
||||
|
||||
|
||||
class MockPDCFilterAPI(pdc_client.test_helpers.MockAPI):
|
||||
""" A modified pdc_client.test_helpers.MockAPI that supports basic filtering on GET requests
|
||||
"""
|
||||
def _handle_get(self, filters):
|
||||
# Code taken from pdc_client/test_helpers.py
|
||||
data = self.endpoints[self.will_call]['GET']
|
||||
if callable(data):
|
||||
data = data()
|
||||
self.calls.setdefault(self.will_call, []).append(('GET', filters))
|
||||
page_size = filters.get('page_size', 20)
|
||||
# End of code taken from pdc_client/test_helpers.py
|
||||
|
||||
if not isinstance(data, list):
|
||||
return data
|
||||
|
||||
# Keep track of indexes to pop since we can't pop them during the loop
|
||||
indexes_to_pop = []
|
||||
for index, result in enumerate(data):
|
||||
for filter_key, filter_value in filters.items():
|
||||
if filter_key in ('page', 'page_size'):
|
||||
continue
|
||||
if filter_key not in result:
|
||||
raise ValueError('An unsupported filter was specified')
|
||||
# If it's a string, do a case insensitive match like the API does
|
||||
if isinstance(filter_value, six.string_types) and \
|
||||
isinstance(result[filter_key], six.string_types):
|
||||
if result[filter_key].lower() != filter_value.lower():
|
||||
indexes_to_pop.append(index)
|
||||
break
|
||||
else:
|
||||
if result[filter_key] != filter_value:
|
||||
indexes_to_pop.append(index)
|
||||
break
|
||||
# Only copy the data if we need to modify it based on the filters
|
||||
if indexes_to_pop:
|
||||
rv_data = copy.deepcopy(data)
|
||||
else:
|
||||
rv_data = data
|
||||
# Remove all the results that didn't match the filter. This is reversed so the index
|
||||
# values remain valid as we pop them.
|
||||
for index in sorted(indexes_to_pop, reverse=True):
|
||||
rv_data.pop(index)
|
||||
|
||||
if page_size <= 0:
|
||||
return rv_data
|
||||
|
||||
# Code taken from pdc_client/test_helpers.py
|
||||
page = filters.get('page', 1)
|
||||
pages = int(math.ceil(float(len(rv_data)) / page_size))
|
||||
rv_data = rv_data[(page - 1) * page_size:(page - 1) * page_size + page_size]
|
||||
return {
|
||||
'count': len(rv_data),
|
||||
'next': None if (page == pages or not pages) else self._fmt_url(page + 1),
|
||||
'previous': None if (page == 1 or not pages) else self._fmt_url(page - 1),
|
||||
'results': rv_data
|
||||
}
|
||||
# End of code taken from pdc_client/test_helpers.py
|
||||
@pytest.fixture()
|
||||
def testmodule_mmd_9c690d0e():
|
||||
return TESTMODULE_MODULEMD
|
||||
|
||||
|
||||
# This is scoped to the function in case certain tests must alter PDC
|
||||
@pytest.fixture
|
||||
def pdc():
|
||||
# Mock the PDC client
|
||||
pdc = MockPDCFilterAPI()
|
||||
# TODO: change this to the modules API when PDC > 1.9.0 is released
|
||||
pdc.add_endpoint('unreleasedvariants', 'GET', [{
|
||||
'variant_id': 'platform',
|
||||
'variant_uid': 'platform-f28-3',
|
||||
'variant_name': 'platform',
|
||||
'variant_type': 'module',
|
||||
'variant_version': 'f28',
|
||||
'variant_release': '3',
|
||||
'variant_context': '00000000',
|
||||
'koji_tag': 'module-f28-build',
|
||||
'modulemd': PLATFORM_MODULEMD,
|
||||
'runtime_deps': [],
|
||||
'build_deps': [],
|
||||
'active': True,
|
||||
'rpms': []
|
||||
}])
|
||||
pdc_patcher = mock.patch('pdc_client.PDCClient', return_value=pdc)
|
||||
pdc_patcher.start()
|
||||
yield pdc
|
||||
pdc_patcher.stop()
|
||||
@pytest.fixture()
|
||||
def testmodule_mmd_c2c572ed():
|
||||
return TESTMODULE_MODULEMD_SECOND_CONTEXT
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def pdc_module_inactive(pdc):
|
||||
pdc.endpoints['unreleasedvariants']['GET'].append({
|
||||
'variant_id': 'testmodule',
|
||||
'variant_uid': 'testmodule:master:20180205135154',
|
||||
'variant_name': 'testmodule',
|
||||
'variant_type': 'module',
|
||||
'variant_version': 'master',
|
||||
'variant_release': '20180205135154',
|
||||
'variant_context': '9c690d0e',
|
||||
'koji_tag': 'module-95b214a704c984be',
|
||||
'modulemd': TESTMODULE_MODULEMD,
|
||||
'runtime_deps': [
|
||||
{
|
||||
'dependency': 'platform',
|
||||
'stream': 'f28'
|
||||
}
|
||||
],
|
||||
'build_deps': [
|
||||
{
|
||||
'dependency': 'platform',
|
||||
'stream': 'f28'
|
||||
}
|
||||
],
|
||||
'rpms': [],
|
||||
'active': False,
|
||||
})
|
||||
return pdc
|
||||
@pytest.fixture()
|
||||
def formatted_testmodule_mmd():
|
||||
return _mmd2
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def pdc_module_active(pdc_module_inactive):
|
||||
# Rename it for clarity
|
||||
pdc_module_active = pdc_module_inactive
|
||||
pdc_module_active.endpoints['unreleasedvariants']['GET'][-1].update({
|
||||
'active': True,
|
||||
'rpms': [
|
||||
'tangerine-0:0.22-6.module+0+814cfa39.noarch.rpm',
|
||||
'tangerine-0:0.22-6.module+0+814cfa39.src.rpm',
|
||||
'perl-Tangerine-0:0.22-2.module+0+814cfa39.noarch.rpm',
|
||||
'perl-Tangerine-0:0.22-2.module+0+814cfa39.src.rpm',
|
||||
'perl-List-Compare-0:0.53-8.module+0+814cfa39.noarch.rpm',
|
||||
'perl-List-Compare-0:0.53-8.module+0+814cfa39.src.rpm'
|
||||
]
|
||||
})
|
||||
return pdc_module_active
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def pdc_module_active_two_contexts(pdc_module_active):
|
||||
# Rename it for clarity
|
||||
pdc_module_active_two_contexts = pdc_module_active
|
||||
pdc_module_active_two_contexts.endpoints['unreleasedvariants']['GET'][-1].update({
|
||||
'active': True,
|
||||
'rpms': [
|
||||
'tangerine-0:0.22-6.module+0+814cfa39.noarch.rpm',
|
||||
'tangerine-0:0.22-6.module+0+814cfa39.src.rpm',
|
||||
'perl-Tangerine-0:0.22-2.module+0+814cfa39.noarch.rpm',
|
||||
'perl-Tangerine-0:0.22-2.module+0+814cfa39.src.rpm',
|
||||
'perl-List-Compare-0:0.53-8.module+0+814cfa39.noarch.rpm',
|
||||
'perl-List-Compare-0:0.53-8.module+0+814cfa39.src.rpm'
|
||||
]
|
||||
})
|
||||
pdc_module_active_two_contexts.endpoints['unreleasedvariants']['GET'].append(
|
||||
dict(pdc_module_active.endpoints['unreleasedvariants']['GET'][-1]))
|
||||
pdc_module_active_two_contexts.endpoints['unreleasedvariants']['GET'][-1].update({
|
||||
"variant_context": "c2c572ed",
|
||||
"modulemd": TESTMODULE_MODULEMD_SECOND_CONTEXT})
|
||||
return pdc_module_active_two_contexts
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def pdc_module_reuse(pdc_module_active):
|
||||
# Rename it for clarity
|
||||
pdc_module_reuse = pdc_module_active
|
||||
mmd = Modulemd.Module().new_from_string(TESTMODULE_MODULEMD)
|
||||
mmd.upgrade()
|
||||
mmd.set_version(20170219191323)
|
||||
xmd = glib.from_variant_dict(mmd.get_xmd())
|
||||
xmd['mbs']['scmurl'] = 'git://pkgs.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79'
|
||||
xmd['mbs']['commit'] = 'ff1ea79fc952143efeed1851aa0aa006559239ba'
|
||||
mmd.set_xmd(glib.dict_values(xmd))
|
||||
pdc_module_reuse.endpoints['unreleasedvariants']['GET'].append(
|
||||
copy.deepcopy(pdc_module_reuse.endpoints['unreleasedvariants']['GET'][-1]))
|
||||
pdc_module_reuse.endpoints['unreleasedvariants']['GET'][-1].update({
|
||||
'variant_uid': 'testmodule:master:{0}'.format(mmd.get_version()),
|
||||
'variant_release': str(mmd.get_version()),
|
||||
'variant_context': '7c29193d',
|
||||
'modulemd': mmd.dumps(),
|
||||
'koji_tag': 'module-de3adf79caf3e1b8'
|
||||
})
|
||||
|
||||
mmd.set_version(20180205135154)
|
||||
xmd = glib.from_variant_dict(mmd.get_xmd())
|
||||
xmd['mbs']['scmurl'] = 'git://pkgs.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a'
|
||||
xmd['mbs']['commit'] = '55f4a0a2e6cc255c88712a905157ab39315b8fd8'
|
||||
mmd.set_xmd(glib.dict_values(xmd))
|
||||
pdc_module_reuse.endpoints['unreleasedvariants']['GET'].append(
|
||||
copy.deepcopy(pdc_module_reuse.endpoints['unreleasedvariants']['GET'][-1]))
|
||||
pdc_module_reuse.endpoints['unreleasedvariants']['GET'][-1].update({
|
||||
'variant_uid': 'testmodule:master:{0}'.format(mmd.get_version()),
|
||||
'variant_release': str(mmd.get_version()),
|
||||
'modulemd': mmd.dumps(),
|
||||
'koji_tag': 'module-fe3adf73caf3e1b7',
|
||||
'rpms': [],
|
||||
'active': False
|
||||
})
|
||||
return pdc_module_reuse
|
||||
@pytest.fixture()
|
||||
def platform_mmd():
|
||||
return PLATFORM_MODULEMD
|
||||
|
||||
@@ -37,17 +37,17 @@ class TestGenericBuilder:
|
||||
init_data(1)
|
||||
self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()
|
||||
|
||||
@patch('module_build_service.resolver.PDCResolver')
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
def test_default_buildroot_groups_cache(self, generic_resolver, resolver):
|
||||
pdc_groups = {
|
||||
mbs_groups = {
|
||||
"buildroot": [],
|
||||
"srpm-buildroot": []
|
||||
}
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'pdc'
|
||||
resolver.resolve_profiles.return_value = pdc_groups
|
||||
resolver.backend = 'mbs'
|
||||
resolver.resolve_profiles.return_value = mbs_groups
|
||||
generic_resolver.create.return_value = resolver
|
||||
|
||||
expected_groups = {
|
||||
|
||||
@@ -153,7 +153,7 @@ class TestDBModule:
|
||||
with app.app_context():
|
||||
utils.load_local_builds(['platform'])
|
||||
mmd = models.ModuleBuild.query.get(2).mmd()
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot'))
|
||||
expected = {
|
||||
'buildroot':
|
||||
|
||||
tests/test_resolver/test_mbs.py | 318 (new file)
@@ -0,0 +1,318 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
|
||||
from mock import patch, PropertyMock, Mock, call
|
||||
|
||||
import module_build_service.resolver as mbs_resolver
|
||||
import module_build_service.utils
|
||||
import module_build_service.models
|
||||
from module_build_service import glib, Modulemd, app
|
||||
import tests
|
||||
|
||||
|
||||
base_dir = os.path.join(os.path.dirname(__file__), "..")
|
||||
|
||||
|
||||
class TestMBSModule:
|
||||
|
||||
@patch("requests.Session")
|
||||
def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):
|
||||
""" Tests for querying a module from mbs """
|
||||
mock_res = Mock()
|
||||
mock_res.ok.return_value = True
|
||||
mock_res.json.return_value = {
|
||||
"items": [
|
||||
{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"modulemd": testmodule_mmd_9c690d0e
|
||||
}
|
||||
],
|
||||
"next": None
|
||||
}
|
||||
|
||||
mock_session().get.return_value = mock_res
|
||||
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
module_mmds = resolver.get_module_modulemds('testmodule', 'master', '20180205135154',
|
||||
'9c690d0e')
|
||||
nsvcs = set(m.dup_nsvc() for m in module_mmds)
|
||||
expected = set(["testmodule:master:125a91f56532:9c690d0e"])
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_query = {
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
}
|
||||
mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
|
||||
assert nsvcs == expected
|
||||
|
||||
@patch("requests.Session")
|
||||
def test_get_module_modulemds_partial(self, mock_session, testmodule_mmd_9c690d0e,
|
||||
testmodule_mmd_c2c572ed):
|
||||
""" Test for querying MBS without the context of a module """
|
||||
|
||||
version = "20180205135154"
|
||||
|
||||
mock_res = Mock()
|
||||
mock_res.ok.return_value = True
|
||||
mock_res.json.return_value = {
|
||||
"items": [
|
||||
{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": version,
|
||||
"context": "9c690d0e",
|
||||
"modulemd": testmodule_mmd_9c690d0e
|
||||
},
|
||||
{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": version,
|
||||
"context": "c2c572ed",
|
||||
"modulemd": testmodule_mmd_c2c572ed
|
||||
}
|
||||
],
|
||||
"next": None
|
||||
}
|
||||
|
||||
mock_session().get.return_value = mock_res
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
ret = resolver.get_module_modulemds('testmodule', 'master', version)
|
||||
nsvcs = set(m.dup_nsvc() for m in ret)
|
||||
expected = set(["testmodule:master:125a91f56532:9c690d0e",
|
||||
"testmodule:master:125a91f56532:c2c572ed"])
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_query = {
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": version,
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
}
|
||||
mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
|
||||
assert nsvcs == expected
|
||||
|
||||
@patch("requests.Session")
|
||||
def test_get_module_build_dependencies(self, mock_session, platform_mmd,
|
||||
testmodule_mmd_9c690d0e):
|
||||
"""
|
||||
Tests that we return just direct build-time dependencies of testmodule.
|
||||
"""
|
||||
mock_res = Mock()
|
||||
mock_res.ok.return_value = True
|
||||
mock_res.json.side_effect = [
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"modulemd": testmodule_mmd_9c690d0e
|
||||
}
|
||||
],
|
||||
"next": None
|
||||
}, {
|
||||
"items": [
|
||||
{
|
||||
"name": "platform",
|
||||
"stream": "f28",
|
||||
"version": "3",
|
||||
"context": "00000000",
|
||||
"modulemd": platform_mmd,
|
||||
"koji_tag": "module-f28-build"
|
||||
}
|
||||
],
|
||||
"next": None
|
||||
}
|
||||
]
|
||||
|
||||
mock_session().get.return_value = mock_res
|
||||
expected = set(['module-f28-build'])
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
result = resolver.get_module_build_dependencies(
|
||||
'testmodule', 'master', '20180205135154', '9c690d0e').keys()
|
||||
|
||||
expected_queries = [{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
}, {
|
||||
"name": "platform",
|
||||
"stream": "f28",
|
||||
"version": "3",
|
||||
"context": "00000000",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
}]
|
||||
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_calls = [call(mbs_url, params=expected_queries[0]),
|
||||
call(mbs_url, params=expected_queries[1])]
|
||||
mock_session().get.mock_calls = expected_calls
|
||||
assert mock_session().get.call_count == 2
|
||||
assert set(result) == expected
|
||||
|
||||
@patch("requests.Session")
|
||||
def test_get_module_build_dependencies_empty_buildrequires(self, mock_session,
|
||||
testmodule_mmd_9c690d0e):
|
||||
|
||||
mmd = Modulemd.Module().new_from_string(testmodule_mmd_9c690d0e)
|
||||
# Wipe out the dependencies
|
||||
mmd.set_dependencies()
|
||||
xmd = glib.from_variant_dict(mmd.get_xmd())
|
||||
xmd['mbs']['buildrequires'] = {}
|
||||
mmd.set_xmd(glib.dict_values(xmd))
|
||||
|
||||
mock_res = Mock()
|
||||
mock_res.ok.return_value = True
|
||||
mock_res.json.side_effect = [
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"modulemd": mmd.dumps(),
|
||||
"build_deps": []
|
||||
}
|
||||
],
|
||||
"next": None
|
||||
}
|
||||
]
|
||||
|
||||
mock_session().get.return_value = mock_res
|
||||
|
||||
expected = set()
|
||||
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
result = resolver.get_module_build_dependencies(
|
||||
'testmodule', 'master', '20180205135154', '9c690d0e').keys()
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_query = {
|
||||
"name": "testmodule",
|
||||
"stream": "master",
|
||||
"version": "20180205135154",
|
||||
"context": "9c690d0e",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
}
|
||||
mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
|
||||
assert set(result) == expected
|
||||
|
||||
@patch("requests.Session")
|
||||
def test_resolve_profiles(self, mock_session, formatted_testmodule_mmd, platform_mmd):
|
||||
|
||||
mock_res = Mock()
|
||||
mock_res.ok.return_value = True
|
||||
mock_res.json.return_value = {
|
||||
"items": [
|
||||
{
|
||||
"name": "platform",
|
||||
"stream": "f28",
|
||||
"version": "3",
|
||||
"context": "00000000",
|
||||
"modulemd": platform_mmd
|
||||
}
|
||||
],
|
||||
"next": None
|
||||
}
|
||||
|
||||
mock_session().get.return_value = mock_res
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
result = resolver.resolve_profiles(formatted_testmodule_mmd,
|
||||
('buildroot', 'srpm-buildroot'))
|
||||
expected = {
|
||||
'buildroot':
|
||||
set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed',
|
||||
'findutils', 'util-linux', 'bash', 'info', 'bzip2',
|
||||
'grep', 'redhat-rpm-config', 'fedora-release',
|
||||
'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils',
|
||||
'which', 'rpm-build', 'gzip', 'gcc-c++']),
|
||||
'srpm-buildroot':
|
||||
set(['shadow-utils', 'redhat-rpm-config', 'rpm-build',
|
||||
'fedora-release', 'fedpkg-minimal', 'gnupg2',
|
||||
'bash'])
|
||||
}
|
||||
|
||||
mbs_url = tests.conf.mbs_url
|
||||
expected_query = {
|
||||
"name": "platform",
|
||||
"stream": "f28",
|
||||
"version": "3",
|
||||
"context": "00000000",
|
||||
"verbose": True,
|
||||
"order_desc_by": "version",
|
||||
"page": 1,
|
||||
"per_page": 10,
|
||||
"state": "ready"
|
||||
}
|
||||
|
||||
mock_session().get.assert_called_once_with(mbs_url, params=expected_query)
|
||||
assert result == expected
|
||||
|
||||
@patch("module_build_service.config.Config.system",
|
||||
new_callable=PropertyMock, return_value="test")
|
||||
@patch("module_build_service.config.Config.mock_resultsdir",
|
||||
new_callable=PropertyMock,
|
||||
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
|
||||
def test_resolve_profiles_local_module(self, local_builds, conf_system,
|
||||
formatted_testmodule_mmd):
|
||||
tests.clean_database()
|
||||
with app.app_context():
|
||||
module_build_service.utils.load_local_builds(['platform'])
|
||||
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='mbs')
|
||||
result = resolver.resolve_profiles(formatted_testmodule_mmd,
|
||||
('buildroot', 'srpm-buildroot'))
|
||||
expected = {
|
||||
'buildroot':
|
||||
set(['foo']),
|
||||
'srpm-buildroot':
|
||||
set(['bar'])
|
||||
}
|
||||
assert result == expected
|
||||
@@ -1,124 +0,0 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
#
|
||||
# Written by Ralph Bean <rbean@redhat.com>
|
||||
|
||||
import os
|
||||
|
||||
from mock import patch, PropertyMock
|
||||
import pytest
|
||||
|
||||
import module_build_service.resolver as mbs_resolver
|
||||
import module_build_service.utils
|
||||
import module_build_service.models
|
||||
from module_build_service import app
|
||||
from module_build_service import glib, Modulemd
|
||||
import tests
|
||||
|
||||
|
||||
base_dir = os.path.join(os.path.dirname(__file__), "..")
|
||||
|
||||
|
||||
class TestPDCModule:
|
||||
|
||||
def test_get_module_modulemds_nsvc(self, pdc_module_active_two_contexts):
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
|
||||
ret = resolver.get_module_modulemds('testmodule', 'master', '20180205135154', '9c690d0e')
|
||||
nsvcs = set(m.dup_nsvc() for m in ret)
|
||||
expected = set(["testmodule:master:125a91f56532:9c690d0e"])
|
||||
assert nsvcs == expected
|
||||
|
||||
@pytest.mark.parametrize('kwargs', [{'version': '20180205135154'}, {}])
|
||||
def test_get_module_modulemds_partial(self, pdc_module_active_two_contexts, kwargs):
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
|
||||
ret = resolver.get_module_modulemds('testmodule', 'master', **kwargs)
|
||||
nsvcs = set(m.dup_nsvc() for m in ret)
|
||||
expected = set(["testmodule:master:125a91f56532:9c690d0e",
|
||||
"testmodule:master:125a91f56532:c2c572ed"])
|
||||
assert nsvcs == expected
|
||||
|
||||
@pytest.mark.parametrize('empty_buildrequires', [False, True])
|
||||
def test_get_module_build_dependencies(self, pdc_module_active, empty_buildrequires):
|
||||
"""
|
||||
Tests that we return just direct build-time dependencies of testmodule.
|
||||
"""
|
||||
expected = set(['module-f28-build'])
|
||||
if empty_buildrequires:
|
||||
expected = set()
|
||||
pdc_item = pdc_module_active.endpoints['unreleasedvariants']['GET'][-1]
|
||||
mmd = Modulemd.Module().new_from_string(pdc_item['modulemd'])
|
||||
# Wipe out the dependencies
|
||||
mmd.set_dependencies()
|
||||
xmd = glib.from_variant_dict(mmd.get_xmd())
|
||||
xmd['mbs']['buildrequires'] = {}
|
||||
mmd.set_xmd(glib.dict_values(xmd))
|
||||
pdc_item.update({
|
||||
'modulemd': mmd.dumps(),
|
||||
'build_deps': []
|
||||
})
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
|
||||
result = resolver.get_module_build_dependencies(
|
||||
'testmodule', 'master', '20180205135154', '9c690d0e').keys()
|
||||
assert set(result) == expected
|
||||
|
||||
def test_resolve_profiles(self, pdc_module_active):
|
||||
yaml_path = os.path.join(
|
||||
base_dir, 'staged_data', 'formatted_testmodule.yaml')
|
||||
mmd = Modulemd.Module().new_from_file(yaml_path)
|
||||
mmd.upgrade()
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
|
||||
result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot'))
|
||||
expected = {
|
||||
'buildroot':
|
||||
set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed',
|
||||
'findutils', 'util-linux', 'bash', 'info', 'bzip2',
|
||||
'grep', 'redhat-rpm-config', 'fedora-release',
|
||||
'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils',
|
||||
'which', 'rpm-build', 'gzip', 'gcc-c++']),
|
||||
'srpm-buildroot':
|
||||
set(['shadow-utils', 'redhat-rpm-config', 'rpm-build',
|
||||
'fedora-release', 'fedpkg-minimal', 'gnupg2',
|
||||
'bash'])
|
||||
}
|
||||
assert result == expected
|
||||
|
||||
@patch("module_build_service.config.Config.system",
|
||||
new_callable=PropertyMock, return_value="test")
|
||||
@patch("module_build_service.config.Config.mock_resultsdir",
|
||||
new_callable=PropertyMock,
|
||||
return_value=os.path.join(base_dir, 'staged_data', "local_builds"))
|
||||
def test_resolve_profiles_local_module(self, local_builds, conf_system):
|
||||
tests.clean_database()
|
||||
with app.app_context():
|
||||
module_build_service.utils.load_local_builds(['platform'])
|
||||
|
||||
yaml_path = os.path.join(
|
||||
base_dir, 'staged_data', 'formatted_testmodule.yaml')
|
||||
mmd = Modulemd.Module().new_from_file(yaml_path)
|
||||
mmd.upgrade()
|
||||
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='pdc')
|
||||
result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot'))
|
||||
expected = {
|
||||
'buildroot':
|
||||
set(['foo']),
|
||||
'srpm-buildroot':
|
||||
set(['bar'])
|
||||
}
|
||||
assert result == expected
|
||||
@@ -88,7 +88,7 @@ class TestModuleWait:
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.PDCResolver')
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
def test_new_repo_called_when_macros_reused(
|
||||
self, generic_resolver, resolver, create_builder, koji_get_session, dbg):
|
||||
@@ -110,7 +110,7 @@ class TestModuleWait:
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'pdc'
|
||||
resolver.backend = 'db'
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
generic_resolver.create.return_value = resolver
|
||||
|
||||
@@ -130,7 +130,7 @@ class TestModuleWait:
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.PDCResolver')
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
def test_new_repo_not_called_when_macros_not_reused(
|
||||
self, generic_resolver, resolver, create_builder, koji_get_session, dbg):
|
||||
@@ -152,7 +152,7 @@ class TestModuleWait:
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'pdc'
|
||||
resolver.backend = 'db'
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
generic_resolver.create.return_value = resolver
|
||||
|
||||
@@ -166,7 +166,7 @@ class TestModuleWait:
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.PDCResolver')
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
def test_set_cg_build_koji_tag_fallback_to_default(
|
||||
self, generic_resolver, resolver, create_builder, koji_get_session, dbg):
|
||||
@@ -192,7 +192,7 @@ class TestModuleWait:
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'pdc'
|
||||
resolver.backend = 'db'
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
resolver.get_module_build_dependencies.return_value = {
|
||||
"module-bootstrap-tag": base_mmd}
|
||||
@@ -209,7 +209,7 @@ class TestModuleWait:
|
||||
return_value={'build': [], 'srpm-build': []})
|
||||
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
|
||||
@patch("module_build_service.builder.GenericBuilder.create_from_module")
|
||||
@patch('module_build_service.resolver.PDCResolver')
|
||||
@patch('module_build_service.resolver.DBResolver')
|
||||
@patch('module_build_service.resolver.GenericResolver')
|
||||
@patch("module_build_service.config.Config.base_module_names",
|
||||
new_callable=mock.PropertyMock, return_value=set(["base-runtime"]))
|
||||
@@ -237,7 +237,7 @@ class TestModuleWait:
|
||||
create_builder.return_value = builder
|
||||
|
||||
resolver = mock.MagicMock()
|
||||
resolver.backend = 'pdc'
|
||||
resolver.backend = 'db'
|
||||
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
|
||||
resolver.get_module_build_dependencies.return_value = {
|
||||
"module-bootstrap-tag": base_mmd}
|
||||
|
||||