diff --git a/conf/config.py b/conf/config.py index e05ce5b0..f1ec5127 100644 --- a/conf/config.py +++ b/conf/config.py @@ -118,6 +118,7 @@ class TestConfiguration(BaseConfiguration): KOJI_REPOSITORY_URL = 'https://kojipkgs.stg.fedoraproject.org/repos' SCMURLS = ["git://pkgs.stg.fedoraproject.org/modules/"] AUTH_METHOD = 'oidc' + RESOLVER = 'db' class ProdConfiguration(BaseConfiguration): diff --git a/module_build_service/config.py b/module_build_service/config.py index f8d45de6..a97868ab 100644 --- a/module_build_service/config.py +++ b/module_build_service/config.py @@ -39,7 +39,8 @@ SUPPORTED_STRATEGIES = ['changed-and-after', 'only-changed', 'all'] SUPPORTED_RESOLVERS = { 'pdc': {'builders': ['koji', 'mock', 'copr']}, - 'copr': {'builders': ['copr', 'mock']} + 'copr': {'builders': ['copr', 'mock']}, + 'db': {'builders': ['koji', 'mock', 'copr']} } diff --git a/module_build_service/resolver/DBResolver.py b/module_build_service/resolver/DBResolver.py new file mode 100644 index 00000000..c9993ea7 --- /dev/null +++ b/module_build_service/resolver/DBResolver.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Red Hat, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +# Written by Matt Prahl + +import hashlib + +from module_build_service import log +from module_build_service.resolver.base import GenericResolver +from module_build_service import models +from module_build_service.errors import UnprocessableEntity + + +class DBResolver(GenericResolver): + """ + Resolver using the MBS database + """ + backend = 'db' + + def __init__(self, config): + self.config = config + + def get_module_tag(self, name, stream, version, context, strict=False): + """ + Gets the module tag from the resolver. Since the resolver is the DB, it is just generated + here. + :param name: a string of the module's name + :param stream: a string of the module's stream + :param version: a string or int of the module's version + :param context: a string of the module's context + :kwarg strict: Here solely for compatibility with the base class' function signature + :return: a string of the tag to use + """ + # This algorithm mimics what pdc-updater does + tag_str = '.'.join([name, stream, str(version), context]) + return 'module-{0}'.format(hashlib.sha1(tag_str).hexdigest()[:16]) + + def _get_recursively_required_modules(self, build, session, modules=None, strict=False): + """ + Returns a dictionary of modulemds by recursively querying the DB based on the + dependencies of the input module.
The returned dictionary is a key of koji_tag + and value of Modulemd object. Note that if there are some modules loaded by + utils.load_local_builds(...), these local modules will be used instead of generically + querying the DB. + :param build: models.ModuleBuild object of the module to resolve + :param modules: dictionary of koji_tag:modulemd found by previous iteration + of this method. Used by recursion only. + :param session: SQLAlchemy database session to query from + :param strict: Normally this function returns an empty dictionary if no module can + be found. If strict=True, then an UnprocessableEntity is raised instead. + :return: a dictionary + """ + modules = modules or {} + koji_tag = build.koji_tag + mmd = build.mmd() + + # Check if it's already been examined + if koji_tag in modules: + return modules + + modules.update({build.koji_tag: mmd}) + # We want to use the same stream as the one used at the time this + # module was built. But we still should fall back to plain mmd.requires + # in case this module depends on some older module for which we did + # not populate mmd.xmd['mbs']['requires']. + mbs_xmd = mmd.get_xmd().get('mbs') + if 'requires' in mbs_xmd.keys(): + requires = {name: data['stream'] for name, data in mbs_xmd['requires'].items()} + else: + # Since MBS doesn't support v2 modulemds submitted by a user, we will + # always only have one stream per require. That way it's safe to just take the first + # element of the list. + # TODO: Change this once module stream expansion is implemented + requires = { + name: deps.get()[0] + for name, deps in mmd.get_dependencies()[0].get_requires().items()} + + for name, stream in requires.items(): + local_modules = models.ModuleBuild.local_modules(session, name, stream) + if local_modules: + dep = local_modules[0] + else: + dep = models.ModuleBuild.get_last_build_in_stream(session, name, stream) + if dep: + modules = self._get_recursively_required_modules(dep, session, modules, strict) + elif strict: + raise UnprocessableEntity( + 'The module {0}:{1} was not found'.format(name, stream)) + + return modules + + def resolve_profiles(self, mmd, keys): + """ + Returns a dictionary with keys set according to the `keys` parameter and values + set to the union of all components defined in all installation profiles matching + the key in all buildrequires. If there are some modules loaded by + utils.load_local_builds(...), these local modules will be considered when returning + the profiles.
+ :param mmd: Modulemd.Module instance representing the module + :param keys: list of modulemd installation profiles to include in the result + :return: a dictionary + """ + results = {} + for key in keys: + results[key] = set() + with models.make_session(self.config) as session: + for module_name, module_info in mmd.get_xmd()['mbs']['buildrequires'].items(): + local_modules = models.ModuleBuild.local_modules( + session, module_name, module_info['stream']) + if local_modules: + local_module = local_modules[0] + log.info('Using local module {0!r} to resolve profiles.'.format(local_module)) + dep_mmd = local_module.mmd() + for key in keys: + if key in dep_mmd.get_profiles().keys(): + results[key] |= set(dep_mmd.get_profiles()[key].get_rpms().get()) + continue + + build = session.query(models.ModuleBuild).filter_by( + name=module_name, stream=module_info['stream'], + version=module_info['version'], state=models.BUILD_STATES['ready']).first() + if not build: + raise UnprocessableEntity('The module {}:{}:{} was not found'.format( + module_name, module_info['stream'], module_info['version'])) + + modules = self._get_recursively_required_modules(build, session, strict=True) + for name, dep_mmd in modules.items(): + # Take note of what rpms are in this dep's profile + for key in keys: + if key in dep_mmd.get_profiles().keys(): + results[key] |= set(dep_mmd.get_profiles()[key].get_rpms().get()) + + # Return the union of all rpms in all profiles of the given keys + return results + + def get_module_build_dependencies(self, name=None, stream=None, version=None, context=None, + mmd=None, strict=False): + """ + Returns a dictionary of koji_tag:mmd of all the dependencies + :kwarg name: a string of a module's name (required if mmd is not set) + :kwarg stream: a string of a module's stream (required if mmd is not set) + :kwarg version: a string of a module's version (required if mmd is not set) + :kwarg context: a string of a module's context (required if mmd is not set) + :kwarg mmd: Modulemd.Module object. If this is set, the mmd will be used instead of + querying the DB with the name, stream, version, and context. + :kwarg strict: Normally this function returns None if no module can be + found. If strict=True, then an UnprocessableEntity is raised. 
+ :return: a dictionary + """ + if mmd: + log.debug('get_module_build_dependencies(mmd={0!r} strict={1!r})'.format(mmd, strict)) + elif any(x is None for x in [name, stream, version, context]): + raise RuntimeError('The name, stream, version, and/or context weren\'t specified') + else: + version = str(version) + log.debug('get_module_build_dependencies({0}, strict={1!r})'.format( + ', '.join([name, stream, str(version), context]), strict)) + + module_tags = {} + with models.make_session(self.config) as session: + if mmd: + queried_mmd = mmd + nsvc = ':'.join([ + mmd.get_name(), mmd.get_stream(), str(mmd.get_version()), + mmd.get_context() or '00000000']) + else: + build = None + for _build in session.query(models.ModuleBuild).filter_by( + name=name, stream=stream, version=version).all(): + # Figure out how to query by context directly + if _build.context == context: + build = _build + break + if not build: + raise UnprocessableEntity('The module {} was not found'.format( + ':'.join([name, stream, version, context]))) + queried_mmd = build.mmd() + nsvc = ':'.join([name, stream, version, context]) + + xmd_mbs = queried_mmd.get_xmd().get('mbs') + if not xmd_mbs or 'buildrequires' not in xmd_mbs.keys(): + raise RuntimeError( + 'The module {} did not contain its modulemd or did not have ' + 'its xmd attribute filled out in PDC'.format(nsvc)) + + buildrequires = xmd_mbs['buildrequires'] + for br_name, details in buildrequires.items(): + build = session.query(models.ModuleBuild).filter_by( + name=br_name, stream=details['stream'], version=details['version'], + state=models.BUILD_STATES['ready']).first() + if not build: + raise UnprocessableEntity('The module {} was not found'.format( + ':'.join([br_name, details['stream'], details['version']]))) + module_tags.update( + self._get_recursively_required_modules(build, session, strict=strict)) + + return module_tags + + def resolve_requires(self, requires): + """ + Resolves the requires dictionary to a dictionary with keys as the module name and the + values as a dictionary with keys of ref, stream, version, filtered_rpms. + If there are some modules loaded by utils.load_local_builds(...), these + local modules will be considered when resolving the requires. A RuntimeError + is raised on DB lookup errors. + :param requires: a dictionary with the module name as the key and the stream as the value + :return: a dictionary + """ + new_requires = {} + with models.make_session(self.config) as session: + for module_name, module_stream in requires.items(): + local_modules = models.ModuleBuild.local_modules( + session, module_name, module_stream) + if local_modules: + local_build = local_modules[0] + new_requires[module_name] = { + 'ref': None, + 'stream': local_build.stream, + 'version': local_build.version, + # No need to set filtered_rpms for local builds, because MBS + # filters the RPMs automatically when the module build is + # done. 
+ 'filtered_rpms': [] + } + continue + + build = models.ModuleBuild.get_last_build_in_stream( + session, module_name, module_stream) + if not build: + raise UnprocessableEntity('The module {}:{} was not found'.format( + module_name, module_stream)) + commit_hash = None + filtered_rpms = [] + mmd = build.mmd() + mbs_xmd = mmd.get_xmd().get('mbs') + if mbs_xmd and 'commit' in mbs_xmd.keys(): + commit_hash = mbs_xmd['commit'] + else: + raise RuntimeError( + 'The module "{0}" didn\'t contain a commit hash in its xmd' + .format(module_name)) + + # Find out the particular NVR of filtered packages + rpm_filter = mmd.get_rpm_filter() + if rpm_filter: + for rpm in build.component_builds: + if rpm.package in rpm_filter: + filtered_rpms.append(rpm.nvr) + + new_requires[module_name] = { + 'ref': commit_hash, + 'stream': module_stream, + 'version': build.version, + 'filtered_rpms': filtered_rpms, + } + + return new_requires diff --git a/setup.py b/setup.py index 5ef8fbb0..df283023 100644 --- a/setup.py +++ b/setup.py @@ -59,6 +59,7 @@ setup(name='module-build-service', ], 'mbs.resolver_backends': [ 'pdc = module_build_service.resolver.PDCResolver:PDCResolver', + 'db = module_build_service.resolver.DBResolver:DBResolver', ], }, scripts=["contrib/mbs-build"], diff --git a/tests/__init__.py b/tests/__init__.py index 4bf53866..f3856d96 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -91,6 +91,21 @@ def clean_database(): db.session.commit() db.drop_all() db.create_all() + platform = ModuleBuild() + platform.name = 'platform' + platform.stream = 'f28' + platform.version = '3' + platform.koji_tag = 'module-f28-build' + platform.state = BUILD_STATES['ready'] + with open(os.path.join(base_dir, 'staged_data', 'platform.yaml')) as f: + platform.modulemd = f.read() + platform.rebuild_strategy = 'all' + platform.owner = 'releng' + platform.time_submitted = datetime.utcnow() + platform.time_modified = datetime.utcnow() + platform.time_completed = datetime.utcnow() + db.session.add(platform) + db.session.commit() def init_data(data_size=10, contexts=False): @@ -150,7 +165,7 @@ def _populate_data(session, data_size=10, contexts=False): component_one_build_one.state = koji.BUILD_STATES['COMPLETE'] component_one_build_one.nvr = 'nginx-1.10.1-2.{0}'.format(build_one_component_release) component_one_build_one.batch = 1 - component_one_build_one.module_id = 1 + index * 3 + component_one_build_one.module_id = 2 + index * 3 component_one_build_one.tagged = True component_one_build_one.tagged_in_final = True @@ -165,7 +180,7 @@ def _populate_data(session, data_size=10, contexts=False): component_two_build_one.nvr = \ 'module-build-macros-01-1.{0}'.format(build_one_component_release) component_two_build_one.batch = 2 - component_two_build_one.module_id = 1 + index * 3 + component_two_build_one.module_id = 2 + index * 3 component_two_build_one.tagged = True component_two_build_one.tagged_in_final = True @@ -201,7 +216,7 @@ def _populate_data(session, data_size=10, contexts=False): component_one_build_two.state = koji.BUILD_STATES['COMPLETE'] component_one_build_two.nvr = 'postgresql-9.5.3-4.{0}'.format(build_two_component_release) component_one_build_two.batch = 2 - component_one_build_two.module_id = 2 + index * 3 + component_one_build_two.module_id = 3 + index * 3 component_one_build_two.tagged = True component_one_build_two.tagged_in_final = True @@ -216,7 +231,7 @@ def _populate_data(session, data_size=10, contexts=False): component_two_build_two.nvr = \ 
'module-build-macros-01-1.{0}'.format(build_two_component_release) component_two_build_two.batch = 1 - component_two_build_two.module_id = 2 + index * 3 + component_two_build_two.module_id = 3 + index * 3 component_one_build_two.tagged = True component_one_build_two.build_time_only = True @@ -252,7 +267,7 @@ def _populate_data(session, data_size=10, contexts=False): component_one_build_three.nvr = \ 'postgresql-9.5.3-4.{0}'.format(build_three_component_release) component_one_build_three.batch = 2 - component_one_build_three.module_id = 3 + index * 3 + component_one_build_three.module_id = 4 + index * 3 component_two_build_three = ComponentBuild() component_two_build_three.package = 'module-build-macros' @@ -265,7 +280,7 @@ def _populate_data(session, data_size=10, contexts=False): component_two_build_three.nvr = \ 'module-build-macros-01-1.{0}'.format(build_three_component_release) component_two_build_three.batch = 1 - component_two_build_three.module_id = 3 + index * 3 + component_two_build_three.module_id = 4 + index * 3 component_two_build_three.tagged = True component_two_build_three.build_time_only = True @@ -322,7 +337,7 @@ def scheduler_init_data(tangerine_state=None): component_one_build_one.nvr = \ 'perl-Tangerine-0.23-1.{0}'.format(build_one_component_release) component_one_build_one.batch = 2 - component_one_build_one.module_id = 1 + component_one_build_one.module_id = 2 component_one_build_one.ref = '4ceea43add2366d8b8c5a622a2fb563b625b9abf' component_one_build_one.tagged = True component_one_build_one.tagged_in_final = True @@ -338,7 +353,7 @@ def scheduler_init_data(tangerine_state=None): component_two_build_one.nvr = \ 'perl-List-Compare-0.53-5.{0}'.format(build_one_component_release) component_two_build_one.batch = 2 - component_two_build_one.module_id = 1 + component_two_build_one.module_id = 2 component_two_build_one.ref = '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb' component_two_build_one.tagged = True component_two_build_one.tagged_in_final = True @@ -350,7 +365,7 @@ def scheduler_init_data(tangerine_state=None): '?#fbed359411a1baa08d4a88e0d12d426fbf8f602c') component_three_build_one.format = 'rpms' component_three_build_one.batch = 3 - component_three_build_one.module_id = 1 + component_three_build_one.module_id = 2 component_three_build_one.ref = 'fbed359411a1baa08d4a88e0d12d426fbf8f602c' component_three_build_one.state = tangerine_state if tangerine_state: @@ -372,7 +387,7 @@ def scheduler_init_data(tangerine_state=None): component_four_build_one.nvr = \ 'module-build-macros-0.1-1.{0}'.format(build_one_component_release) component_four_build_one.batch = 1 - component_four_build_one.module_id = 1 + component_four_build_one.module_id = 2 component_four_build_one.tagged = True component_four_build_one.build_time_only = True @@ -428,7 +443,7 @@ def reuse_component_init_data(): component_one_build_one.nvr = \ 'perl-Tangerine-0.23-1.{0}'.format(build_one_component_release) component_one_build_one.batch = 2 - component_one_build_one.module_id = 1 + component_one_build_one.module_id = 2 component_one_build_one.ref = '4ceea43add2366d8b8c5a622a2fb563b625b9abf' component_one_build_one.tagged = True component_one_build_one.tagged_in_final = True @@ -443,7 +458,7 @@ def reuse_component_init_data(): component_two_build_one.nvr = \ 'perl-List-Compare-0.53-5.{0}'.format(build_one_component_release) component_two_build_one.batch = 2 - component_two_build_one.module_id = 1 + component_two_build_one.module_id = 2 component_two_build_one.ref = '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb' 
component_two_build_one.tagged = True component_two_build_one.tagged_in_final = True @@ -458,7 +473,7 @@ def reuse_component_init_data(): component_three_build_one.nvr = \ 'tangerine-0.22-3.{0}'.format(build_one_component_release) component_three_build_one.batch = 3 - component_three_build_one.module_id = 1 + component_three_build_one.module_id = 2 component_three_build_one.ref = 'fbed359411a1baa08d4a88e0d12d426fbf8f602c' component_three_build_one.tagged = True component_three_build_one.tagged_in_final = True @@ -473,7 +488,7 @@ def reuse_component_init_data(): component_four_build_one.nvr = \ 'module-build-macros-0.1-1.{0}'.format(build_one_component_release) component_four_build_one.batch = 1 - component_four_build_one.module_id = 1 + component_four_build_one.module_id = 2 component_four_build_one.tagged = True component_four_build_one.build_time_only = True @@ -506,7 +521,7 @@ def reuse_component_init_data(): '?#4ceea43add2366d8b8c5a622a2fb563b625b9abf') component_one_build_two.format = 'rpms' component_one_build_two.batch = 2 - component_one_build_two.module_id = 2 + component_one_build_two.module_id = 3 component_one_build_two.ref = '4ceea43add2366d8b8c5a622a2fb563b625b9abf' component_two_build_two = module_build_service.models.ComponentBuild() component_two_build_two.package = 'perl-List-Compare' @@ -515,7 +530,7 @@ def reuse_component_init_data(): '?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb') component_two_build_two.format = 'rpms' component_two_build_two.batch = 2 - component_two_build_two.module_id = 2 + component_two_build_two.module_id = 3 component_two_build_two.ref = '76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb' component_three_build_two = module_build_service.models.ComponentBuild() component_three_build_two.package = 'tangerine' @@ -524,7 +539,7 @@ def reuse_component_init_data(): '?#fbed359411a1baa08d4a88e0d12d426fbf8f602c') component_three_build_two.format = 'rpms' component_three_build_two.batch = 3 - component_three_build_two.module_id = 2 + component_three_build_two.module_id = 3 component_three_build_two.ref = 'fbed359411a1baa08d4a88e0d12d426fbf8f602c' component_four_build_two = module_build_service.models.ComponentBuild() component_four_build_two.package = 'module-build-macros' @@ -537,7 +552,7 @@ def reuse_component_init_data(): component_four_build_two.nvr = \ 'module-build-macros-0.1-1.{0}'.format(build_two_component_release) component_four_build_two.batch = 1 - component_four_build_two.module_id = 2 + component_four_build_two.module_id = 3 component_four_build_two.tagged = True component_four_build_two.build_time_only = True @@ -600,7 +615,7 @@ def reuse_shared_userspace_init_data(): pkgref = mmd.get_xmd()['mbs']['rpms'][pkg.get_name()]['ref'] full_url = pkg.get_repository() + "?#" + pkgref build = module_build_service.models.ComponentBuild( - module_id=1, + module_id=2, package=pkg.get_name(), format="rpms", scmurl=full_url, @@ -654,7 +669,7 @@ def reuse_shared_userspace_init_data(): pkgref = mmd2.get_xmd()['mbs']['rpms'][pkg.get_name()]['ref'] full_url = pkg.get_repository() + "?#" + pkgref build = module_build_service.models.ComponentBuild( - module_id=2, + module_id=3, package=pkg.get_name(), format="rpms", scmurl=full_url, diff --git a/tests/staged_data/local_builds/module-child-master-20170816080815/results/modules.yaml b/tests/staged_data/local_builds/module-child-master-20170816080815/results/modules.yaml index 0595c2ac..64e574be 100644 --- a/tests/staged_data/local_builds/module-child-master-20170816080815/results/modules.yaml +++ 
b/tests/staged_data/local_builds/module-child-master-20170816080815/results/modules.yaml @@ -29,7 +29,7 @@ data: mbs: buildrequires: parent: {ref: 147dca4ca65aa9a1ac51f71b7e687f9178ffa5df, stream: master, - version: '20170616125652'} + version: '20170816080815'} commit: 722fd739fd6cf66faf29f6fb95dd64f60ba3e39a rpms: ed: {ref: 01bf8330812fea798671925cc537f2f29b0bd216} diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py index 5d30d266..517a92e5 100644 --- a/tests/test_build/test_build.py +++ b/tests/test_build/test_build.py @@ -322,7 +322,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') def test_submit_build(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive, mmd_version): + mmd_version): """ Tests the build of testmodule.yaml using FakeModuleBuilder which succeeds everytime. @@ -387,49 +387,18 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_no_components(self, mocked_scm, mocked_get_user, conf_system, dbg, pdc): + def test_submit_build_no_components(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests the build of a module with no components """ FakeSCM(mocked_scm, 'python3', 'python3-no-components.yaml', '620ec77321b2ea7b0d67d82992dda3e1d67055b4') - python3_yaml_path = os.path.join( - base_dir, 'staged_data', 'formatted_python3-no-components.yaml') - with open(python3_yaml_path) as f: - python3_yaml = f.read() - rv = self.client.post('/module-build-service/1/module-builds/', data=json.dumps( {'branch': 'master', 'scmurl': 'git://pkgs.stg.fedoraproject.org/modules/' 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) data = json.loads(rv.data) module_build_id = data['id'] - pdc.endpoints['unreleasedvariants']['GET'].append({ - 'variant_id': 'python3', - 'variant_uid': 'python3:master:20180205135154', - 'variant_name': 'python3', - 'variant_type': 'module', - 'variant_version': 'master', - 'variant_release': '20180205135154', - 'variant_context': 'c2c572ec', - 'koji_tag': 'module-95b214a704c984be', - 'modulemd': python3_yaml, - 'runtime_deps': [ - { - 'dependency': 'platform', - 'stream': 'f28' - } - ], - 'build_deps': [ - { - 'dependency': 'platform', - 'stream': 'f28' - } - ], - 'rpms': [], - 'active': False, - }) - msgs = [] stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) @@ -461,8 +430,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_from_yaml_allowed(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + def test_submit_build_from_yaml_allowed(self, mocked_scm, mocked_get_user, conf_system, dbg): FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', '620ec77321b2ea7b0d67d82992dda3e1d67055b4') testmodule = os.path.join(base_dir, 'staged_data', 'testmodule.yaml') @@ -475,16 +443,7 @@ class TestBuild: content_type='multipart/form-data', data={'yaml': yaml_file}) data = json.loads(rv.data) - assert data['id'] == 1 - - # Since the module's version is derived a submission for direct yaml submissions, we must - # alter PDC with the correct version that MBS generated - version = models.ModuleBuild.query.first().version - pdc_module_inactive.endpoints['unreleasedvariants']['GET'][1]['variant_release'] = version - uid = 
pdc_module_inactive.endpoints['unreleasedvariants']['GET'][1]['variant_uid'] - new_uid = ':'.join([uid.rsplit(':', 1)[0], version]) - pdc_module_inactive.endpoints['unreleasedvariants']['GET'][1]['variant_uid'] = new_uid - + assert data['id'] == 2 msgs = [] stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) @@ -511,8 +470,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_cancel(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + def test_submit_build_cancel(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Submit all builds for a module and cancel the module build later. """ @@ -563,8 +521,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_instant_complete(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + def test_submit_build_instant_complete(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests the build of testmodule.yaml using FakeModuleBuilder which succeeds everytime. @@ -597,7 +554,7 @@ class TestBuild: new_callable=PropertyMock, return_value=1) def test_submit_build_concurrent_threshold(self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, - conf_system, dbg, pdc_module_inactive): + conf_system, dbg): """ Tests the build of testmodule.yaml using FakeModuleBuilder with num_concurrent_builds set to 1. @@ -641,7 +598,7 @@ class TestBuild: new_callable=PropertyMock, return_value=2) def test_try_to_reach_concurrent_threshold(self, conf_num_concurrent_builds, mocked_scm, mocked_get_user, - conf_system, dbg, pdc_module_inactive): + conf_system, dbg): """ Tests that we try to submit new component build right after the previous one finished without waiting for all @@ -649,18 +606,6 @@ class TestBuild: """ FakeSCM(mocked_scm, 'testmodule-more-components', 'testmodule-more-components.yaml', '620ec77321b2ea7b0d67d82992dda3e1d67055b4') - # Modify the modulemd in PDC - current_dir = os.path.dirname(__file__) - formatted_yml_path = os.path.join( - current_dir, '..', 'staged_data', 'formatted_testmodule-more-components.yaml') - with open(formatted_yml_path) as f: - yaml = f.read() - pdc_module_inactive.endpoints['unreleasedvariants']['GET'][-1].update({ - 'variant_id': 'testmodule-more-components', - 'variant_name': 'testmodule-more-components', - 'modulemd': yaml - }) - self.client.post('/module-build-service/1/module-builds/', data=json.dumps( {'branch': 'master', 'scmurl': 'git://pkgs.stg.fedoraproject.org/modules/' 'testmodule.git?#620ec77321b2ea7b0d67d82992dda3e1d67055b4'})) @@ -702,7 +647,7 @@ class TestBuild: @patch("module_build_service.config.Config.num_concurrent_builds", new_callable=PropertyMock, return_value=1) def test_build_in_batch_fails(self, conf_num_concurrent_builds, mocked_scm, - mocked_get_user, conf_system, dbg, pdc_module_inactive): + mocked_get_user, conf_system, dbg): """ Tests that if the build in batch fails, other components in a batch are still build, but next batch is not started. 
@@ -761,7 +706,7 @@ class TestBuild: @patch("module_build_service.config.Config.num_concurrent_builds", new_callable=PropertyMock, return_value=1) def test_all_builds_in_batch_fail(self, conf_num_concurrent_builds, mocked_scm, - mocked_get_user, conf_system, dbg, pdc_module_inactive): + mocked_get_user, conf_system, dbg): """ Tests that if the build in batch fails, other components in a batch are still build, but next batch is not started. @@ -805,8 +750,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_reuse): + def test_submit_build_reuse_all(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests that we do not try building module-build-macros when reusing all components in a module build. @@ -839,7 +783,7 @@ class TestBuild: assert buildtag_groups.pop(0) == set(artifacts) FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb - msgs = [MBSModule("local module build", 2, 1)] + msgs = [MBSModule("local module build", 3, 1)] stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) @@ -848,7 +792,7 @@ class TestBuild: # All components should be built and module itself should be in "done" # or "ready" state. - for build in models.ComponentBuild.query.filter_by(module_id=2).all(): + for build in models.ComponentBuild.query.filter_by(module_id=3).all(): assert build.state == koji.BUILD_STATES['COMPLETE'] assert build.module_build.state in [models.BUILD_STATES["done"], models.BUILD_STATES["ready"]] @@ -857,7 +801,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') def test_submit_build_reuse_all_without_build_macros(self, mocked_scm, mocked_get_user, - conf_system, dbg, pdc_module_reuse): + conf_system, dbg): """ Tests that we can reuse components even when the reused module does not have module-build-macros component. @@ -895,13 +839,13 @@ class TestBuild: assert buildtag_groups.pop(0) == set(artifacts) FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb - msgs = [MBSModule("local module build", 2, 1)] + msgs = [MBSModule("local module build", 3, 1)] stop = module_build_service.scheduler.make_simple_stop_condition(db.session) module_build_service.scheduler.main(msgs, stop) # All components should be built and module itself should be in "done" # or "ready" state. - for build in models.ComponentBuild.query.filter_by(module_id=2).all(): + for build in models.ComponentBuild.query.filter_by(module_id=3).all(): assert build.state == koji.BUILD_STATES['COMPLETE'] assert build.module_build.state in [models.BUILD_STATES["done"], models.BUILD_STATES["ready"]] @@ -909,16 +853,13 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + def test_submit_build_resume(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests that resuming the build works even when previous batches are already built. 
""" now = datetime.utcnow() submitted_time = now - timedelta(minutes=3) - pdc_module_inactive.endpoints['unreleasedvariants']['GET'][-1]['variant_context'] = \ - '7c29193d' # Create a module in the failed state build_one = models.ModuleBuild() build_one.name = 'testmodule' @@ -959,7 +900,7 @@ class TestBuild: component_one.state = koji.BUILD_STATES['COMPLETE'] component_one.nvr = 'perl-Tangerine-0:0.22-2.module+0+814cfa39' component_one.batch = 2 - component_one.module_id = 1 + component_one.module_id = 2 component_one.ref = '7e96446223f1ad84a26c7cf23d6591cd9f6326c6' component_one.tagged = True component_one.tagged_in_final = True @@ -971,14 +912,14 @@ class TestBuild: 'git://pkgs.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master' component_two.state = koji.BUILD_STATES['FAILED'] component_two.batch = 2 - component_two.module_id = 1 + component_two.module_id = 2 # Component that isn't started yet component_three = models.ComponentBuild() component_three.package = 'tangerine' component_three.format = 'rpms' component_three.scmurl = 'git://pkgs.stg.fedoraproject.org/rpms/tangerine.git?#master' component_three.batch = 3 - component_three.module_id = 1 + component_three.module_id = 2 # module-build-macros component_four = models.ComponentBuild() component_four.package = 'module-build-macros' @@ -988,7 +929,7 @@ class TestBuild: '/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1.' 'module_testmodule_master_20170109091357.src.rpm') component_four.batch = 1 - component_four.module_id = 1 + component_four.module_id = 2 component_four.tagged = True component_four.build_time_only = True @@ -1034,8 +975,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') def test_submit_build_resume_recover_orphaned_macros( - self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests that resuming the build works when module-build-macros is orphaned but marked as failed in the database @@ -1043,8 +983,6 @@ class TestBuild: FakeModuleBuilder.INSTANT_COMPLETE = True now = datetime.utcnow() submitted_time = now - timedelta(minutes=3) - pdc_module_inactive.endpoints['unreleasedvariants']['GET'][-1]['variant_context'] = \ - '7c29193d' # Create a module in the failed state build_one = models.ModuleBuild() build_one.name = 'testmodule' @@ -1083,20 +1021,20 @@ class TestBuild: component_one.format = 'rpms' component_one.scmurl = 'git://pkgs.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master' component_one.batch = 2 - component_one.module_id = 1 + component_one.module_id = 2 component_two = models.ComponentBuild() component_two.package = 'perl-List-Compare' component_two.format = 'rpms' component_two.scmurl = \ 'git://pkgs.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master' component_two.batch = 2 - component_two.module_id = 1 + component_two.module_id = 2 component_three = models.ComponentBuild() component_three.package = 'tangerine' component_three.format = 'rpms' component_three.scmurl = 'git://pkgs.stg.fedoraproject.org/rpms/tangerine.git?#master' component_three.batch = 3 - component_three.module_id = 1 + component_three.module_id = 2 # Failed module-build-macros component_four = models.ComponentBuild() component_four.package = 'module-build-macros' @@ -1106,7 +1044,7 @@ class TestBuild: '/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1.' 
'module_testmodule_master_20180205135154.src.rpm') component_four.batch = 1 - component_four.module_id = 1 + component_four.module_id = 2 component_four.build_time_only = True db.session.add(build_one) @@ -1148,8 +1086,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_resume_failed_init(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + def test_submit_build_resume_failed_init(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests that resuming the build works when the build failed during the init step """ @@ -1203,8 +1140,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') - def test_submit_build_resume_init_fail(self, mocked_scm, mocked_get_user, conf_system, dbg, - pdc_module_inactive): + def test_submit_build_resume_init_fail(self, mocked_scm, mocked_get_user, conf_system, dbg): """ Tests that resuming the build fails when the build is in init state """ @@ -1234,7 +1170,7 @@ class TestBuild: @patch('module_build_service.auth.get_user', return_value=user) @patch('module_build_service.scm.SCM') def test_submit_build_repo_regen_not_started_batch(self, mocked_scm, mocked_get_user, - conf_system, dbg, pdc_module_inactive): + conf_system, dbg): """ Tests that if MBS starts a new batch, the concurrent component threshold is met before a build can start, and an unexpected repo regen occurs, the build will not fail. @@ -1275,7 +1211,7 @@ class TestBuild: # Simulate a random repo regen message that MBS didn't expect cleanup_moksha() - module = db.session.query(models.ModuleBuild).first() + module = db.session.query(models.ModuleBuild).get(module_build_id) msgs = [module_build_service.messaging.KojiRepoChange( msg_id='a faked internal message', repo_tag=module.koji_tag + '-build')] db.session.expire_all() diff --git a/tests/test_builder/test_koji.py b/tests/test_builder/test_koji.py index a2cd88c0..a958a548 100644 --- a/tests/test_builder/test_koji.py +++ b/tests/test_builder/test_koji.py @@ -62,7 +62,7 @@ class TestKojiBuilder: self.config = mock.Mock() self.config.koji_profile = conf.koji_profile self.config.koji_repository_url = conf.koji_repository_url - self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one() + self.module = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() def test_tag_to_repo(self): """ Test that when a repo msg hits us and we have no match, @@ -91,7 +91,7 @@ class TestKojiBuilder: build_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, 'build_id': 91}] dest_tagged = [{"nvr": "foo-1.0-1.module+e0095747", "task_id": 12345, 'build_id': 91}] builder.koji_session.listTagged.side_effect = [build_tagged, dest_tagged] - module_build = module_build_service.models.ModuleBuild.query.get(3) + module_build = module_build_service.models.ModuleBuild.query.get(4) component_build = module_build.component_builds[0] component_build.task_id = None component_build.state = None @@ -106,7 +106,7 @@ class TestKojiBuilder: assert actual[0].build_name == 'rubygem-rails' assert actual[0].build_version == '1.0' assert actual[0].build_release == '1.module+e0095747' - assert actual[0].module_build_id == 3 + assert actual[0].module_build_id == 4 assert type(actual[1]) == module_build_service.messaging.KojiTagChange assert actual[1].tag == 'module-foo-build' assert actual[1].artifact == 'rubygem-rails' @@ -129,7 +129,7 @@ class 
TestKojiBuilder: builder.module_tag = {"name": "module-foo", "id": 1} builder.module_build_tag = {"name": "module-foo-build", "id": 2} - dist_tag = 'module+1+b8661ee4' + dist_tag = 'module+2+b8661ee4' # Set listTagged to return test data builder.koji_session.listTagged.side_effect = [[], [], []] untagged = [{ @@ -145,7 +145,7 @@ class TestKojiBuilder: 'build_id': 91 } builder.koji_session.getBuild.return_value = build_info - module_build = module_build_service.models.ModuleBuild.query.get(3) + module_build = module_build_service.models.ModuleBuild.query.get(4) component_build = module_build.component_builds[0] component_build.task_id = None component_build.nvr = None @@ -160,7 +160,7 @@ class TestKojiBuilder: assert actual[0].build_name == 'rubygem-rails' assert actual[0].build_version == '1.0' assert actual[0].build_release == '1.{0}'.format(dist_tag) - assert actual[0].module_build_id == 3 + assert actual[0].module_build_id == 4 assert component_build.state == koji.BUILD_STATES['COMPLETE'] assert component_build.task_id == 12345 assert component_build.state_reason == 'Found existing build' @@ -186,7 +186,7 @@ class TestKojiBuilder: "release": "nope", }] builder.koji_session.untaggedBuilds.return_value = untagged - module_build = module_build_service.models.ModuleBuild.query.get(3) + module_build = module_build_service.models.ModuleBuild.query.get(4) component_build = module_build.component_builds[0] component_build.task_id = None component_build.nvr = None diff --git a/tests/test_builder/test_mock.py b/tests/test_builder/test_mock.py index d211c58a..e937a215 100644 --- a/tests/test_builder/test_mock.py +++ b/tests/test_builder/test_mock.py @@ -29,7 +29,7 @@ class TestMockModuleBuilder: def _create_module_with_filters(self, session, batch, state): comp_builds = [ { - "module_id": 1, + "module_id": 2, "package": "ed", "format": "rpms", "scmurl": ("git://pkgs.fedoraproject.org/rpms/ed" @@ -38,7 +38,7 @@ class TestMockModuleBuilder: "ref": "01bf8330812fea798671925cc537f2f29b0bd216" }, { - "module_id": 1, + "module_id": 2, "package": "mksh", "format": "rpms", "scmurl": ("git://pkgs.fedoraproject.org/rpms/mksh" diff --git a/tests/test_content_generator.py b/tests/test_content_generator.py index c3944e82..8f24bdee 100644 --- a/tests/test_content_generator.py +++ b/tests/test_content_generator.py @@ -48,7 +48,7 @@ class TestBuild: def setup_method(self, test_method): init_data(1) - module = models.ModuleBuild.query.filter_by(id=1).one() + module = models.ModuleBuild.query.filter_by(id=2).one() module.cg_build_koji_tag = "f27-module-candidate" self.cg = KojiContentGenerator(module, conf) diff --git a/tests/test_get_generator_json_expected_output.json b/tests/test_get_generator_json_expected_output.json index fb03cf6e..4b232346 100644 --- a/tests/test_get_generator_json_expected_output.json +++ b/tests/test_get_generator_json_expected_output.json @@ -650,7 +650,7 @@ "context": "00000000", "stream": "1", "version": "2", - "module_build_service_id": 1, + "module_build_service_id": 2, "content_koji_tag": "module-nginx-1.2", "modulemd_str": "# Document type identifier\ndocument: modulemd\n# Module metadata format version\nversion: 1\ndata:\n # Module name, optional\n # Typically filled in by the buildsystem, using the VCS repository\n # name as the name of the module.\n name: nginx\n # Module update stream, optional\n # Typically filled in by the buildsystem, using the VCS branch name\n # as the name of the stream.\n stream: 1\n # Module version, integer, optional, cannot be negative\n # Typically 
filled in by the buildsystem, using the VCS commit\n # timestamp. Module version defines upgrade path for the particular\n # update stream.\n version: 2\n # A short summary describing the module, required\n summary: An example nginx module\n # A verbose description of the module, required\n description: >\n A module for the tests of module build service\n # Module and content licenses in the Fedora license identifier\n # format, required\n license:\n # Module license, required\n # This list covers licenses used for the module metadata, SPEC\n # files or extra patches\n module:\n - MIT\n" } diff --git a/tests/test_get_generator_json_expected_output_with_log.json b/tests/test_get_generator_json_expected_output_with_log.json index 4aeaf552..63fc406a 100644 --- a/tests/test_get_generator_json_expected_output_with_log.json +++ b/tests/test_get_generator_json_expected_output_with_log.json @@ -659,7 +659,7 @@ "context": "00000000", "stream": "1", "version": "2", - "module_build_service_id": 1, + "module_build_service_id": 2, "content_koji_tag": "module-nginx-1.2", "modulemd_str": "# Document type identifier\ndocument: modulemd\n# Module metadata format version\nversion: 1\ndata:\n # Module name, optional\n # Typically filled in by the buildsystem, using the VCS repository\n # name as the name of the module.\n name: nginx\n # Module update stream, optional\n # Typically filled in by the buildsystem, using the VCS branch name\n # as the name of the stream.\n stream: 1\n # Module version, integer, optional, cannot be negative\n # Typically filled in by the buildsystem, using the VCS commit\n # timestamp. Module version defines upgrade path for the particular\n # update stream.\n version: 2\n # A short summary describing the module, required\n summary: An example nginx module\n # A verbose description of the module, required\n description: >\n A module for the tests of module build service\n # Module and content licenses in the Fedora license identifier\n # format, required\n license:\n # Module license, required\n # This list covers licenses used for the module metadata, SPEC\n # files or extra patches\n module:\n - MIT\n" } diff --git a/tests/test_logger.py b/tests/test_logger.py index f2452787..d27ce411 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -63,13 +63,13 @@ class TestLogger: """ Tests that ModuleBuildLogs is logging properly to build log file. """ - build = models.ModuleBuild.query.filter_by(id=1).one() + build = models.ModuleBuild.query.filter_by(id=2).one() # Initialize logging, get the build log path and remove it to # ensure we are not using some garbage from previous failed test. self.build_log.start(build) path = self.build_log.path(build) - assert path[len(self.base):] == "/build-1.log" + assert path[len(self.base):] == "/build-2.log" if os.path.exists(path): os.unlink(path) @@ -82,8 +82,8 @@ class TestLogger: self.build_log.stop(build) assert not os.path.exists(path) - # Try logging with current_module_build_id set to 1 and then to 2. - # Only messages with current_module_build_id set to 1 should appear in + # Try logging with current_module_build_id set to 2 and then to 2. + # Only messages with current_module_build_id set to 2 should appear in # the log. self.build_log.start(build) MBSConsumer.current_module_build_id = 1 @@ -104,11 +104,11 @@ class TestLogger: data = f.read() # Note that DEBUG is not present unless configured server-wide. 
for level in ["INFO", "WARNING", "ERROR"]: - assert data.find("%s - ignore this test msg1" % level) != -1 + assert data.find("%s - ignore this test msg2" % level) != -1 # Try to log more messages when build_log for module 1 is stopped. # New messages should not appear in a log. - MBSConsumer.current_module_build_id = 1 + MBSConsumer.current_module_build_id = 2 log.debug("ignore this test msg3") log.info("ignore this test msg3") log.warn("ignore this test msg3") @@ -119,11 +119,11 @@ class TestLogger: assert data.find("ignore this test msg3") == -1 def test_module_build_logs_name_format(self): - build = models.ModuleBuild.query.filter_by(id=1).one() + build = models.ModuleBuild.query.filter_by(id=2).one() log1 = ModuleBuildLogs("/some/path", "build-{id}.log") - assert log1.name(build) == "build-1.log" - assert log1.path(build) == "/some/path/build-1.log" + assert log1.name(build) == "build-2.log" + assert log1.path(build) == "/some/path/build-2.log" log2 = ModuleBuildLogs("/some/path", "build-{name}-{stream}-{version}.log") assert log2.name(build) == "build-nginx-1-2.log" diff --git a/tests/test_resolver/test_db.py b/tests/test_resolver/test_db.py new file mode 100644 index 00000000..3169fdc1 --- /dev/null +++ b/tests/test_resolver/test_db.py @@ -0,0 +1,169 @@ +# Copyright (c) 2018 Red Hat, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# +# Written by Matt Prahl + +import os + +from mock import patch, PropertyMock +import pytest +import gi +gi.require_version('Modulemd', '1.0') # noqa +from gi.repository import Modulemd + +import module_build_service.resolver as mbs_resolver +from module_build_service import app, db, models, glib, utils +import tests + + +base_dir = os.path.join(os.path.dirname(__file__), "..") + + +class TestDBModule: + + def setup_method(self): + tests.reuse_component_init_data() + + @pytest.mark.parametrize('empty_buildrequires', [False, True]) + def test_get_module_build_dependencies(self, empty_buildrequires): + """ + Tests that the buildrequires of testmodule are returned + """ + expected = set(['module-f28-build']) + if empty_buildrequires: + expected = set() + module = models.ModuleBuild.query.get(2) + mmd = module.mmd() + # Wipe out the dependencies + mmd.set_dependencies() + xmd = glib.from_variant_dict(mmd.get_xmd()) + xmd['mbs']['buildrequires'] = {} + mmd.set_xmd(glib.dict_values(xmd)) + module.modulemd = mmd.dumps() + db.session.add(module) + db.session.commit() + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + result = resolver.get_module_build_dependencies( + 'testmodule', 'master', '20170109091357', '7c29193d').keys() + assert set(result) == expected + + def test_get_module_build_dependencies_recursive(self): + """ + Tests that the buildrequires are returned when it is two layers deep + """ + # Add testmodule2 that requires testmodule + module = models.ModuleBuild.query.get(3) + mmd = module.mmd() + mmd.set_name('testmodule2') + mmd.set_version(20180123171545) + requires = mmd.get_dependencies()[0].get_requires() + requires['testmodule'] = Modulemd.SimpleSet() + requires['testmodule'].add('master') + mmd.get_dependencies()[0].set_requires(requires) + xmd = glib.from_variant_dict(mmd.get_xmd()) + xmd['mbs']['requires']['testmodule'] = { + 'filtered_rpms': [], + 'ref': '620ec77321b2ea7b0d67d82992dda3e1d67055b4', + 'stream': 'master', + 'version': '20180205135154' + } + mmd.set_xmd(glib.dict_values(xmd)) + module.modulemd = mmd.dumps() + module.name = 'testmodule2' + module.version = str(mmd.get_version()) + module.koji_tag = 'module-ae2adf69caf0e1b6' + + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + result = resolver.get_module_build_dependencies( + 'testmodule2', 'master', '20180123171545', '7c29193d').keys() + assert set(result) == set(['module-f28-build']) + + @patch("module_build_service.config.Config.system", + new_callable=PropertyMock, return_value="test") + @patch("module_build_service.config.Config.mock_resultsdir", + new_callable=PropertyMock, + return_value=os.path.join(base_dir, 'staged_data', "local_builds")) + def test_get_module_build_dependencies_recursive_requires( + self, resultdir, conf_system): + """ + Tests that it returns the requires of the buildrequires recursively + """ + with app.app_context(): + utils.load_local_builds(["platform", "parent", "child", "testmodule"]) + + build = models.ModuleBuild.local_modules( + db.session, "child", "master") + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys() + + local_path = os.path.join(base_dir, 'staged_data', "local_builds") + + expected = [ + os.path.join( + local_path, + 'module-platform-f28-3/results'), + os.path.join( + local_path, + 'module-parent-master-20170816080815/results'), + ] + assert set(result) == set(expected) + + def test_resolve_profiles(self): + """ + 
Tests that the profiles get resolved recursively + """ + mmd = models.ModuleBuild.query.get(2).mmd() + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot')) + expected = { + 'buildroot': + set(['unzip', 'tar', 'cpio', 'gawk', 'gcc', 'xz', 'sed', + 'findutils', 'util-linux', 'bash', 'info', 'bzip2', + 'grep', 'redhat-rpm-config', 'fedora-release', + 'diffutils', 'make', 'patch', 'shadow-utils', 'coreutils', + 'which', 'rpm-build', 'gzip', 'gcc-c++']), + 'srpm-buildroot': + set(['shadow-utils', 'redhat-rpm-config', 'rpm-build', + 'fedora-release', 'fedpkg-minimal', 'gnupg2', + 'bash']) + } + assert result == expected + + @patch("module_build_service.config.Config.system", + new_callable=PropertyMock, return_value="test") + @patch("module_build_service.config.Config.mock_resultsdir", + new_callable=PropertyMock, + return_value=os.path.join(base_dir, 'staged_data', "local_builds")) + def test_resolve_profiles_local_module(self, local_builds, conf_system): + """ + Test that profiles get resolved recursively on local builds + """ + with app.app_context(): + utils.load_local_builds(['platform']) + mmd = models.ModuleBuild.query.get(2).mmd() + resolver = mbs_resolver.GenericResolver.create(tests.conf, backend='db') + result = resolver.resolve_profiles(mmd, ('buildroot', 'srpm-buildroot')) + expected = { + 'buildroot': + set(['foo']), + 'srpm-buildroot': + set(['bar']) + } + assert result == expected diff --git a/tests/test_scheduler/test_module_init.py b/tests/test_scheduler/test_module_init.py index c4430dfe..8bcd4d52 100644 --- a/tests/test_scheduler/test_module_init.py +++ b/tests/test_scheduler/test_module_init.py @@ -56,21 +56,21 @@ class TestModuleInit: pass @patch('module_build_service.scm.SCM') - def test_init_basic(self, mocked_scm, pdc): + def test_init_basic(self, mocked_scm): FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', '620ec77321b2ea7b0d67d82992dda3e1d67055b4') msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=1, module_build_state='init') + msg_id=None, module_build_id=2, module_build_state='init') with make_session(conf) as session: self.fn(config=conf, session=session, msg=msg) - build = ModuleBuild.query.filter_by(id=1).one() + build = ModuleBuild.query.filter_by(id=2).one() # Make sure the module entered the wait state assert build.state == 1, build.state # Make sure format_mmd was run properly assert type(build.mmd().get_xmd()['mbs']) is GLib.Variant @patch('module_build_service.scm.SCM') - def test_init_scm_not_available(self, mocked_scm, pdc): + def test_init_scm_not_available(self, mocked_scm): def mocked_scm_get_latest(): raise RuntimeError("Failed in mocked_scm_get_latest") @@ -78,10 +78,10 @@ class TestModuleInit: '620ec77321b2ea7b0d67d82992dda3e1d67055b4') mocked_scm.return_value.get_latest = mocked_scm_get_latest msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=1, module_build_state='init') + msg_id=None, module_build_id=2, module_build_state='init') with make_session(conf) as session: self.fn(config=conf, session=session, msg=msg) - build = ModuleBuild.query.filter_by(id=1).one() + build = ModuleBuild.query.filter_by(id=2).one() # Make sure the module entered the failed state # since the git server is not available assert build.state == 4, build.state @@ -89,7 +89,7 @@ class TestModuleInit: @patch("module_build_service.config.Config.modules_allow_repository", new_callable=PropertyMock, return_value=True)
@patch('module_build_service.scm.SCM') - def test_init_includedmodule(self, mocked_scm, mocked_mod_allow_repo, pdc): + def test_init_includedmodule(self, mocked_scm, mocked_mod_allow_repo): FakeSCM(mocked_scm, "includedmodules", ['testmodule.yaml']) includedmodules_yml_path = os.path.join( self.staged_data_dir, 'includedmodules.yaml') @@ -100,13 +100,13 @@ class TestModuleInit: ModuleBuild.create( session, conf, 'includemodule', '1', 3, yaml, scmurl, 'mprahl') msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=2, module_build_state='init') + msg_id=None, module_build_id=3, module_build_state='init') self.fn(config=conf, session=session, msg=msg) - build = ModuleBuild.query.filter_by(id=2).one() + build = ModuleBuild.query.filter_by(id=3).one() assert build.state == 1 assert build.name == 'includemodule' batches = {} - for comp_build in ComponentBuild.query.filter_by(module_id=2).all(): + for comp_build in ComponentBuild.query.filter_by(module_id=3).all(): batches[comp_build.package] = comp_build.batch assert batches['perl-List-Compare'] == 2 assert batches['perl-Tangerine'] == 2 @@ -125,14 +125,14 @@ class TestModuleInit: @patch('module_build_service.models.ModuleBuild.from_module_event') @patch('module_build_service.scm.SCM') - def test_init_when_get_latest_raises(self, mocked_scm, mocked_from_module_event, pdc): + def test_init_when_get_latest_raises(self, mocked_scm, mocked_from_module_event): FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml', '7035bd33614972ac66559ac1fdd019ff6027ad22', get_latest_raise=True) msg = module_build_service.messaging.MBSModule( - msg_id=None, module_build_id=1, module_build_state='init') + msg_id=None, module_build_id=2, module_build_state='init') with make_session(conf) as session: - build = session.query(ModuleBuild).filter_by(id=1).one() + build = session.query(ModuleBuild).filter_by(id=2).one() mocked_from_module_event.return_value = build self.fn(config=conf, session=session, msg=msg) # Query the database again to make sure the build object is updated diff --git a/tests/test_scheduler/test_module_wait.py b/tests/test_scheduler/test_module_wait.py index cc68af0c..665574eb 100644 --- a/tests/test_scheduler/test_module_wait.py +++ b/tests/test_scheduler/test_module_wait.py @@ -116,7 +116,7 @@ class TestModuleWait: resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" generic_resolver.create.return_value = resolver - msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=1, + msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2, module_build_state='some state') module_build_service.scheduler.handlers.modules.wait( config=conf, session=db.session, msg=msg) @@ -125,7 +125,7 @@ class TestModuleWait: # When module-build-macros is reused, it still has to appear only # once in database. 
         builds_count = db.session.query(ComponentBuild).filter_by(
-            package="module-build-macros", module_id=1).count()
+            package="module-build-macros", module_id=2).count()
         assert builds_count == 1

     @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -158,7 +158,7 @@ class TestModuleWait:
         resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
         generic_resolver.create.return_value = resolver
-        msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=1,
+        msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
                                                        module_build_state='some state')
         module_build_service.scheduler.handlers.modules.wait(
             config=conf, session=db.session, msg=msg)
@@ -200,11 +200,11 @@ class TestModuleWait:
                 "module-bootstrap-tag": base_mmd}
             generic_resolver.create.return_value = resolver
-            msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=1,
+            msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
                                                            module_build_state='some state')
             module_build_service.scheduler.handlers.modules.wait(
                 config=conf, session=db.session, msg=msg)
-            module_build = ModuleBuild.query.filter_by(id=1).one()
+            module_build = ModuleBuild.query.filter_by(id=2).one()
             assert module_build.cg_build_koji_tag == "modular-updates-candidate"

     @patch("module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -245,9 +245,9 @@ class TestModuleWait:
                 "module-bootstrap-tag": base_mmd}
             generic_resolver.create.return_value = resolver
-            msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=1,
+            msg = module_build_service.messaging.MBSModule(msg_id=None, module_build_id=2,
                                                            module_build_state='some state')
             module_build_service.scheduler.handlers.modules.wait(
                 config=conf, session=db.session, msg=msg)
-            module_build = ModuleBuild.query.filter_by(id=1).one()
+            module_build = ModuleBuild.query.filter_by(id=2).one()
             assert module_build.cg_build_koji_tag == "f27-modular-updates-candidate"
diff --git a/tests/test_scheduler/test_poller.py b/tests/test_scheduler/test_poller.py
index 1218c539..4ec11b50 100644
--- a/tests/test_scheduler/test_poller.py
+++ b/tests/test_scheduler/test_poller.py
@@ -62,7 +62,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 2
         # If fresh is set, then we simulate that activity just occurred 2 minutes ago on the build
         if fresh:
@@ -77,7 +77,7 @@ class TestPoller:
         poller.poll()
         # Refresh our module_build object.
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         db.session.refresh(module_build)
         # If fresh is set, we expect the poller to not touch the module build since it's been less
@@ -117,7 +117,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 2
         module_build.new_repo_task_id = 123456
         db.session.commit()
@@ -151,7 +151,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 2
         module_build.new_repo_task_id = 123456
         db.session.commit()
@@ -161,7 +161,7 @@ class TestPoller:
         poller.poll()
         # Refresh our module_build object.
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         db.session.refresh(module_build)
         assert not koji_session.newRepo.called
@@ -185,7 +185,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 2
         module_build.new_repo_task_id = 123456
         db.session.commit()
@@ -196,7 +196,7 @@ class TestPoller:
         poller.poll()
         # Refresh our module_build object.
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         db.session.refresh(module_build)
         # Components should not be in building state
@@ -227,7 +227,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.state = state
         module_build.koji_tag = "module-tag"
         module_build.time_completed = datetime.utcnow()
@@ -239,7 +239,7 @@ class TestPoller:
         poller = MBSProducer(hub)
         poller.delete_old_koji_targets(conf, db.session)
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         db.session.refresh(module_build)
         module_build.time_completed = datetime.utcnow() - timedelta(hours=23)
         db.session.commit()
@@ -281,7 +281,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.state = 1
         original = datetime.utcnow() - timedelta(minutes=11)
         module_build.time_modified = original
@@ -295,7 +295,7 @@ class TestPoller:
         poller.process_waiting_module_builds(db.session)
         assert consumer.incoming.qsize() == 1
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         # ensure the time_modified was changed.
         assert module_build.time_modified > original
@@ -312,7 +312,7 @@ class TestPoller:
         # Change the batch to 2, so the module build is in state where
         # it is not building anything, but the state is "build".
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.state = 1
         original = datetime.utcnow() - timedelta(minutes=9)
         module_build.time_modified = original
@@ -355,15 +355,15 @@ class TestPoller:
         """
         builder = mock.MagicMock()
         create_builder.return_value = builder
-        module_build_one = models.ModuleBuild.query.get(1)
-        module_build_two = models.ModuleBuild.query.get(2)
+        module_build_one = models.ModuleBuild.query.get(2)
+        module_build_two = models.ModuleBuild.query.get(3)
         module_build_one.state = models.BUILD_STATES['failed']
         module_build_one.time_modified = datetime.utcnow() - timedelta(
             days=conf.cleanup_failed_builds_time + 1)
         module_build_two.time_modified = datetime.utcnow()
         module_build_two.state = models.BUILD_STATES['failed']
         failed_component = models.ComponentBuild.query.filter_by(
-            package='tangerine', module_id=2).one()
+            package='tangerine', module_id=3).one()
         failed_component.state = koji.BUILD_STATES['FAILED']
         failed_component.tagged = False
         failed_component.tagged_in_final = False
diff --git a/tests/test_scheduler/test_repo_done.py b/tests/test_scheduler/test_repo_done.py
index 52517e80..d5c69872 100644
--- a/tests/test_scheduler/test_repo_done.py
+++ b/tests/test_scheduler/test_repo_done.py
@@ -59,7 +59,7 @@ class TestRepoDone:
     @mock.patch('module_build_service.builder.KojiModuleBuilder.'
                 'KojiModuleBuilder.buildroot_connect')
     def test_a_single_match(self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt,
-                            mock_uea, pdc):
+                            mock_uea):
         """ Test that when a repo msg hits us and we have a single match. """
         scheduler_init_data()
@@ -92,7 +92,7 @@ class TestRepoDone:
     @mock.patch('module_build_service.builder.KojiModuleBuilder.'
                 'KojiModuleBuilder.buildroot_connect')
     def test_a_single_match_build_fail(self, connect, build_fn, config, ready, list_tasks_fn,
-                                       mock_gabt, mock_uea, pdc):
+                                       mock_gabt, mock_uea):
         """ Test that when a KojiModuleBuilder.build fails, the build is
         marked as failed with proper state_reason.
""" @@ -129,7 +129,7 @@ class TestRepoDone: config=conf, session=db.session, msg=msg) mock_log_info.assert_called_once_with( 'Ignoring repo regen, because not all components are tagged.') - module_build = module_build_service.models.ModuleBuild.query.get(1) + module_build = module_build_service.models.ModuleBuild.query.get(2) # Make sure the module build didn't transition since all the components weren't tagged assert module_build.state == module_build_service.models.BUILD_STATES['build'] @@ -153,12 +153,12 @@ class TestRepoDone: with app.app_context(): scheduler_init_data(3) config.return_value = mock.Mock(), 'development' - build_fn.return_value = None, 4, 'Failed to submit artifact communicator to Koji', None + build_fn.return_value = None, 4, 'Failed to submit artifact x to Koji', None msg = module_build_service.messaging.KojiRepoChange( 'some_msg_id', 'module-95b214a704c984be-build') module_build_service.scheduler.handlers.repos.done( config=conf, session=db.session, msg=msg) - module_build = module_build_service.models.ModuleBuild.query.first() + module_build = module_build_service.models.ModuleBuild.query.get(2) assert module_build.state == module_build_service.models.BUILD_STATES["failed"] diff --git a/tests/test_scheduler/test_tag_tagged.py b/tests/test_scheduler/test_tag_tagged.py index 602f8d9e..5e005ad5 100644 --- a/tests/test_scheduler/test_tag_tagged.py +++ b/tests/test_scheduler/test_tag_tagged.py @@ -78,7 +78,7 @@ class TestTagTagged: builder.module_build_tag = {"name": "module-fe3adf73caf3e1b7-build"} create_builder.return_value = builder - module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() + module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() # Set previous components as COMPLETE and tagged. module_build.batch = 1 @@ -128,7 +128,7 @@ class TestTagTagged: # Refresh our module_build object. db.session.expunge(module_build) - module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() + module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() # newRepo task_id should be stored in database, so we can check its # status later in poller. @@ -154,7 +154,7 @@ class TestTagTagged: builder.module_build_tag = {"name": "module-fe3adf73caf3e1b7-build"} create_builder.return_value = builder - module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() + module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 2 component = module_build_service.models.ComponentBuild.query\ .filter_by(package='perl-Tangerine', module_id=module_build.id).one() @@ -196,7 +196,7 @@ class TestTagTagged: builder.module_build_tag = {"name": "module-fe3adf73caf3e1b7-build"} create_builder.return_value = builder - module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() + module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() # Set previous components as COMPLETE and tagged. module_build.batch = 1 @@ -231,7 +231,7 @@ class TestTagTagged: # Refresh our module_build object. db.session.expunge(module_build) - module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one() + module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one() # newRepo task_id should be stored in database, so we can check its # status later in poller. 
@@ -260,10 +260,10 @@ class TestTagTagged:
         builder.module_build_tag = {"name": "module-fe3adf73caf3e1b7-build"}
         create_builder.return_value = builder
-        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 2
         mbm = module_build_service.models.ComponentBuild.query.filter_by(
-            module_id=2, package='module-build-macros').one()
+            module_id=3, package='module-build-macros').one()
         mbm.tagged = False
         db.session.add(mbm)
         for c in module_build.current_batch():
@@ -316,7 +316,7 @@ class TestTagTagged:
         # Refresh our module_build object.
         db.session.expunge(module_build)
-        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
         # newRepo task_id should be stored in database, so we can check its
         # status later in poller.
@@ -343,7 +343,7 @@ class TestTagTagged:
         builder.module_build_tag = {"name": "module-fe3adf73caf3e1b7-build"}
         create_builder.return_value = builder
-        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
         # Set previous components as COMPLETE and tagged.
         module_build.batch = 1
@@ -387,7 +387,7 @@ class TestTagTagged:
         # Refresh our module_build object.
         db.session.expunge(module_build)
-        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
         # newRepo task_id should be stored in database, so we can check its
         # status later in poller.
diff --git a/tests/test_utils/test_utils.py b/tests/test_utils/test_utils.py
index 5ffd9c14..3e0ff20e 100644
--- a/tests/test_utils/test_utils.py
+++ b/tests/test_utils/test_utils.py
@@ -90,14 +90,14 @@ class TestUtilsComponentReuse:
         'perl-List-Compare', 'perl-Tangerine', 'tangerine', None
     ])
     def test_get_reusable_component_different_component(self, changed_component):
-        second_module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        second_module_build = models.ModuleBuild.query.filter_by(id=3).one()
         if changed_component:
             mmd = second_module_build.mmd()
             mmd.get_rpm_components()['tangerine'].set_ref(
                 '00ea1da4192a2030f9ae023de3b3143ed647bbab')
             second_module_build.modulemd = mmd.dumps()
             second_module_changed_component = models.ComponentBuild.query.filter_by(
-                package=changed_component, module_id=2).one()
+                package=changed_component, module_id=3).one()
             second_module_changed_component.ref = '00ea1da4192a2030f9ae023de3b3143ed647bbab'
             db.session.add(second_module_changed_component)
             db.session.commit()
@@ -132,7 +132,7 @@ class TestUtilsComponentReuse:
         assert tangerine_rv.package == 'tangerine'

     def test_get_reusable_component_different_rpm_macros(self):
-        second_module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        second_module_build = models.ModuleBuild.query.filter_by(id=3).one()
         mmd = second_module_build.mmd()
         mmd.set_rpm_buildopts({'macros': '%my_macro 1'})
         second_module_build.modulemd = mmd.dumps()
@@ -147,7 +147,7 @@ class TestUtilsComponentReuse:
         assert pt_rv is None

     def test_get_reusable_component_different_buildrequires_hash(self):
-        second_module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        second_module_build = models.ModuleBuild.query.filter_by(id=3).one()
         mmd = second_module_build.mmd()
         xmd = glib.from_variant_dict(mmd.get_xmd())
         xmd['mbs']['buildrequires']['platform']['ref'] = \
@@ -170,7 +170,7 @@ class TestUtilsComponentReuse:
         assert tangerine_rv is None

     def test_get_reusable_component_different_buildrequires(self):
-        second_module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        second_module_build = models.ModuleBuild.query.filter_by(id=3).one()
         mmd = second_module_build.mmd()
         br_list = Modulemd.SimpleSet()
         br_list.add('master')
@@ -210,7 +210,7 @@ class TestUtilsComponentReuse:
        inspect if it was called with correct arguments
        """
        module_dir = tempfile.mkdtemp()
-       module = models.ModuleBuild.query.filter_by(id=2).one()
+       module = models.ModuleBuild.query.filter_by(id=3).one()
        mmd = module.mmd()
        modulemd_yaml = mmd.dumps()
        modulemd_file_path = path.join(module_dir, "testmodule.yaml")
@@ -248,7 +248,7 @@ class TestUtils:
         None
     ])
     @patch('module_build_service.scm.SCM')
-    def test_format_mmd(self, mocked_scm, scmurl, pdc):
+    def test_format_mmd(self, mocked_scm, scmurl):
         mocked_scm.return_value.commit = \
             '620ec77321b2ea7b0d67d82992dda3e1d67055b4'
         # For all the RPMs in testmodule, get_latest is called
@@ -314,7 +314,7 @@ class TestUtils:
         reuse the components.
         """
         reuse_shared_userspace_init_data()
-        new_module = models.ModuleBuild.query.filter_by(id=2).one()
+        new_module = models.ModuleBuild.query.get(3)
         rv = module_build_service.utils.get_reusable_component(
             db.session, new_module, 'llvm')
         assert rv.package == 'llvm'
@@ -425,7 +425,7 @@ class TestUtils:
         assert str(cm.value).endswith(' No value provided.') is True

     @patch('module_build_service.scm.SCM')
-    def test_record_component_builds_duplicate_components(self, mocked_scm, pdc_module_inactive):
+    def test_record_component_builds_duplicate_components(self, mocked_scm):
         with app.app_context():
             clean_database()
             mocked_scm.return_value.commit = \
@@ -471,7 +471,7 @@ class TestUtils:
         assert str(e.value) == error_msg

     @patch('module_build_service.scm.SCM')
-    def test_record_component_builds_set_weight(self, mocked_scm, pdc_module_inactive):
+    def test_record_component_builds_set_weight(self, mocked_scm):
         with app.app_context():
             clean_database()
             mocked_scm.return_value.commit = \
@@ -600,7 +600,7 @@ class TestBatches:
         5) Handling the further_work messages lead to proper tagging of
            reused components.
         """
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 1
         builder = mock.MagicMock()
@@ -645,10 +645,10 @@ class TestBatches:
         5) Handling the further_work messages lead to proper tagging of
            reused components.
         """
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.batch = 1
         plc_component = models.ComponentBuild.query.filter_by(
-            module_id=2, package='perl-List-Compare').one()
+            module_id=3, package='perl-List-Compare').one()
         plc_component.ref = '5ceea46add2366d8b8c5a623a2fb563b625b9abd'
         builder = mock.MagicMock()
@@ -684,7 +684,7 @@ class TestBatches:
         Tests that start_next_batch_build can't reuse any components in the batch because the
         rebuild method is set to "all".
         """
-        module_build = models.ModuleBuild.query.filter_by(id=2).one()
+        module_build = models.ModuleBuild.query.filter_by(id=3).one()
         module_build.rebuild_strategy = 'all'
         module_build.batch = 1
@@ -711,12 +711,12 @@ class TestBatches:
         2, and even though the other component in batch 2 changed and was rebuilt, the component
         in batch 3 can be reused.
""" - module_build = models.ModuleBuild.query.filter_by(id=2).one() + module_build = models.ModuleBuild.query.filter_by(id=3).one() module_build.rebuild_strategy = 'only-changed' module_build.batch = 1 # perl-List-Compare changed plc_component = models.ComponentBuild.query.filter_by( - module_id=2, package='perl-List-Compare').one() + module_id=3, package='perl-List-Compare').one() plc_component.ref = '5ceea46add2366d8b8c5a623a2fb563b625b9abd' builder = mock.MagicMock() @@ -747,7 +747,7 @@ class TestBatches: # Complete the build plc_component.state = koji.BUILD_STATES['COMPLETE'] pt_component = models.ComponentBuild.query.filter_by( - module_id=2, package='perl-Tangerine').one() + module_id=3, package='perl-Tangerine').one() pt_component.state = koji.BUILD_STATES['COMPLETE'] # Start the next build batch @@ -769,13 +769,13 @@ class TestBatches: """ Tests that components with the longest build time will be scheduled first """ - module_build = models.ModuleBuild.query.filter_by(id=2).one() + module_build = models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 1 pt_component = models.ComponentBuild.query.filter_by( - module_id=2, package='perl-Tangerine').one() + module_id=3, package='perl-Tangerine').one() pt_component.ref = '6ceea46add2366d8b8c5a623b2fb563b625bfabe' plc_component = models.ComponentBuild.query.filter_by( - module_id=2, package='perl-List-Compare').one() + module_id=3, package='perl-List-Compare').one() plc_component.ref = '5ceea46add2366d8b8c5a623a2fb563b625b9abd' # Components are by default built by component id. To find out that weight is respected, @@ -809,7 +809,7 @@ class TestBatches: Tests that start_next_batch_build does not start new batch when there are unbuilt components in the current one. """ - module_build = models.ModuleBuild.query.filter_by(id=2).one() + module_build = models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 2 # The component was reused when the batch first started @@ -834,7 +834,7 @@ class TestBatches: Test that start_next_batch_build does not start new batch when builder.buildroot_ready() returns False. 
""" - module_build = models.ModuleBuild.query.filter_by(id=2).one() + module_build = models.ModuleBuild.query.filter_by(id=3).one() module_build.batch = 1 builder = mock.MagicMock() diff --git a/tests/test_views/test_views.py b/tests/test_views/test_views.py index 4ef977d8..5f09fc6d 100644 --- a/tests/test_views/test_views.py +++ b/tests/test_views/test_views.py @@ -117,9 +117,9 @@ class TestViews: init_data(2) def test_query_build(self): - rv = self.client.get('/module-build-service/1/module-builds/1') + rv = self.client.get('/module-build-service/1/module-builds/2') data = json.loads(rv.data) - assert data['id'] == 1 + assert data['id'] == 2 assert data['context'] == '00000000' assert data['name'] == 'nginx' assert data['owner'] == 'Moe Szyslak' @@ -132,13 +132,13 @@ class TestViews: 'task_id': 12312321, 'state': 1, 'state_reason': None, - 'nvr': 'module-build-macros-01-1.module+1+b8661ee4', + 'nvr': 'module-build-macros-01-1.module+2+b8661ee4', }, 'nginx': { 'task_id': 12312345, 'state': 1, 'state_reason': None, - 'nvr': 'nginx-1.10.1-2.module+1+b8661ee4', + 'nvr': 'nginx-1.10.1-2.module+2+b8661ee4', }, }, } @@ -149,9 +149,9 @@ class TestViews: assert data['version'] == '2' def test_query_build_short(self): - rv = self.client.get('/module-build-service/1/module-builds/1?short=True') + rv = self.client.get('/module-build-service/1/module-builds/2?short=True') data = json.loads(rv.data) - assert data['id'] == 1 + assert data['id'] == 2 assert data['context'] == '00000000' assert data['name'] == 'nginx' assert data['state'] == 5 @@ -160,14 +160,14 @@ class TestViews: assert data['version'] == '2' def test_query_build_with_verbose_mode(self): - rv = self.client.get('/module-build-service/1/module-builds/1?verbose=true') + rv = self.client.get('/module-build-service/1/module-builds/2?verbose=true') data = json.loads(rv.data) assert data['component_builds'] == [1, 2] assert data['context'] == '00000000' # There is no xmd information on this module, so these values should be None assert data['build_context'] is None assert data['runtime_context'] is None - assert data['id'] == 1 + assert data['id'] == 2 with open(path.join(base_dir, "staged_data", "nginx_mmd.yaml")) as mmd: assert data['modulemd'] == mmd.read() assert data['name'] == 'nginx' @@ -180,7 +180,7 @@ class TestViews: # State trace is empty because we directly created these builds and didn't have them # transition, which creates these entries assert data['state_trace'] == [] - assert data['state_url'] == '/module-build-service/1/module-builds/1' + assert data['state_url'] == '/module-build-service/1/module-builds/2' assert data['stream'] == '1' assert data['tasks'] == { 'rpms': { @@ -188,13 +188,13 @@ class TestViews: 'task_id': 12312321, 'state': 1, 'state_reason': None, - 'nvr': 'module-build-macros-01-1.module+1+b8661ee4', + 'nvr': 'module-build-macros-01-1.module+2+b8661ee4', }, 'nginx': { 'task_id': 12312345, 'state': 1, 'state_reason': None, - 'nvr': 'nginx-1.10.1-2.module+1+b8661ee4', + 'nvr': 'nginx-1.10.1-2.module+2+b8661ee4', }, }, } @@ -209,11 +209,11 @@ class TestViews: meta_data = json.loads(rv.data)['meta'] assert meta_data['prev'].split('?', 1)[1] in ['per_page=2&page=1', 'page=1&per_page=2'] assert meta_data['next'].split('?', 1)[1] in ['per_page=2&page=3', 'page=3&per_page=2'] - assert meta_data['last'].split('?', 1)[1] in ['per_page=2&page=3', 'page=3&per_page=2'] + assert meta_data['last'].split('?', 1)[1] in ['per_page=2&page=4', 'page=4&per_page=2'] assert meta_data['first'].split('?', 1)[1] in 
['per_page=2&page=1', 'page=1&per_page=2'] - assert meta_data['total'] == 6 + assert meta_data['total'] == 7 assert meta_data['per_page'] == 2 - assert meta_data['pages'] == 3 + assert meta_data['pages'] == 4 assert meta_data['page'] == 2 def test_pagination_metadata_with_args(self): @@ -222,9 +222,9 @@ class TestViews: for link in [meta_data['prev'], meta_data['next'], meta_data['last'], meta_data['first']]: assert 'order_by=id' in link assert 'per_page=2' in link - assert meta_data['total'] == 6 + assert meta_data['total'] == 7 assert meta_data['per_page'] == 2 - assert meta_data['pages'] == 3 + assert meta_data['pages'] == 4 assert meta_data['page'] == 2 def test_query_builds(self): @@ -240,13 +240,13 @@ class TestViews: "state": 1, "state_reason": None, "task_id": 47383994, - "nvr": "module-build-macros-01-1.module+6+f95651e2" + "nvr": "module-build-macros-01-1.module+7+f95651e2" }, "rubygem-rails": { "state": 3, "state_reason": None, "task_id": 2433434, - "nvr": "postgresql-9.5.3-4.module+6+f95651e2" + "nvr": "postgresql-9.5.3-4.module+7+f95651e2" } } }, @@ -258,7 +258,7 @@ class TestViews: "time_submitted": "2016-09-03T12:38:33Z", "scmurl": ("git://pkgs.domain.local/modules/testmodule" "?#ca95886c7a443b36a9ce31abda1f9bef22f2f8c9"), - "id": 6, + "id": 7, "context": "00000000", "time_completed": None, "time_modified": "2016-09-03T12:38:40Z", @@ -274,13 +274,13 @@ class TestViews: "state": 1, "state_reason": None, "task_id": 47383994, - "nvr": "module-build-macros-01-1.module+5+fa947d31" + "nvr": "module-build-macros-01-1.module+6+fa947d31" }, "postgresql": { "state": 1, "state_reason": None, "task_id": 2433434, - "nvr": "postgresql-9.5.3-4.module+5+fa947d31" + "nvr": "postgresql-9.5.3-4.module+6+fa947d31" } } }, @@ -292,7 +292,7 @@ class TestViews: "time_submitted": "2016-09-03T12:35:33Z", "scmurl": ("git://pkgs.domain.local/modules/postgressql" "?#aa95886c7a443b36a9ce31abda1f9bef22f2f8c9"), - "id": 5, + "id": 6, "context": "00000000", "time_completed": "2016-09-03T11:37:19Z", "time_modified": "2016-09-03T12:37:19Z", @@ -320,7 +320,7 @@ class TestViews: data = json.loads(rv.data) assert data['id'] == 1 assert data['format'] == 'rpms' - assert data['module_build'] == 1 + assert data['module_build'] == 2 assert data['package'] == 'nginx' assert data['state'] == 1 assert data['state_name'] == 'COMPLETE' @@ -332,7 +332,7 @@ class TestViews: data = json.loads(rv.data) assert data['id'] == 1 assert data['format'] == 'rpms' - assert data['module_build'] == 1 + assert data['module_build'] == 2 assert data['package'] == 'nginx' assert data['state'] == 1 assert data['state_name'] == 'COMPLETE' @@ -344,7 +344,7 @@ class TestViews: data = json.loads(rv.data) assert data['id'] == 3 assert data['format'] == 'rpms' - assert data['module_build'] == 2 + assert data['module_build'] == 3 assert data['package'] == 'postgresql' assert data['state'] == 1 assert data['state_name'] == 'COMPLETE' @@ -375,7 +375,7 @@ class TestViews: def test_query_component_builds_filter_nvr(self): rv = self.client.get('/module-build-service/1/component-builds/?nvr=nginx-1.10.1-2.' 
-                             'module%2B1%2Bb8661ee4')
+                             'module%2B2%2Bb8661ee4')
         data = json.loads(rv.data)
         assert data['meta']['total'] == 1
@@ -404,7 +404,7 @@ class TestViews:
         rv = self.client.get(
             '/module-build-service/1/module-builds/?completed_after=2016-09-03T11:35:00Z')
         data = json.loads(rv.data)
-        assert data['meta']['total'] == 2
+        assert data['meta']['total'] == 3

     def test_query_builds_filter_submitted_before(self):
         rv = self.client.get(
@@ -416,7 +416,7 @@ class TestViews:
         rv = self.client.get(
             '/module-build-service/1/module-builds/?submitted_after=2016-09-03T11:35:00Z')
         data = json.loads(rv.data)
-        assert data['meta']['total'] == 4
+        assert data['meta']['total'] == 5

     def test_query_builds_filter_modified_before(self):
         rv = self.client.get(
@@ -428,7 +428,7 @@ class TestViews:
         rv = self.client.get(
             '/module-build-service/1/module-builds/?modified_after=2016-09-03T11:35:00Z')
         data = json.loads(rv.data)
-        assert data['meta']['total'] == 5
+        assert data['meta']['total'] == 6

     def test_query_builds_filter_owner(self):
         rv = self.client.get(
@@ -508,7 +508,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=user)
     @patch('module_build_service.scm.SCM')
-    def test_submit_build(self, mocked_scm, mocked_get_user, pdc):
+    def test_submit_build(self, mocked_scm, mocked_get_user):
         FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
                 '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
@@ -528,10 +528,10 @@ class TestViews:
         assert data['time_completed'] is None
         assert data['stream'] == 'master'
         assert data['owner'] == 'Homer J. Simpson'
-        assert data['id'] == 7
+        assert data['id'] == 8
         assert data['rebuild_strategy'] == 'changed-and-after'
         assert data['state_name'] == 'init'
-        assert data['state_url'] == '/module-build-service/1/module-builds/7'
+        assert data['state_url'] == '/module-build-service/1/module-builds/8'
         assert len(data['state_trace']) == 1
         assert data['state_trace'][0]['state'] == 0
         assert data['tasks'] == {}
@@ -618,7 +618,7 @@ class TestViews:
         assert data['time_completed'] is None
         assert data['stream'] == 'master'
         assert data['owner'] == 'Homer J. Simpson'
-        assert data['id'] == 7
+        assert data['id'] == 8
         assert data['state_name'] == 'init'
         assert data['rebuild_strategy'] == 'changed-and-after'
@@ -683,7 +683,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=other_user)
     def test_cancel_build(self, mocked_get_user):
-        rv = self.client.patch('/module-build-service/1/module-builds/6',
+        rv = self.client.patch('/module-build-service/1/module-builds/7',
                                data=json.dumps({'state': 'failed'}))
         data = json.loads(rv.data)
@@ -692,11 +692,11 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=other_user)
     def test_cancel_build_already_failed(self, mocked_get_user):
-        module = ModuleBuild.query.filter_by(id=6).one()
+        module = ModuleBuild.query.filter_by(id=7).one()
         module.state = 4
         db.session.add(module)
         db.session.commit()
-        rv = self.client.patch('/module-build-service/1/module-builds/6',
+        rv = self.client.patch('/module-build-service/1/module-builds/7',
                                data=json.dumps({'state': 'failed'}))
         data = json.loads(rv.data)
@@ -705,7 +705,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=('sammy', set()))
     def test_cancel_build_unauthorized_no_groups(self, mocked_get_user):
-        rv = self.client.patch('/module-build-service/1/module-builds/6',
+        rv = self.client.patch('/module-build-service/1/module-builds/7',
                                data=json.dumps({'state': 'failed'}))
         data = json.loads(rv.data)
@@ -714,7 +714,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=('sammy', set(["packager"])))
     def test_cancel_build_unauthorized_not_owner(self, mocked_get_user):
-        rv = self.client.patch('/module-build-service/1/module-builds/6',
+        rv = self.client.patch('/module-build-service/1/module-builds/7',
                                data=json.dumps({'state': 'failed'}))
         data = json.loads(rv.data)
@@ -726,7 +726,7 @@ class TestViews:
     def test_cancel_build_admin(self, mocked_get_user):
         with patch("module_build_service.config.Config.admin_groups",
                    new_callable=PropertyMock, return_value=set(["mbs-admin"])):
-            rv = self.client.patch('/module-build-service/1/module-builds/6',
+            rv = self.client.patch('/module-build-service/1/module-builds/7',
                                    data=json.dumps({'state': 'failed'}))
             data = json.loads(rv.data)
@@ -738,7 +738,7 @@ class TestViews:
     def test_cancel_build_no_admin(self, mocked_get_user):
         with patch("module_build_service.config.Config.admin_groups",
                    new_callable=PropertyMock, return_value=set(["mbs-admin"])):
-            rv = self.client.patch('/module-build-service/1/module-builds/6',
+            rv = self.client.patch('/module-build-service/1/module-builds/7',
                                    data=json.dumps({'state': 'failed'}))
             data = json.loads(rv.data)
@@ -747,7 +747,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=other_user)
     def test_cancel_build_wrong_param(self, mocked_get_user):
-        rv = self.client.patch('/module-build-service/1/module-builds/6',
+        rv = self.client.patch('/module-build-service/1/module-builds/7',
                                data=json.dumps({'some_param': 'value'}))
         data = json.loads(rv.data)
@@ -757,7 +757,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=other_user)
     def test_cancel_build_wrong_state(self, mocked_get_user):
-        rv = self.client.patch('/module-build-service/1/module-builds/6',
+        rv = self.client.patch('/module-build-service/1/module-builds/7',
                                data=json.dumps({'state': 'some_state'}))
         data = json.loads(rv.data)
@@ -839,7 +839,7 @@ class TestViews:
     @patch('module_build_service.scm.SCM')
     @patch("module_build_service.config.Config.no_auth", new_callable=PropertyMock,
           return_value=True)
-    def test_submit_build_no_auth_set_owner(self, mocked_conf, mocked_scm, mocked_get_user, pdc):
+    def test_submit_build_no_auth_set_owner(self, mocked_conf, mocked_scm, mocked_get_user):
         FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
                 '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
@@ -858,7 +858,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=anonymous_user)
     @patch('module_build_service.scm.SCM')
     @patch("module_build_service.config.Config.no_auth", new_callable=PropertyMock)
-    def test_patch_set_different_owner(self, mocked_no_auth, mocked_scm, mocked_get_user, pdc):
+    def test_patch_set_different_owner(self, mocked_no_auth, mocked_scm, mocked_get_user):
         FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
                 '620ec77321b2ea7b0d67d82992dda3e1d67055b4')
@@ -902,7 +902,7 @@ class TestViews:
     @patch('module_build_service.auth.get_user', return_value=user)
     @patch('module_build_service.scm.SCM')
     @patch("module_build_service.config.Config.allow_custom_scmurls", new_callable=PropertyMock)
-    def test_submit_custom_scmurl(self, allow_custom_scmurls, mocked_scm, mocked_get_user, pdc):
+    def test_submit_custom_scmurl(self, allow_custom_scmurls, mocked_scm, mocked_get_user):
         FakeSCM(mocked_scm, 'testmodule', 'testmodule.yaml',
                 '620ec77321b2ea7b0d67d82992dda3e1d67055b4')