Make db_session singleton

Please note that this patch does not change the use of database session
in MBS. So, in the frontend, the database session is still managed by
Flask-SQLAlchemy, that is, db.session. And the backend, which runs event
handlers, has its own database session created from the SQLAlchemy session
API directly.

This patch aims to reduce the number of scoped_session objects created when
calling the original function make_db_session. For detailed technical
information, please refer to the SQLAlchemy documentation on
Contextual/Thread-local Sessions.

As a result, a global scoped_session is accessible from the
code running inside the backend, both the event handlers and the functions
called from those handlers. The library code shared by the frontend and
backend, such as the resolvers, is unchanged.

Similarly, db.session is only used to recreate the database for every test.

Signed-off-by: Chenxiong Qi <cqi@redhat.com>
This commit is contained in:
Chenxiong Qi
2019-11-01 21:29:18 +08:00
parent 090f4bb7dc
commit f24cd4222f
50 changed files with 1025 additions and 1078 deletions

View File

@@ -17,9 +17,10 @@ from module_build_service.utils import get_rpm_release, import_mmd, mmd_to_str
from module_build_service.config import init_config
from module_build_service.models import (
ModuleBuild, ModuleArch, ComponentBuild, VirtualStream,
make_db_session, BUILD_STATES,
BUILD_STATES,
)
from module_build_service import Modulemd
from module_build_service.db_session import db_session
base_dir = os.path.dirname(__file__)
@@ -101,10 +102,14 @@ def clean_database(add_platform_module=True, add_default_arches=True):
Please note that, this function relies on database objects managed by
Flask-SQLAlchemy.
"""
# Ensure all pending transactions are committed and do not block subsequent
# DML on tables.
# TODO: Should the code be fixed that forget to commit?
db.session.commit()
# Helpful for writing tests if any changes were made using the database
# session but the test didn't commit or rollback.
#
# clean_database is usually called before a test run. So, it makes no sense
# to keep any changes in the transaction made by previous test.
db_session.remove()
db.drop_all()
db.create_all()
@@ -130,6 +135,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=None, scrat
the generated base module streams.
"""
clean_database()
if multiple_stream_versions:
if multiple_stream_versions is True:
multiple_stream_versions = ["f28.0.0", "f29.0.0", "f29.1.0", "f29.2.0"]
@@ -147,11 +153,11 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=None, scrat
# Just to possibly confuse tests by adding another base module.
mmd = mmd.copy("bootstrap", stream)
import_mmd(db.session, mmd)
with make_db_session(conf) as db_session:
_populate_data(db_session, data_size, contexts=contexts, scratch=scratch)
_populate_data(data_size, contexts=contexts, scratch=scratch)
def _populate_data(db_session, data_size=10, contexts=False, scratch=False):
def _populate_data(data_size=10, contexts=False, scratch=False):
# Query arch from passed database session, otherwise there will be an error
# like "Object '<ModuleBuild at 0x7f4ccc805c50>' is already attached to
# session '275' (this is '276')" when add new module build object to passed
@@ -326,7 +332,7 @@ def _populate_data(db_session, data_size=10, contexts=False, scratch=False):
db_session.commit()
def scheduler_init_data(db_session, tangerine_state=None, scratch=False):
def scheduler_init_data(tangerine_state=None, scratch=False):
""" Creates a testmodule in the building state with all the components in the same batch
"""
clean_database()
@@ -435,7 +441,6 @@ def make_module(
base_module=None,
filtered_rpms=None,
xmd=None,
db_session=None,
store_to_db=False,
virtual_streams=None,
arches=None,
@@ -459,7 +464,6 @@ def make_module(
:param dict xmd: a mapping representing XMD section in module metadata. A
custom xmd could be passed for testing a particular scenario and some
default key/value pairs are added if not present.
:param db_session: SQLAlchemy database session.
:param bool store_to_db: whether to store created module metadata to the
database. If set to True, ``db_session`` is required.
:param virtual_streams: List of virtual streams provided by this module.
@@ -479,9 +483,9 @@ def make_module(
if base_module:
assert db_session is not None
if virtual_streams:
assert db_session and store_to_db
assert store_to_db
if arches:
assert db_session and store_to_db
assert store_to_db
name, stream, version, context = nsvc.split(":")
mmd = Modulemd.ModuleStreamV2.new(name, stream)

View File

@@ -8,8 +8,8 @@ from datetime import datetime
import module_build_service
from module_build_service import conf
from module_build_service.models import make_db_session, BUILD_STATES
from module_build_service.models import BUILD_STATES
from module_build_service.db_session import db_session
from module_build_service.utils.general import mmd_to_str, load_mmd, get_rpm_release
from tests import clean_database, read_staged_data, module_build_from_modulemd
@@ -48,13 +48,7 @@ def platform_mmd():
@pytest.fixture()
def db_session():
with make_db_session(conf) as db_session:
yield db_session
@pytest.fixture()
def model_tests_init_data(db_session):
def model_tests_init_data():
"""Initialize data for model tests
This is refactored from tests/test_models/__init__.py, which was able to be
@@ -78,7 +72,7 @@ def model_tests_init_data(db_session):
@pytest.fixture()
def reuse_component_init_data(db_session):
def reuse_component_init_data():
clean_database()
mmd = load_mmd(read_staged_data("formatted_testmodule"))
@@ -265,7 +259,7 @@ def reuse_component_init_data(db_session):
@pytest.fixture()
def reuse_shared_userspace_init_data(db_session):
def reuse_shared_userspace_init_data():
# Create shared-userspace-570, state is COMPLETE, all components
# are properly built.
scmurl = "https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453"

View File

@@ -16,6 +16,7 @@ import module_build_service.scheduler.handlers.repos
import module_build_service.utils
from module_build_service.errors import Forbidden
from module_build_service import models, conf, build_logs
from module_build_service.db_session import db_session
from module_build_service.scheduler import make_simple_stop_condition
from mock import patch, PropertyMock, Mock, MagicMock
@@ -337,10 +338,10 @@ def cleanup_moksha():
class BaseTestBuild:
def run_scheduler(self, db_session, msgs=None, stop_condition=None):
def run_scheduler(self, msgs=None, stop_condition=None):
module_build_service.scheduler.main(
msgs or [],
stop_condition or make_simple_stop_condition(db_session)
stop_condition or make_simple_stop_condition()
)
@@ -413,8 +414,7 @@ class TestBuild(BaseTestBuild):
cleanup_moksha()
for i in range(20):
try:
with models.make_db_session(conf) as db_session:
os.remove(build_logs.path(db_session, i))
os.remove(build_logs.path(db_session, i))
except Exception:
pass
@@ -422,7 +422,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version
):
"""
Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -474,7 +474,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb
self.run_scheduler(db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -501,7 +501,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_buildonly(
self, mocked_scm, mocked_get_user, mocked_get_session, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, mocked_get_session, conf_system, dbg, hmsc
):
def list_build_rpms(nvr):
assert nvr == "perl-List-Compare-1-1"
@@ -553,7 +553,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb
self.run_scheduler(db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -569,7 +569,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_no_components(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, gating_result, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, gating_result
):
"""
Tests the build of a module with no components
@@ -581,6 +581,7 @@ class TestBuild(BaseTestBuild):
"python3-no-components.yaml",
"620ec77321b2ea7b0d67d82992dda3e1d67055b4",
)
rv = self.client.post(
"/module-build-service/1/module-builds/",
data=json.dumps({
@@ -593,9 +594,10 @@ class TestBuild(BaseTestBuild):
data = json.loads(rv.data)
module_build_id = data["id"]
self.run_scheduler(db_session)
self.run_scheduler()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# Make sure no component builds were registered
assert len(module_build.component_builds) == 0
# Make sure the build is done
@@ -665,7 +667,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_from_yaml_allowed(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -687,7 +689,7 @@ class TestBuild(BaseTestBuild):
module_build_id = data["id"]
assert module_build_id == 2
self.run_scheduler(db_session)
self.run_scheduler()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module_build.state == models.BUILD_STATES["ready"]
@@ -695,7 +697,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_cancel(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Submit all builds for a module and cancel the module build later.
@@ -739,7 +741,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_cancel_cb = on_cancel_cb
FakeModuleBuilder.on_finalize_cb = on_finalize_cb
self.run_scheduler(db_session)
self.run_scheduler()
# Because we did not finished single component build and canceled the
# module build, all components and even the module itself should be in
@@ -757,7 +759,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_instant_complete(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -779,7 +781,7 @@ class TestBuild(BaseTestBuild):
module_build_id = data["id"]
FakeModuleBuilder.INSTANT_COMPLETE = True
self.run_scheduler(db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -799,7 +801,7 @@ class TestBuild(BaseTestBuild):
)
def test_submit_build_concurrent_threshold(
self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,
conf_system, dbg, hmsc, db_session
conf_system, dbg, hmsc
):
"""
Tests the build of testmodule.yaml using FakeModuleBuilder with
@@ -825,16 +827,18 @@ class TestBuild(BaseTestBuild):
Stop the scheduler when the module is built or when we try to build
more components than the num_concurrent_builds.
"""
main_stop = make_simple_stop_condition(db_session)
main_stop = make_simple_stop_condition()
build_count = (
db_session.query(models.ComponentBuild).filter_by(
state=koji.BUILD_STATES["BUILDING"]
).count()
)
over_threshold = conf.num_concurrent_builds < build_count
return main_stop(message) or over_threshold
result = main_stop(message) or over_threshold
db_session.remove()
return result
self.run_scheduler(db_session, stop_condition=stop)
self.run_scheduler(stop_condition=stop)
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -856,7 +860,7 @@ class TestBuild(BaseTestBuild):
)
def test_try_to_reach_concurrent_threshold(
self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,
conf_system, dbg, hmsc, db_session
conf_system, dbg, hmsc
):
"""
Tests that we try to submit new component build right after
@@ -887,7 +891,7 @@ class TestBuild(BaseTestBuild):
Stop the scheduler when the module is built or when we try to build
more components than the num_concurrent_builds.
"""
main_stop = module_build_service.scheduler.make_simple_stop_condition(db_session)
main_stop = module_build_service.scheduler.make_simple_stop_condition()
num_building = (
db_session.query(models.ComponentBuild)
.filter_by(state=koji.BUILD_STATES["BUILDING"])
@@ -895,9 +899,11 @@ class TestBuild(BaseTestBuild):
)
over_threshold = conf.num_concurrent_builds < num_building
TestBuild._global_var.append(num_building)
return main_stop(message) or over_threshold
result = main_stop(message) or over_threshold
db_session.remove()
return result
self.run_scheduler(db_session, stop_condition=stop)
self.run_scheduler(stop_condition=stop)
# _global_var looks similar to this: [0, 1, 0, 0, 2, 2, 1, 0, 0, 0]
# It shows the number of concurrent builds in the time. At first we
@@ -921,7 +927,7 @@ class TestBuild(BaseTestBuild):
)
def test_build_in_batch_fails(
self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,
conf_system, dbg, hmsc, db_session
conf_system, dbg, hmsc
):
"""
Tests that if the build in batch fails, other components in a batch
@@ -959,7 +965,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_tag_artifacts_cb = on_tag_artifacts_cb
self.run_scheduler(db_session)
self.run_scheduler()
for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
# perl-Tangerine is expected to fail as configured in on_build_cb.
@@ -990,7 +996,7 @@ class TestBuild(BaseTestBuild):
)
def test_all_builds_in_batch_fail(
self, conf_num_concurrent_builds, mocked_scm, mocked_get_user,
conf_system, dbg, hmsc, db_session
conf_system, dbg, hmsc
):
"""
Tests that if the build in batch fails, other components in a batch
@@ -1019,7 +1025,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_build_cb = on_build_cb
self.run_scheduler(db_session)
self.run_scheduler()
for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
# perl-Tangerine is expected to fail as configured in on_build_cb.
@@ -1044,7 +1050,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_reuse_all(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that we do not try building module-build-macros when reusing all
@@ -1083,9 +1089,10 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb
from module_build_service.db_session import db_session
# Create a dedicated database session for scheduler to avoid hang
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])
self.run_scheduler(msgs=[MBSModule("local module build", 3, 1)])
reused_component_ids = {
"module-build-macros": None,
@@ -1108,7 +1115,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_reuse_all_without_build_macros(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that we can reuse components even when the reused module does
@@ -1116,6 +1123,8 @@ class TestBuild(BaseTestBuild):
"""
# Firstly, remove all existing module-build-macros component builds
from module_build_service.db_session import db_session
macros_cb_query = db_session.query(models.ComponentBuild).filter_by(
package="module-build-macros")
db_session.query(models.ComponentBuildTrace).filter(
@@ -1162,8 +1171,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])
self.run_scheduler(msgs=[MBSModule("local module build", 3, 1)])
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -1178,7 +1186,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_resume(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that resuming the build works even when previous batches
@@ -1299,8 +1307,7 @@ class TestBuild(BaseTestBuild):
db_session.expire_all()
# Run the backend
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -1314,7 +1321,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_resume_recover_orphaned_macros(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that resuming the build works when module-build-macros is orphaned but marked as
@@ -1421,8 +1428,7 @@ class TestBuild(BaseTestBuild):
db_session.expire_all()
# Run the backend
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.
@@ -1436,7 +1442,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_resume_failed_init(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that resuming the build works when the build failed during the init step
@@ -1455,11 +1461,12 @@ class TestBuild(BaseTestBuild):
}),
)
# Run the backend so that it fails in the "init" handler
self.run_scheduler(db_session)
self.run_scheduler()
cleanup_moksha()
module_build_id = json.loads(rv.data)["id"]
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
db_session.refresh(module_build)
assert module_build.state == models.BUILD_STATES["failed"]
assert module_build.state_reason == "Custom component repositories aren't allowed."
assert len(module_build.module_builds_trace) == 2
@@ -1478,7 +1485,7 @@ class TestBuild(BaseTestBuild):
}),
)
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
db_session.refresh(module_build)
components = (
db_session.query(models.ComponentBuild)
.filter_by(module_id=module_build_id, batch=2)
@@ -1490,14 +1497,15 @@ class TestBuild(BaseTestBuild):
assert module_build.state_reason == "Resubmitted by Homer J. Simpson"
# Make sure there are no components
assert components == []
db_session.expire_all()
# Run the backend again
self.run_scheduler(db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
db_session.refresh(module_build)
# for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
for build in module_build.component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -1507,7 +1515,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_resume_init_fail(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that resuming the build fails when the build is in init state
@@ -1525,7 +1533,7 @@ class TestBuild(BaseTestBuild):
)
assert rv.status_code == 201
# Run the backend
self.run_scheduler(db_session)
self.run_scheduler()
# Post again and make sure it fails
rv2 = self.client.post(
"/module-build-service/1/module-builds/",
@@ -1554,7 +1562,7 @@ class TestBuild(BaseTestBuild):
return_value=True,
)
def test_submit_scratch_vs_normal(
self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that submitting a scratch build with the same NSV as a previously
@@ -1574,18 +1582,18 @@ class TestBuild(BaseTestBuild):
assert rv.status_code == 201
data = json.loads(rv.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# make sure normal build has expected context without a suffix
assert module_build.context == "9c690d0e"
# Run the backend
self.run_scheduler(db_session)
self.run_scheduler()
# Post again as a scratch build and make sure it succeeds
post_data["scratch"] = True
rv2 = self.client.post(post_url, data=json.dumps(post_data))
assert rv2.status_code == 201
data = json.loads(rv2.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# make sure scratch build has expected context with unique suffix
assert module_build.context == "9c690d0e_1"
@@ -1597,7 +1605,7 @@ class TestBuild(BaseTestBuild):
return_value=True,
)
def test_submit_normal_vs_scratch(
self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that submitting a normal build with the same NSV as a previously
@@ -1618,18 +1626,18 @@ class TestBuild(BaseTestBuild):
assert rv.status_code == 201
data = json.loads(rv.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# make sure scratch build has expected context with unique suffix
assert module_build.context == "9c690d0e_1"
# Run the backend
self.run_scheduler(db_session)
self.run_scheduler()
# Post again as a non-scratch build and make sure it succeeds
post_data["scratch"] = False
rv2 = self.client.post(post_url, data=json.dumps(post_data))
assert rv2.status_code == 201
data = json.loads(rv2.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# make sure normal build has expected context without suffix
assert module_build.context == "9c690d0e"
@@ -1641,7 +1649,7 @@ class TestBuild(BaseTestBuild):
return_value=True,
)
def test_submit_scratch_vs_scratch(
self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_allow_scratch, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that submitting a scratch build with the same NSV as a previously
@@ -1661,24 +1669,24 @@ class TestBuild(BaseTestBuild):
assert rv.status_code == 201
data = json.loads(rv.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# make sure first scratch build has expected context with unique suffix
assert module_build.context == "9c690d0e_1"
# Run the backend
self.run_scheduler(db_session)
self.run_scheduler()
# Post scratch build again and make sure it succeeds
rv2 = self.client.post(post_url, data=json.dumps(post_data))
assert rv2.status_code == 201
data = json.loads(rv2.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# make sure second scratch build has expected context with unique suffix
assert module_build.context == "9c690d0e_2"
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_repo_regen_not_started_batch(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Tests that if MBS starts a new batch, the concurrent component threshold is met before a
@@ -1704,18 +1712,21 @@ class TestBuild(BaseTestBuild):
def _stop_condition(message):
# Stop the backend if the module batch is 2 (where we simulate the concurrent threshold
# being met). For safety, also stop the backend if the module erroneously completes.
module = db_session.query(models.ModuleBuild).get(module_build_id)
return module.batch == 2 or module.state >= models.BUILD_STATES["done"]
module = models.ModuleBuild.get_by_id(db_session, module_build_id)
result = module.batch == 2 or module.state >= models.BUILD_STATES["done"]
if result:
db_session.remove()
return result
with patch(
"module_build_service.utils.batches.at_concurrent_component_threshold"
) as mock_acct:
# Once we get to batch 2, then simulate the concurrent threshold being met
def _at_concurrent_component_threshold(config, session):
return session.query(models.ModuleBuild).get(module_build_id).batch == 2
def _at_concurrent_component_threshold(config):
return models.ModuleBuild.get_by_id(db_session, module_build_id).batch == 2
mock_acct.side_effect = _at_concurrent_component_threshold
self.run_scheduler(db_session, stop_condition=_stop_condition)
self.run_scheduler(stop_condition=_stop_condition)
# Only module-build-macros should be built
for build in (
@@ -1737,7 +1748,13 @@ class TestBuild(BaseTestBuild):
]
db_session.expire_all()
# Stop after processing the seeded message
self.run_scheduler(db_session, msgs, lambda message: True)
def stop(message):
db_session.remove()
return True
self.run_scheduler(msgs, stop_condition=stop)
# Make sure the module build didn't fail so that the poller can resume it later
module = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module.state == models.BUILD_STATES["build"]
@@ -1745,7 +1762,7 @@ class TestBuild(BaseTestBuild):
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_br_metadata_only_module(
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc
):
"""
Test that when a build is submitted with a buildrequire without a Koji tag,
@@ -1779,7 +1796,7 @@ class TestBuild(BaseTestBuild):
assert set(dependencies.keys()) == {"module-f28-build"}
FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb
self.run_scheduler(db_session)
self.run_scheduler()
module = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module.state == models.BUILD_STATES["ready"]
@@ -1801,8 +1818,7 @@ class TestLocalBuild(BaseTestBuild):
cleanup_moksha()
for i in range(20):
try:
with models.make_db_session(conf) as db_session:
os.remove(build_logs.path(db_session, i))
os.remove(build_logs.path(db_session, i))
except Exception:
pass
@@ -1815,12 +1831,12 @@ class TestLocalBuild(BaseTestBuild):
return_value=staged_data_filename('local_builds'),
)
def test_submit_build_local_dependency(
self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc, db_session
self, resultsdir, mocked_scm, mocked_get_user, conf_system, hmsc
):
"""
Tests local module build dependency.
"""
module_build_service.utils.load_local_builds(db_session, ["platform"])
module_build_service.utils.load_local_builds(["platform"])
FakeSCM(
mocked_scm,
"testmodule",
@@ -1844,7 +1860,7 @@ class TestLocalBuild(BaseTestBuild):
# the local one and not the main one.
FakeModuleBuilder.DEFAULT_GROUPS = {"srpm-build": {"bar"}, "build": {"foo"}}
self.run_scheduler(db_session)
self.run_scheduler()
# All components should be built and module itself should be in "done"
# or "ready" state.

View File

@@ -8,6 +8,7 @@ import module_build_service.resolver
from tests import init_data
from module_build_service.db_session import db_session
from module_build_service.builder import GenericBuilder
from mock import patch
@@ -18,7 +19,7 @@ class TestGenericBuilder:
@patch("module_build_service.resolver.DBResolver")
@patch("module_build_service.builder.base.GenericResolver")
def test_default_buildroot_groups_cache(self, generic_resolver, resolver, db_session):
def test_default_buildroot_groups_cache(self, generic_resolver, resolver):
mbs_groups = {"buildroot": [], "srpm-buildroot": []}
resolver = mock.MagicMock()

View File

@@ -15,6 +15,7 @@ import module_build_service.scheduler.handlers.repos
import module_build_service.models
import module_build_service.builder
from module_build_service import Modulemd
from module_build_service.db_session import db_session
from module_build_service.utils.general import mmd_to_str
import pytest
@@ -116,7 +117,7 @@ class TestKojiBuilder:
"/module-base-runtime-0.25-9/latest/x86_64"
)
def test_recover_orphaned_artifact_when_tagged(self, db_session):
def test_recover_orphaned_artifact_when_tagged(self):
""" Test recover_orphaned_artifact when the artifact is found and tagged in both tags
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -144,6 +145,10 @@ class TestKojiBuilder:
component_build.nvr = None
actual = builder.recover_orphaned_artifact(component_build)
# recover_orphaned_artifact modifies a component build, but doesn't
# commit the changes.
db_session.commit()
assert len(actual) == 3
assert type(actual[0]) == module_build_service.messaging.KojiBuildChange
assert actual[0].build_id == 91
@@ -164,7 +169,7 @@ class TestKojiBuilder:
assert component_build.state_reason == "Found existing build"
assert builder.koji_session.tagBuild.call_count == 0
def test_recover_orphaned_artifact_when_untagged(self, db_session):
def test_recover_orphaned_artifact_when_untagged(self):
""" Tests recover_orphaned_artifact when the build is found but untagged
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -194,8 +199,11 @@ class TestKojiBuilder:
component_build.task_id = None
component_build.nvr = None
component_build.state = None
db_session.commit()
actual = builder.recover_orphaned_artifact(component_build)
db_session.commit()
assert len(actual) == 1
assert type(actual[0]) == module_build_service.messaging.KojiBuildChange
assert actual[0].build_id == 91
@@ -210,7 +218,7 @@ class TestKojiBuilder:
assert component_build.state_reason == "Found existing build"
builder.koji_session.tagBuild.assert_called_once_with(2, "foo-1.0-1.{0}".format(dist_tag))
def test_recover_orphaned_artifact_when_module_build_macros_untagged(self, db_session):
def test_recover_orphaned_artifact_when_module_build_macros_untagged(self):
""" Tests recover_orphaned_artifact when module-build-macros is found but untagged
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -245,8 +253,11 @@ class TestKojiBuilder:
component_build.task_id = None
component_build.nvr = None
component_build.state = None
db_session.commit()
actual = builder.recover_orphaned_artifact(component_build)
db_session.commit()
assert len(actual) == 1
assert type(actual[0]) == module_build_service.messaging.KojiBuildChange
assert actual[0].build_id == 91
@@ -267,7 +278,7 @@ class TestKojiBuilder:
[mock.call(2, "srpm-build", "module-build-macros"),
mock.call(2, "build", "module-build-macros")])
def test_recover_orphaned_artifact_when_nothing_exists(self, db_session):
def test_recover_orphaned_artifact_when_nothing_exists(self):
""" Test recover_orphaned_artifact when the build is not found
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
@@ -294,14 +305,17 @@ class TestKojiBuilder:
component_build.task_id = None
component_build.nvr = None
component_build.state = None
db_session.commit()
actual = builder.recover_orphaned_artifact(component_build)
db_session.commit()
assert actual == []
# Make sure nothing erroneous gets tag
assert builder.koji_session.tagBuild.call_count == 0
@patch("koji.util")
def test_buildroot_ready(self, mocked_kojiutil, db_session):
def test_buildroot_ready(self, mocked_kojiutil):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
attrs = {"checkForBuilds.return_value": None, "checkForBuilds.side_effect": IOError}
@@ -321,7 +335,7 @@ class TestKojiBuilder:
assert mocked_kojiutil.checkForBuilds.call_count == 3
@pytest.mark.parametrize("blocklist", [False, True])
def test_tagging_already_tagged_artifacts(self, blocklist, db_session):
def test_tagging_already_tagged_artifacts(self, blocklist):
"""
Tests that buildroot_add_artifacts and tag_artifacts do not try to
tag already tagged artifacts
@@ -380,7 +394,7 @@ class TestKojiBuilder:
@patch.object(FakeKojiModuleBuilder, "get_session")
@patch.object(FakeKojiModuleBuilder, "_get_tagged_nvrs")
def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session, db_session):
def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session):
"""
Tests that only tagged artifacts will be untagged
"""
@@ -526,9 +540,10 @@ class TestKojiBuilder:
@pytest.mark.parametrize("custom_whitelist", [False, True])
@pytest.mark.parametrize("repo_include_all", [False, True])
def test_buildroot_connect(
self, custom_whitelist, blocklist, repo_include_all, db_session
self, custom_whitelist, blocklist, repo_include_all
):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
db_session.refresh(module_build)
if blocklist:
mmd = module_build.mmd()
@@ -566,6 +581,7 @@ class TestKojiBuilder:
db_session.commit()
module_build.arches.append(module_build_service.models.ModuleArch(name="i686"))
db_session.commit()
builder = FakeKojiModuleBuilder(
db_session=db_session,
@@ -613,7 +629,7 @@ class TestKojiBuilder:
expected_calls = []
assert session.packageListBlock.mock_calls == expected_calls
expected_arches = "x86_64 i686"
expected_arches = "i686 x86_64"
expected_calls = [
mock.call(
@@ -638,8 +654,9 @@ class TestKojiBuilder:
assert session.editTag2.mock_calls == expected_calls
@pytest.mark.parametrize("blocklist", [False, True])
def test_buildroot_connect_create_tag(self, blocklist, db_session):
def test_buildroot_connect_create_tag(self, blocklist):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
db_session.refresh(module_build)
if blocklist:
mmd = module_build.mmd()
@@ -675,7 +692,7 @@ class TestKojiBuilder:
assert session.packageListBlock.mock_calls == expected_calls
@pytest.mark.parametrize("scratch", [False, True])
def test_buildroot_connect_create_target(self, scratch, db_session):
def test_buildroot_connect_create_target(self, scratch):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
if scratch:
@@ -710,7 +727,7 @@ class TestKojiBuilder:
assert session.createBuildTarget.mock_calls == expected_calls
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_built_rpms_in_module_build(self, ClientSession, db_session):
def test_get_built_rpms_in_module_build(self, ClientSession):
session = ClientSession.return_value
session.listTaggedRPMS.return_value = (
[
@@ -802,7 +819,7 @@ class TestKojiBuilder:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_filtered_rpms_on_self_dep(
self, ClientSession, br_filtered_rpms, expected, db_session
self, ClientSession, br_filtered_rpms, expected
):
session = ClientSession.return_value
session.listTaggedRPMS.return_value = (
@@ -857,9 +874,12 @@ class TestKojiBuilder:
"cg_enabled,cg_devel_enabled", [(False, False), (True, False), (True, True)]
)
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiContentGenerator")
def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled, db_session):
def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
db_session.refresh(module_build)
module_build.state = 2
db_session.commit()
with patch(
"module_build_service.config.Config.koji_enable_content_generator",
new_callable=mock.PropertyMock,
@@ -899,14 +919,14 @@ class TestKojiBuilder:
@patch.dict("sys.modules", krbV=MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession, db_session):
def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
builder = KojiModuleBuilder(db_session, "owner", module_build, conf, "module-tag", [])
builder.koji_session.krb_login.assert_called_once()
@patch.dict("sys.modules", krbV=MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_module_build_arches(self, ClientSession, db_session):
def test_get_module_build_arches(self, ClientSession):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
arches = "x86_64 i686 ppc64le aarch64 s390x"
session = ClientSession.return_value
@@ -961,29 +981,28 @@ class TestGetDistTagSRPM:
@patch("tempfile.mkdtemp")
@patch("module_build_service.builder.KojiModuleBuilder.execute_cmd")
def _build_srpm(self, db_session, execute_cmd, mkdtemp):
def _build_srpm(self, execute_cmd, mkdtemp):
module_build = make_module_in_db(
"{name}:{stream}:{version}:{context}".format(**self.module_nsvc),
xmd=self.xmd,
db_session=db_session)
xmd=self.xmd)
mkdtemp.return_value = self.tmp_srpm_build_dir
return KojiModuleBuilder.get_disttag_srpm("disttag", module_build)
def test_return_srpm_file(self, db_session):
srpm_file = self._build_srpm(db_session)
def test_return_srpm_file(self):
srpm_file = self._build_srpm()
assert self.expected_srpm_file == srpm_file
def test_filtered_rpms_are_added(self, db_session):
self._build_srpm(db_session)
def test_filtered_rpms_are_added(self):
self._build_srpm()
with open(self.spec_file, "r") as f:
content = f.read()
for nevr in ["baz-devel-0:0.1-6.fc28", "baz-doc-0:0.1-6.fc28"]:
assert KojiModuleBuilder.format_conflicts_line(nevr) + "\n" in content
def test_ursine_rpms_are_added(self, db_session):
self._build_srpm(db_session)
def test_ursine_rpms_are_added(self):
self._build_srpm()
with open(self.spec_file, "r") as f:
content = f.read()

View File

@@ -10,6 +10,7 @@ from textwrap import dedent
import kobo.rpmlib
from module_build_service import conf
from module_build_service.db_session import db_session
from module_build_service.models import ModuleBuild, ComponentBuild
from module_build_service.builder.MockModuleBuilder import MockModuleBuilder
from module_build_service.utils import import_fake_base_module, mmd_to_str, load_mmd
@@ -113,7 +114,7 @@ class TestMockModuleBuilder:
return module
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_filter_last_batch(self, db_session):
def test_createrepo_filter_last_batch(self):
module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
@@ -140,7 +141,7 @@ class TestMockModuleBuilder:
assert "ed" not in rpm_names
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_not_last_batch(self, db_session):
def test_createrepo_not_last_batch(self):
module = self._create_module_with_filters(db_session, 2, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
@@ -165,7 +166,7 @@ class TestMockModuleBuilder:
assert "ed" in rpm_names
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_empty_rmp_list(self, db_session):
def test_createrepo_empty_rmp_list(self):
module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
@@ -199,17 +200,17 @@ class TestMockModuleBuilderAddRepos:
"module_build_service.builder.MockModuleBuilder.MockModuleBuilder._write_mock_config"
)
def test_buildroot_add_repos(
self, write_config, load_config, patched_open, base_module_repofiles, db_session
self, write_config, load_config, patched_open, base_module_repofiles
):
import_fake_base_module(db_session, "platform:f29:1:000000")
import_fake_base_module("platform:f29:1:000000")
platform = ModuleBuild.get_last_build_in_stream(db_session, "platform", "f29")
module_deps = [{
"requires": {"platform": ["f29"]},
"buildrequires": {"platform": ["f29"]},
}]
foo = make_module_in_db("foo:1:1:1", module_deps, db_session=db_session)
app = make_module_in_db("app:1:1:1", module_deps, db_session=db_session)
foo = make_module_in_db("foo:1:1:1", module_deps)
app = make_module_in_db("app:1:1:1", module_deps)
patched_open.side_effect = [
mock.mock_open(read_data="[fake]\nrepofile 1\n").return_value,

View File

@@ -10,6 +10,7 @@ from os import path
import module_build_service.messaging
import module_build_service.scheduler.handlers.repos # noqa
from module_build_service import models, conf, build_logs, Modulemd
from module_build_service.db_session import db_session
from module_build_service.utils.general import mmd_to_str
from mock import patch, Mock, call, mock_open
@@ -34,7 +35,7 @@ GET_USER_RV = {
class TestBuild:
def setup_method(self, test_method):
init_data(1, contexts=True)
module = models.ModuleBuild.query.filter_by(id=2).one()
module = models.ModuleBuild.get_by_id(db_session, 2)
module.cg_build_koji_tag = "f27-module-candidate"
self.cg = KojiContentGenerator(module, conf)
@@ -50,8 +51,7 @@ class TestBuild:
# Ensure that there is no build log from other tests
try:
with models.make_db_session(conf) as db_session:
file_path = build_logs.path(db_session, self.cg.module)
file_path = build_logs.path(db_session, self.cg.module)
os.remove(file_path)
except OSError:
pass
@@ -68,8 +68,7 @@ class TestBuild:
import moksha.hub.reactor # noqa
try:
with models.make_db_session(conf) as db_session:
file_path = build_logs.path(db_session, self.cg.module)
file_path = build_logs.path(db_session, self.cg.module)
os.remove(file_path)
except OSError:
pass
@@ -116,8 +115,7 @@ class TestBuild:
expected_output = json.load(expected_output_file)
# create the build.log
with models.make_db_session(conf) as db_session:
build_logs.start(db_session, self.cg.module)
build_logs.start(db_session, self.cg.module)
build_logs.stop(self.cg.module)
self.cg.devel = devel

View File

@@ -6,6 +6,7 @@ import shutil
import tempfile
from module_build_service import log, models
from module_build_service.db_session import db_session
from module_build_service.logger import ModuleBuildLogs
from module_build_service.scheduler.consumer import MBSConsumer
from tests import init_data
@@ -39,7 +40,7 @@ class TestLogger:
MBSConsumer.current_module_build_id = None
shutil.rmtree(self.base)
def test_module_build_logs(self, db_session):
def test_module_build_logs(self):
"""
Tests that ModuleBuildLogs is logging properly to build log file.
"""
@@ -98,7 +99,7 @@ class TestLogger:
data = f.read()
assert data.find("ignore this test msg3") == -1
def test_module_build_logs_name_format(self, db_session):
def test_module_build_logs_name_format(self):
build = models.ModuleBuild.get_by_id(db_session, 2)
log1 = ModuleBuildLogs("/some/path", "build-{id}.log")

View File

@@ -4,6 +4,7 @@ import pytest
from mock import patch
from module_build_service import app, models
from module_build_service.db_session import db_session
from module_build_service.manage import manager_wrapper, retire
from module_build_service.models import BUILD_STATES, ModuleBuild
from module_build_service.utils.general import deps_to_dict
@@ -44,7 +45,7 @@ class TestMBSManage:
),
)
@patch("module_build_service.manage.prompt_bool")
def test_retire_build(self, prompt_bool, overrides, identifier, changed_count, db_session):
def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):
prompt_bool.return_value = True
module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
@@ -83,7 +84,7 @@ class TestMBSManage:
)
@patch("module_build_service.manage.prompt_bool")
def test_retire_build_confirm_prompt(
self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected, db_session
self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected
):
prompt_bool.return_value = confirm_prompt
@@ -149,7 +150,7 @@ class TestCommandBuildModuleLocally:
app.config["SQLALCHEMY_DATABASE_URI"] = original_db_uri
@patch("module_build_service.scheduler.main")
def test_set_stream(self, main, db_session):
def test_set_stream(self, main):
cli_cmd = [
"mbs-manager", "build_module_locally",
"--set-stream", "platform:f28",
@@ -193,7 +194,7 @@ class TestCommandBuildModuleLocally:
args, _ = logging.error.call_args_list[1]
assert "Use '-s module_name:module_stream' to choose the stream" == args[0]
def test_module_build_failed(self, db_session):
def test_module_build_failed(self):
cli_cmd = [
"mbs-manager", "build_module_locally",
"--set-stream", "platform:f28",

View File

@@ -4,6 +4,7 @@ import pytest
from mock import patch
from module_build_service import conf
from module_build_service.db_session import db_session
from module_build_service.models import ComponentBuild, ComponentBuildTrace, ModuleBuild
from module_build_service.utils.general import mmd_to_str, load_mmd
from tests import init_data as init_data_contexts, clean_database, read_staged_data
@@ -13,7 +14,7 @@ from tests import make_module_in_db, module_build_from_modulemd
@pytest.mark.usefixtures("model_tests_init_data")
class TestModels:
def test_app_sqlalchemy_events(self, db_session):
def test_app_sqlalchemy_events(self):
component_build = ComponentBuild(
package="before_models_committed",
scmurl="git://pkgs.domain.local/rpms/before_models_committed?"
@@ -40,7 +41,7 @@ class TestModels:
assert component_builds_trace.state_reason is None
assert component_builds_trace.task_id == 999999999
def test_context_functions(self, db_session):
def test_context_functions(self):
""" Test that the build_context, runtime_context, and context hashes are correctly
determined"""
db_session.commit()
@@ -55,7 +56,7 @@ class TestModels:
assert build.context == "3ee22b28"
assert build.build_context_no_bms == "089df24993c037e10174f3fa7342ab4dc191a4d4"
def test_siblings_property(self, db_session):
def test_siblings_property(self):
""" Tests that the siblings property returns the ID of all modules with
the same name:stream:version
"""
@@ -93,7 +94,7 @@ class TestModels:
class TestModelsGetStreamsContexts:
def test_get_last_build_in_all_streams(self, db_session):
def test_get_last_build_in_all_streams(self):
init_data_contexts(contexts=True)
builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")
builds = sorted([
@@ -102,7 +103,7 @@ class TestModelsGetStreamsContexts:
db_session.commit()
assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]
def test_get_last_build_in_all_stream_last_version(self, db_session):
def test_get_last_build_in_all_stream_last_version(self):
init_data_contexts(contexts=False)
builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")
builds = [
@@ -111,7 +112,7 @@ class TestModelsGetStreamsContexts:
db_session.commit()
assert builds == ["nginx:1:11"]
def test_get_last_builds_in_stream(self, db_session):
def test_get_last_builds_in_stream(self):
init_data_contexts(contexts=True)
builds = ModuleBuild.get_last_builds_in_stream(db_session, "nginx", "1")
builds = [
@@ -121,7 +122,7 @@ class TestModelsGetStreamsContexts:
db_session.commit()
assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]
def test_get_last_builds_in_stream_version_lte(self, db_session):
def test_get_last_builds_in_stream_version_lte(self):
init_data_contexts(1, multiple_stream_versions=True)
builds = ModuleBuild.get_last_builds_in_stream_version_lte(db_session, "platform", 290100)
builds = {
@@ -131,7 +132,7 @@ class TestModelsGetStreamsContexts:
db_session.commit()
assert builds == {"platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"}
def test_get_last_builds_in_stream_version_lte_different_versions(self, db_session):
def test_get_last_builds_in_stream_version_lte_different_versions(self):
"""
Tests that get_last_builds_in_stream_version_lte works in case the
name:stream_ver modules have different versions.
@@ -139,26 +140,19 @@ class TestModelsGetStreamsContexts:
clean_database(False)
make_module_in_db(
"platform:f29.1.0:10:old_version",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.1.0:10:old_version", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.1.0:15:c11.another",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.1.0:15:c11.another", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.1.0:15:c11",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.1.0:15:c11", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.2.0:0:old_version",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.2.0:0:old_version", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.2.0:1:c11",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.2.0:1:c11", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.3.0:15:old_version",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.3.0:15:old_version", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.3.0:20:c11",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.3.0:20:c11", virtual_streams=["f29"])
builds = ModuleBuild.get_last_builds_in_stream_version_lte(
db_session, "platform", 290200)
@@ -173,30 +167,26 @@ class TestModelsGetStreamsContexts:
"platform:f29.2.0:1:c11",
}
def test_get_module_count(self, db_session):
def test_get_module_count(self):
clean_database(False)
make_module_in_db("platform:f29.1.0:10:c11", db_session=db_session)
make_module_in_db("platform:f29.1.0:10:c12", db_session=db_session)
make_module_in_db("platform:f29.1.0:10:c11")
make_module_in_db("platform:f29.1.0:10:c12")
count = ModuleBuild.get_module_count(db_session, name="platform")
db_session.commit()
assert count == 2
def test_add_virtual_streams_filter(self, db_session):
def test_add_virtual_streams_filter(self):
clean_database(False)
make_module_in_db(
"platform:f29.1.0:10:c1",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.1.0:10:c1", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.1.0:15:c1",
db_session=db_session, virtual_streams=["f29"])
"platform:f29.1.0:15:c1", virtual_streams=["f29"])
make_module_in_db(
"platform:f29.3.0:15:old_version",
db_session=db_session, virtual_streams=["f28", "f29"])
"platform:f29.3.0:15:old_version", virtual_streams=["f28", "f29"])
make_module_in_db(
"platform:f29.3.0:20:c11",
db_session=db_session, virtual_streams=["f30"])
"platform:f29.3.0:20:c11", virtual_streams=["f30"])
query = db_session.query(ModuleBuild).filter_by(name="platform")
query = ModuleBuild._add_virtual_streams_filter(db_session, query, ["f28", "f29"])

View File

@@ -7,6 +7,7 @@ import mock
import module_build_service.config as mbs_config
import module_build_service.monitor
from module_build_service import models
from module_build_service.db_session import db_session
from conf.config import TestConfiguration
from six.moves import reload_module
@@ -49,14 +50,17 @@ def test_standalone_metrics_server():
@mock.patch("module_build_service.monitor.builder_failed_counter.labels")
@mock.patch("module_build_service.monitor.builder_success_counter.inc")
def test_monitor_state_changing_success(succ_cnt, failed_cnt, db_session):
def test_monitor_state_changing_success(succ_cnt, failed_cnt):
conf = mbs_config.Config(TestConfiguration)
b = make_module_in_db(
"pkg:0.1:1:c1", [{
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}],
db_session=db_session)
"pkg:0.1:1:c1",
[
{
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}
],
)
b.transition(db_session, conf, models.BUILD_STATES["wait"])
b.transition(db_session, conf, models.BUILD_STATES["build"])
b.transition(db_session, conf, models.BUILD_STATES["done"])
@@ -67,15 +71,18 @@ def test_monitor_state_changing_success(succ_cnt, failed_cnt, db_session):
@mock.patch("module_build_service.monitor.builder_failed_counter.labels")
@mock.patch("module_build_service.monitor.builder_success_counter.inc")
def test_monitor_state_changing_failure(succ_cnt, failed_cnt, db_session):
def test_monitor_state_changing_failure(succ_cnt, failed_cnt):
failure_type = "user"
conf = mbs_config.Config(TestConfiguration)
b = make_module_in_db(
"pkg:0.1:1:c1", [{
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}],
db_session=db_session)
"pkg:0.1:1:c1",
[
{
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}
],
)
b.transition(db_session, conf, models.BUILD_STATES["wait"])
b.transition(db_session, conf, models.BUILD_STATES["build"])
b.transition(db_session, conf, models.BUILD_STATES["failed"], failure_type=failure_type)

View File

@@ -11,13 +11,14 @@ from module_build_service import models, utils, Modulemd
from module_build_service.utils import import_mmd, mmd_to_str, load_mmd
from module_build_service.models import ModuleBuild
from module_build_service.errors import UnprocessableEntity
from module_build_service.db_session import db_session
import tests
@pytest.mark.usefixtures("reuse_component_init_data")
class TestDBModule:
def test_get_buildrequired_modulemds(self, db_session):
def test_get_buildrequired_modulemds(self):
mmd = load_mmd(tests.read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
@@ -52,9 +53,7 @@ class TestDBModule:
assert nsvcs == {"testmodule:master:20170109091357:123"}
@pytest.mark.parametrize("stream_versions", [False, True])
def test_get_compatible_base_module_modulemds_stream_versions(
self, stream_versions, db_session
):
def test_get_compatible_base_module_modulemds_stream_versions(self, stream_versions):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
platform = db_session.query(ModuleBuild).filter_by(name="platform", stream="f29.1.0").one()
@@ -73,7 +72,7 @@ class TestDBModule:
}
@pytest.mark.parametrize("empty_buildrequires", [False, True])
def test_get_module_build_dependencies(self, empty_buildrequires, db_session):
def test_get_module_build_dependencies(self, empty_buildrequires):
"""
Tests that the buildrequires of testmodule are returned
"""
@@ -95,7 +94,7 @@ class TestDBModule:
"testmodule", "master", "20170109091357", "78e4a6fd").keys()
assert set(result) == expected
def test_get_module_build_dependencies_recursive(self, db_session):
def test_get_module_build_dependencies_recursive(self):
"""
Tests that the buildrequires are returned when it is two layers deep
"""
@@ -136,13 +135,11 @@ class TestDBModule:
new_callable=PropertyMock,
return_value=tests.staged_data_filename("local_builds"),
)
def test_get_module_build_dependencies_recursive_requires(
self, resultdir, conf_system, db_session
):
def test_get_module_build_dependencies_recursive_requires(self, resultdir, conf_system):
"""
Tests that it returns the requires of the buildrequires recursively
"""
utils.load_local_builds(db_session, ["platform", "parent", "child", "testmodule"])
utils.load_local_builds(["platform", "parent", "child", "testmodule"])
build = models.ModuleBuild.local_modules(db_session, "child", "master")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
@@ -153,7 +150,7 @@ class TestDBModule:
expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]
assert set(result) == set(expected)
def test_resolve_requires(self, db_session):
def test_resolve_requires(self):
build = models.ModuleBuild.get_by_id(db_session, 2)
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
result = resolver.resolve_requires(
@@ -170,7 +167,7 @@ class TestDBModule:
}
}
def test_resolve_requires_exception(self, db_session):
def test_resolve_requires_exception(self):
build = models.ModuleBuild.get_by_id(db_session, 2)
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
with pytest.raises(UnprocessableEntity):
@@ -178,7 +175,7 @@ class TestDBModule:
[":".join(["abcdefghi", build.stream, build.version, build.context])]
)
def test_resolve_requires_siblings(self, db_session):
def test_resolve_requires_siblings(self):
tests.clean_database()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
mmd = load_mmd(tests.read_staged_data("formatted_testmodule"))
@@ -205,7 +202,7 @@ class TestDBModule:
db_session.commit()
def test_resolve_profiles(self, db_session):
def test_resolve_profiles(self):
"""
Tests that the profiles get resolved recursively
"""
@@ -259,30 +256,30 @@ class TestDBModule:
new_callable=PropertyMock,
return_value=tests.staged_data_filename("local_builds")
)
def test_resolve_profiles_local_module(self, local_builds, conf_system, db_session):
def test_resolve_profiles_local_module(self, local_builds, conf_system):
"""
Test that profiles get resolved recursively on local builds
"""
utils.load_local_builds(db_session, ["platform"])
utils.load_local_builds(["platform"])
mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
expected = {"buildroot": {"foo"}, "srpm-buildroot": {"bar"}}
assert result == expected
def test_get_latest_with_virtual_stream(self, db_session):
def test_get_latest_with_virtual_stream(self):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
mmd = resolver.get_latest_with_virtual_stream("platform", "f29")
assert mmd
assert mmd.get_stream_name() == "f29.2.0"
def test_get_latest_with_virtual_stream_none(self, db_session):
def test_get_latest_with_virtual_stream_none(self):
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")
assert not mmd
def test_get_module_count(self, db_session):
def test_get_module_count(self):
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
count = resolver.get_module_count(name="platform", stream="f28")
assert count == 1

View File

@@ -5,15 +5,16 @@ from mock import patch, MagicMock
from datetime import datetime
import module_build_service.resolver as mbs_resolver
from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd
from module_build_service.models import ModuleBuild, BUILD_STATES
import tests
from module_build_service.db_session import db_session
from module_build_service.models import ModuleBuild, BUILD_STATES
from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd
@pytest.mark.usefixtures("reuse_component_init_data")
class TestLocalResolverModule:
def _create_test_modules(self, db_session, koji_tag_with_modules="foo-test"):
def _create_test_modules(self, koji_tag_with_modules="foo-test"):
mmd = load_mmd(tests.read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
@@ -51,8 +52,8 @@ class TestLocalResolverModule:
db_session.add(build)
db_session.commit()
def test_get_buildrequired_modulemds_fallback_to_db_resolver(self, db_session):
self._create_test_modules(db_session, koji_tag_with_modules=None)
def test_get_buildrequired_modulemds_fallback_to_db_resolver(self):
self._create_test_modules(koji_tag_with_modules=None)
platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
@@ -64,7 +65,7 @@ class TestLocalResolverModule:
"testmodule:master:20170109091357:7c29193e"}
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_buildrequired_modulemds_name_not_tagged(self, ClientSession, db_session):
def test_get_buildrequired_modulemds_name_not_tagged(self, ClientSession):
koji_session = ClientSession.return_value
koji_session.getLastEvent.return_value = {"id": 123}
@@ -72,7 +73,7 @@ class TestLocalResolverModule:
koji_session.listTagged.return_value = []
koji_session.multiCall.return_value = [[]]
self._create_test_modules(db_session)
self._create_test_modules()
platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
result = resolver.get_buildrequired_modulemds("testmodule", "master", platform.mmd())
@@ -82,7 +83,7 @@ class TestLocalResolverModule:
"foo-test", inherit=True, package="testmodule", type="module", event=123)
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_buildrequired_modulemds_multiple_streams(self, ClientSession, db_session):
def test_get_buildrequired_modulemds_multiple_streams(self, ClientSession):
koji_session = ClientSession.return_value
# We will ask for testmodule:master, but there is also testmodule:2 in a tag.
@@ -99,7 +100,7 @@ class TestLocalResolverModule:
koji_session.multiCall.return_value = [
[build] for build in koji_session.listTagged.return_value]
self._create_test_modules(db_session)
self._create_test_modules()
platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
result = resolver.get_buildrequired_modulemds("testmodule", "master", platform.mmd())
@@ -108,7 +109,7 @@ class TestLocalResolverModule:
assert nsvcs == {"testmodule:master:20170109091357:7c29193d"}
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_buildrequired_modulemds_tagged_but_not_in_db(self, ClientSession, db_session):
def test_get_buildrequired_modulemds_tagged_but_not_in_db(self, ClientSession):
koji_session = ClientSession.return_value
# We will ask for testmodule:2, but it is not in database, so it should raise
@@ -126,7 +127,7 @@ class TestLocalResolverModule:
koji_session.multiCall.return_value = [
[build] for build in koji_session.listTagged.return_value]
self._create_test_modules(db_session)
self._create_test_modules()
platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
expected_error = ("Module testmodule:2:820181219174508:9edba152 is tagged in the "
@@ -135,8 +136,7 @@ class TestLocalResolverModule:
resolver.get_buildrequired_modulemds("testmodule", "2", platform.mmd())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_buildrequired_modulemds_multiple_versions_contexts(
self, ClientSession, db_session):
def test_get_buildrequired_modulemds_multiple_versions_contexts(self, ClientSession):
koji_session = ClientSession.return_value
# We will ask for testmodule:2, but it is not in database, so it should raise
@@ -162,7 +162,7 @@ class TestLocalResolverModule:
koji_session.multiCall.return_value = [
[build] for build in koji_session.listTagged.return_value]
self._create_test_modules(db_session)
self._create_test_modules()
platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
result = resolver.get_buildrequired_modulemds("testmodule", "master", platform.mmd())
@@ -173,7 +173,7 @@ class TestLocalResolverModule:
"testmodule:master:20170109091357:7c29193e"}
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_buildrequired_modules(self, ClientSession, db_session):
def test_get_buildrequired_modules(self, ClientSession):
koji_session = ClientSession.return_value
# We will ask for testmodule:master, but there is also testmodule:2 in a tag.
@@ -190,7 +190,7 @@ class TestLocalResolverModule:
koji_session.multiCall.return_value = [
[build] for build in koji_session.listTagged.return_value]
self._create_test_modules(db_session)
self._create_test_modules()
platform = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
result = resolver.get_buildrequired_modules("testmodule", "master", platform.mmd())
@@ -199,7 +199,7 @@ class TestLocalResolverModule:
assert nvrs == {"testmodule-master-20170109091357.7c29193d"}
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_filter_inherited(self, ClientSession, db_session):
def test_filter_inherited(self, ClientSession):
koji_session = ClientSession.return_value
koji_session.getFullInheritance.return_value = [
@@ -230,7 +230,7 @@ class TestLocalResolverModule:
"testmodule-2-20180109091357.7c29193d"}
@patch("module_build_service.builder.KojiModuleBuilder.koji_multicall_map")
def test_filter_based_on_real_stream_name(self, koji_multicall_map, db_session):
def test_filter_based_on_real_stream_name(self, koji_multicall_map):
koji_session = MagicMock()
koji_multicall_map.return_value = [
{"build_id": 124, "extra": {"typeinfo": {"module": {"stream": "foo-test"}}}},
@@ -253,8 +253,7 @@ class TestLocalResolverModule:
assert build_ids == {124, 126, 127}
@patch("module_build_service.builder.KojiModuleBuilder.koji_multicall_map")
def test_filter_based_on_real_stream_name_multicall_error(
self, koji_multicall_map, db_session):
def test_filter_based_on_real_stream_name_multicall_error(self, koji_multicall_map):
koji_session = MagicMock()
koji_multicall_map.return_value = None
@@ -267,7 +266,7 @@ class TestLocalResolverModule:
with pytest.raises(RuntimeError, match=expected_error):
resolver._filter_based_on_real_stream_name(koji_session, builds, "foo-test")
def test_get_compatible_base_module_modulemds_fallback_to_dbresolver(self, db_session):
def test_get_compatible_base_module_modulemds_fallback_to_dbresolver(self):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
platform = db_session.query(ModuleBuild).filter_by(name="platform", stream="f29.1.0").one()
@@ -278,7 +277,7 @@ class TestLocalResolverModule:
assert len(result) == 2
def test_get_compatible_base_module_modulemds(self, db_session):
def test_get_compatible_base_module_modulemds(self):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="koji")
@@ -288,6 +287,7 @@ class TestLocalResolverModule:
platform_xmd["mbs"]["koji_tag_with_modules"] = "module-f29-build"
platform_mmd.set_xmd(platform_xmd)
platform.modulemd = mmd_to_str(platform_mmd)
db_session.commit()
result = resolver.get_compatible_base_module_modulemds(
platform_mmd, stream_version_lte=True, virtual_streams=["f29"],

View File

@@ -4,19 +4,20 @@ import pytest
from datetime import datetime
import module_build_service.resolver as mbs_resolver
from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd
from module_build_service.db_session import db_session
from module_build_service.models import ModuleBuild
from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd
import tests
@pytest.mark.usefixtures("reuse_component_init_data")
class TestLocalResolverModule:
def test_get_buildrequired_modulemds(self, db_session):
def test_get_buildrequired_modulemds(self):
mmd = load_mmd(tests.read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), "f8")
import_mmd(db_session, mmd)
platform_f8 = ModuleBuild.query.filter_by(stream="f8").one()
platform_f8 = db_session.query(ModuleBuild).filter_by(stream="f8").one()
mmd = mmd.copy("testmodule", "master")
mmd.set_version(20170109091357)
mmd.set_context("123")

View File

@@ -4,6 +4,7 @@ from mock import patch, PropertyMock, Mock, call
import module_build_service.resolver as mbs_resolver
import module_build_service.utils
from module_build_service.db_session import db_session
from module_build_service.utils.general import mmd_to_str
import module_build_service.models
import tests
@@ -11,7 +12,7 @@ import tests
class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e, db_session):
def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):
""" Tests for querying a module from mbs """
mock_res = Mock()
mock_res.ok.return_value = True
@@ -54,7 +55,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_modulemds_partial(
self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed, db_session
self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed
):
""" Test for querying MBS without the context of a module """
@@ -106,7 +107,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_build_dependencies(
self, mock_session, platform_mmd, testmodule_mmd_9c690d0e, db_session
self, mock_session, platform_mmd, testmodule_mmd_9c690d0e
):
"""
Tests that we return just direct build-time dependencies of testmodule.
@@ -183,7 +184,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_build_dependencies_empty_buildrequires(
self, mock_session, testmodule_mmd_9c690d0e, db_session
self, mock_session, testmodule_mmd_9c690d0e
):
mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e)
@@ -237,7 +238,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_resolve_profiles(
self, mock_session, formatted_testmodule_mmd, platform_mmd, db_session
self, mock_session, formatted_testmodule_mmd, platform_mmd
):
mock_res = Mock()
@@ -323,20 +324,19 @@ class TestMBSModule:
return_value=tests.staged_data_filename("local_builds")
)
def test_resolve_profiles_local_module(
self, local_builds, conf_system, formatted_testmodule_mmd, db_session
self, local_builds, conf_system, formatted_testmodule_mmd
):
tests.clean_database()
with tests.app.app_context():
module_build_service.utils.load_local_builds(db_session, ["platform"])
module_build_service.utils.load_local_builds(["platform"])
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.resolve_profiles(
formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))
expected = {"buildroot": {"foo"}, "srpm-buildroot": {"bar"}}
assert result == expected
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.resolve_profiles(
formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))
expected = {"buildroot": {"foo"}, "srpm-buildroot": {"bar"}}
assert result == expected
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_empty_buildrequired_modulemds(self, request_session, db_session):
def test_get_empty_buildrequired_modulemds(self, request_session):
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
request_session.get.return_value = Mock(ok=True)
request_session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}
@@ -346,7 +346,7 @@ class TestMBSModule:
assert [] == result
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_buildrequired_modulemds(self, mock_session, db_session):
def test_get_buildrequired_modulemds(self, mock_session):
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
mock_session.get.return_value = Mock(ok=True)
mock_session.get.return_value.json.return_value = {
@@ -384,7 +384,7 @@ class TestMBSModule:
assert "c1" == mmd.get_context()
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_count(self, mock_session, db_session):
def test_get_module_count(self, mock_session):
mock_res = Mock()
mock_res.ok.return_value = True
mock_res.json.return_value = {
@@ -403,7 +403,7 @@ class TestMBSModule:
)
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd, db_session):
def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd):
mock_res = Mock()
mock_res.ok.return_value = True
mock_res.json.return_value = {
@@ -445,11 +445,11 @@ class TestMBSModule:
return_value=tests.staged_data_filename("local_builds")
)
def test_get_buildrequired_modulemds_local_builds(
self, local_builds, conf_system, db_session
self, local_builds, conf_system
):
tests.clean_database()
with tests.app.app_context():
module_build_service.utils.load_local_builds(db_session, ["testmodule"])
module_build_service.utils.load_local_builds(["testmodule"])
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.get_buildrequired_modulemds(
@@ -462,7 +462,7 @@ class TestMBSModule:
assert "321" == mmd.get_context()
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_buildrequired_modulemds_kojiresolver(self, mock_session, db_session):
def test_get_buildrequired_modulemds_kojiresolver(self, mock_session):
"""
Test that MBSResolver uses KojiResolver as input when KojiResolver is enabled for
the base module.

View File

@@ -73,7 +73,7 @@ class TestConsumer:
}
consumer.consume(msg)
assert process_message.call_count == 1
msg_obj = process_message.call_args[0][1]
msg_obj = process_message.call_args[0][0]
assert isinstance(msg_obj, KojiRepoChange)
assert msg_obj.msg_id == msg["body"]["msg_id"]
assert msg_obj.repo_tag == msg["body"]["msg"]["tag"]

View File

@@ -7,6 +7,7 @@ import dnf
from mock import call, Mock, patch, PropertyMock
import pytest
from module_build_service.db_session import db_session
from module_build_service.errors import UnprocessableEntity
from module_build_service.models import ModuleBuild
from module_build_service.scheduler import default_modules
@@ -16,7 +17,7 @@ from tests import clean_database, conf, make_module_in_db, read_staged_data, imp
@patch("module_build_service.scheduler.default_modules.handle_collisions_with_base_module_rpms")
@patch("module_build_service.scheduler.default_modules._get_default_modules")
def test_add_default_modules(mock_get_dm, mock_hc, db_session):
def test_add_default_modules(mock_get_dm, mock_hc):
"""
Test that default modules present in the database are added, and the others are ignored.
"""
@@ -42,10 +43,8 @@ def test_add_default_modules(mock_get_dm, mock_hc, db_session):
dependencies = [
{"requires": {"platform": ["f28"]},
"buildrequires": {"platform": ["f28"]}}]
make_module_in_db("python:3:12345:1", base_module=platform, db_session=db_session,
dependencies=dependencies)
make_module_in_db("nodejs:11:2345:2", base_module=platform, db_session=db_session,
dependencies=dependencies)
make_module_in_db("python:3:12345:1", base_module=platform, dependencies=dependencies)
make_module_in_db("nodejs:11:2345:2", base_module=platform, dependencies=dependencies)
db_session.commit()
mock_get_dm.return_value = {
@@ -53,7 +52,7 @@ def test_add_default_modules(mock_get_dm, mock_hc, db_session):
"python": "3",
"ruby": "2.6",
}
defaults_added = default_modules.add_default_modules(db_session, mmd, ["x86_64"])
defaults_added = default_modules.add_default_modules(mmd, ["x86_64"])
# Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in
# the database
assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform", "python"}
@@ -66,19 +65,19 @@ def test_add_default_modules(mock_get_dm, mock_hc, db_session):
@patch("module_build_service.scheduler.default_modules._get_default_modules")
def test_add_default_modules_not_linked(mock_get_dm, db_session):
def test_add_default_modules_not_linked(mock_get_dm):
"""
Test that no default modules are added when they aren't linked from the base module.
"""
clean_database()
mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))
assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}
default_modules.add_default_modules(db_session, mmd, ["x86_64"])
default_modules.add_default_modules(mmd, ["x86_64"])
assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"platform"}
mock_get_dm.assert_not_called()
def test_add_default_modules_platform_not_available(db_session):
def test_add_default_modules_platform_not_available():
"""
Test that an exception is raised when the platform module that is buildrequired is missing.
@@ -89,11 +88,11 @@ def test_add_default_modules_platform_not_available(db_session):
expected_error = "Failed to retrieve the module platform:f28:3:00000000 from the database"
with pytest.raises(RuntimeError, match=expected_error):
default_modules.add_default_modules(db_session, mmd, ["x86_64"])
default_modules.add_default_modules(mmd, ["x86_64"])
@patch("module_build_service.scheduler.default_modules._get_default_modules")
def test_add_default_modules_compatible_platforms(mock_get_dm, db_session):
def test_add_default_modules_compatible_platforms(mock_get_dm):
"""
Test that default modules built against compatible base module streams are added.
"""
@@ -124,14 +123,12 @@ def test_add_default_modules_compatible_platforms(mock_get_dm, db_session):
dependencies = [
{"requires": {"platform": ["f27"]},
"buildrequires": {"platform": ["f27"]}}]
make_module_in_db("python:3:12345:1", base_module=platform_f27, db_session=db_session,
dependencies=dependencies)
make_module_in_db("python:3:12345:1", base_module=platform_f27, dependencies=dependencies)
# Create nodejs default module which requries any platform stream and therefore can be used
# as default module for platform:f28.
dependencies[0]["requires"]["platform"] = []
make_module_in_db("nodejs:11:2345:2", base_module=platform_f27, db_session=db_session,
dependencies=dependencies)
make_module_in_db("nodejs:11:2345:2", base_module=platform_f27, dependencies=dependencies)
db_session.commit()
mock_get_dm.return_value = {
@@ -139,7 +136,7 @@ def test_add_default_modules_compatible_platforms(mock_get_dm, db_session):
"python": "3",
"ruby": "2.6",
}
defaults_added = default_modules.add_default_modules(db_session, mmd, ["x86_64"])
defaults_added = default_modules.add_default_modules(mmd, ["x86_64"])
# Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in
# the database
assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform"}
@@ -151,13 +148,13 @@ def test_add_default_modules_compatible_platforms(mock_get_dm, db_session):
@patch("module_build_service.scheduler.default_modules._get_default_modules")
def test_add_default_modules_request_failed(mock_get_dm, db_session):
def test_add_default_modules_request_failed(mock_get_dm):
"""
Test that an exception is raised when the call to _get_default_modules failed.
"""
clean_database()
make_module_in_db("python:3:12345:1", db_session=db_session)
make_module_in_db("nodejs:11:2345:2", db_session=db_session)
make_module_in_db("python:3:12345:1")
make_module_in_db("nodejs:11:2345:2")
mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))
xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"]
assert set(xmd_brs.keys()) == {"platform"}
@@ -181,7 +178,7 @@ def test_add_default_modules_request_failed(mock_get_dm, db_session):
mock_get_dm.side_effect = ValueError(expected_error)
with pytest.raises(ValueError, match=expected_error):
default_modules.add_default_modules(db_session, mmd, ["x86_64"])
default_modules.add_default_modules(mmd, ["x86_64"])
@pytest.mark.parametrize("is_rawhide", (True, False))

View File

@@ -6,6 +6,7 @@ from mock import call, patch, Mock
from sqlalchemy import func
from module_build_service import conf
from module_build_service.db_session import db_session
from module_build_service.models import BUILD_STATES, ModuleBuild
from module_build_service.scheduler.consumer import MBSConsumer
from module_build_service.scheduler.handlers.greenwave import get_corresponding_module_build
@@ -20,10 +21,10 @@ class TestGetCorrespondingModuleBuild:
clean_database()
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession, db_session):
def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession):
ClientSession.return_value.getBuild.return_value = None
assert get_corresponding_module_build(db_session, "n-v-r") is None
assert get_corresponding_module_build("n-v-r") is None
@pytest.mark.parametrize(
"build_info",
@@ -37,25 +38,23 @@ class TestGetCorrespondingModuleBuild:
],
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_cannot_find_module_build_id_from_build_info(
self, ClientSession, build_info, db_session
):
def test_cannot_find_module_build_id_from_build_info(self, ClientSession, build_info):
ClientSession.return_value.getBuild.return_value = build_info
assert get_corresponding_module_build(db_session, "n-v-r") is None
assert get_corresponding_module_build("n-v-r") is None
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession, db_session):
def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession):
fake_module_build_id, = db_session.query(func.max(ModuleBuild.id)).first()
ClientSession.return_value.getBuild.return_value = {
"extra": {"typeinfo": {"module": {"module_build_service_id": fake_module_build_id + 1}}}
}
assert get_corresponding_module_build(db_session, "n-v-r") is None
assert get_corresponding_module_build("n-v-r") is None
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_find_the_module_build(self, ClientSession, db_session):
def test_find_the_module_build(self, ClientSession):
expected_module_build = (
db_session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()
)
@@ -64,7 +63,7 @@ class TestGetCorrespondingModuleBuild:
"extra": {"typeinfo": {"module": {"module_build_service_id": expected_module_build.id}}}
}
build = get_corresponding_module_build(db_session, "n-v-r")
build = get_corresponding_module_build("n-v-r")
assert expected_module_build.id == build.id
assert expected_module_build.name == build.name
@@ -74,9 +73,9 @@ class TestDecisionUpdateHandler:
"""Test handler decision_update"""
@patch("module_build_service.scheduler.handlers.greenwave.log")
def test_decision_context_is_not_match(self, log, db_session):
def test_decision_context_is_not_match(self, log):
msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing")
decision_update(conf, db_session, msg)
decision_update(conf, msg)
log.debug.assert_called_once_with(
'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"',
"msg-id-1",
@@ -84,14 +83,14 @@ class TestDecisionUpdateHandler:
)
@patch("module_build_service.scheduler.handlers.greenwave.log")
def test_not_satisfy_policies(self, log, db_session):
def test_not_satisfy_policies(self, log):
msg = Mock(
msg_id="msg-id-1",
decision_context="test_dec_context",
policies_satisfied=False,
subject_identifier="pkg-0.1-1.c1",
)
decision_update(conf, db_session, msg)
decision_update(conf, msg)
log.debug.assert_called_once_with(
"Skip to handle module build %s because it has not satisfied Greenwave policies.",
msg.subject_identifier,
@@ -99,16 +98,19 @@ class TestDecisionUpdateHandler:
@patch("module_build_service.messaging.publish")
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_transform_from_done_to_ready(self, ClientSession, publish, db_session):
def test_transform_from_done_to_ready(self, ClientSession, publish):
clean_database()
# This build should be queried and transformed to ready state
module_build = make_module_in_db(
"pkg:0.1:1:c1", [{
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}],
db_session=db_session)
"pkg:0.1:1:c1",
[
{
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}
],
)
module_build.transition(
db_session, conf, BUILD_STATES["done"], "Move to done directly for running test."
)
@@ -139,6 +141,7 @@ class TestDecisionUpdateHandler:
consumer = MBSConsumer(hub)
consumer.consume(msg)
db_session.add(module_build)
# Load module build again to check its state is moved correctly
db_session.refresh(module_build)
assert BUILD_STATES["ready"] == module_build.state

View File

@@ -9,7 +9,8 @@ from tests.test_views.test_views import FakeSCM
import module_build_service.messaging
import module_build_service.scheduler.handlers.modules
from module_build_service import build_logs
from module_build_service.models import make_db_session, ModuleBuild
from module_build_service.db_session import db_session
from module_build_service.models import ModuleBuild
from module_build_service.utils.general import mmd_to_str, load_mmd
@@ -22,14 +23,12 @@ class TestModuleInit:
mmd = mmd.copy("testmodule", "1")
scmurl = "git://pkgs.domain.local/modules/testmodule?#620ec77"
clean_database()
with make_db_session(conf) as session:
ModuleBuild.create(
session, conf, "testmodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")
ModuleBuild.create(
db_session, conf, "testmodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")
def teardown_method(self, test_method):
try:
with make_db_session(conf) as db_session:
path = build_logs.path(db_session, 1)
path = build_logs.path(db_session, 1)
os.remove(path)
except Exception:
pass
@@ -75,7 +74,7 @@ class TestModuleInit:
msg_id=None, module_build_id=2, module_build_state="init"
)
self.fn(config=conf, db_session=db_session, msg=msg)
self.fn(config=conf, msg=msg)
build = ModuleBuild.get_by_id(db_session, 2)
# Make sure the module entered the wait state
@@ -89,7 +88,7 @@ class TestModuleInit:
hcwbmr.assert_called_once()
return build
def test_init_called_twice(self, db_session):
def test_init_called_twice(self):
build = self.init_basic(db_session)
old_component_builds = len(build.component_builds)
old_mmd = load_mmd(build.modulemd)
@@ -108,7 +107,7 @@ class TestModuleInit:
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_scm_not_available(self, get_build_arches, mocked_scm, db_session):
def test_init_scm_not_available(self, get_build_arches, mocked_scm):
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
get_latest_raise=True,
@@ -117,7 +116,7 @@ class TestModuleInit:
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="init")
self.fn(config=conf, db_session=db_session, msg=msg)
self.fn(config=conf, msg=msg)
build = ModuleBuild.get_by_id(db_session, 2)
# Make sure the module entered the failed state
@@ -132,7 +131,7 @@ class TestModuleInit:
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_includedmodule(
self, get_build_arches, mocked_scm, mocked_mod_allow_repo, db_session
self, get_build_arches, mocked_scm, mocked_mod_allow_repo
):
FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])
includedmodules_yml_path = read_staged_data("includedmodules")
@@ -144,7 +143,7 @@ class TestModuleInit:
db_session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=3, module_build_state="init")
self.fn(config=conf, db_session=db_session, msg=msg)
self.fn(config=conf, msg=msg)
build = ModuleBuild.get_by_id(db_session, 3)
assert build.state == 1
assert build.name == "includemodule"
@@ -170,7 +169,7 @@ class TestModuleInit:
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_when_get_latest_raises(
self, get_build_arches, mocked_scm, mocked_from_module_event, db_session):
self, get_build_arches, mocked_scm, mocked_from_module_event):
FakeSCM(
mocked_scm,
"testmodule",
@@ -183,7 +182,7 @@ class TestModuleInit:
build = ModuleBuild.get_by_id(db_session, 2)
mocked_from_module_event.return_value = build
self.fn(config=conf, db_session=db_session, msg=msg)
self.fn(config=conf, msg=msg)
# Query the database again to make sure the build object is updated
db_session.refresh(build)

View File

@@ -7,10 +7,10 @@ import module_build_service.scheduler.handlers.modules
import os
import koji
import pytest
from tests import conf, scheduler_init_data, read_staged_data
from tests import conf, scheduler_init_data
import module_build_service.resolver
from module_build_service import build_logs, Modulemd
from module_build_service.utils.general import load_mmd
from module_build_service.db_session import db_session
from module_build_service.models import ComponentBuild, ModuleBuild
base_dir = os.path.dirname(os.path.dirname(__file__))
@@ -18,50 +18,34 @@ base_dir = os.path.dirname(os.path.dirname(__file__))
class TestModuleWait:
def setup_method(self, test_method):
scheduler_init_data()
self.config = conf
self.session = mock.Mock()
self.fn = module_build_service.scheduler.handlers.modules.wait
def teardown_method(self, test_method):
try:
with module_build_service.models.make_db_session(conf) as db_session:
path = build_logs.path(db_session, 1)
path = build_logs.path(db_session, 1)
os.remove(path)
except Exception:
pass
@patch("module_build_service.builder.GenericBuilder.create_from_module")
@patch("module_build_service.models.ModuleBuild.from_module_event")
def test_init_basic(self, from_module_event, create_builder):
def test_init_basic(self, create_builder):
builder = mock.Mock()
builder.get_disttag_srpm.return_value = "some srpm disttag"
builder.build.return_value = 1234, 1, "", None
builder.module_build_tag = {"name": "some-tag-build"}
create_builder.return_value = builder
mocked_module_build = mock.Mock()
mocked_module_build.name = "foo"
mocked_module_build.stream = "stream"
mocked_module_build.version = "1"
mocked_module_build.context = "1234567"
mocked_module_build.state = 1
mocked_module_build.id = 1
mocked_module_build.json.return_value = {
"name": "foo",
"stream": "1",
"version": 1,
"state": "some state",
"id": 1,
}
mocked_module_build.id = 1
mocked_module_build.mmd.return_value = load_mmd(read_staged_data("formatted_testmodule"))
mocked_module_build.component_builds = []
from_module_event.return_value = mocked_module_build
module_build_id = db_session.query(ModuleBuild).first().id
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=1, module_build_state="some state")
msg_id=None,
module_build_id=module_build_id,
module_build_state="some state")
with patch("module_build_service.resolver.GenericResolver.create"):
self.fn(config=self.config, db_session=self.session, msg=msg)
self.fn(config=self.config, msg=msg)
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -71,12 +55,11 @@ class TestModuleWait:
@patch("module_build_service.resolver.DBResolver")
@patch("module_build_service.resolver.GenericResolver")
def test_new_repo_called_when_macros_reused(
self, generic_resolver, resolver, create_builder, dbg, db_session
self, generic_resolver, resolver, create_builder, dbg
):
"""
Test that newRepo is called when module-build-macros build is reused.
"""
scheduler_init_data(db_session)
koji_session = mock.MagicMock()
koji_session.newRepo.return_value = 123456
@@ -101,7 +84,7 @@ class TestModuleWait:
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
koji_session.newRepo.assert_called_once_with("module-123-build")
@@ -119,12 +102,11 @@ class TestModuleWait:
@patch("module_build_service.resolver.DBResolver")
@patch("module_build_service.resolver.GenericResolver")
def test_new_repo_not_called_when_macros_not_reused(
self, generic_resolver, resolver, create_builder, dbg, db_session
self, generic_resolver, resolver, create_builder, dbg
):
"""
Test that newRepo is called everytime for module-build-macros
"""
scheduler_init_data(db_session)
koji_session = mock.MagicMock()
koji_session.newRepo.return_value = 123456
@@ -149,7 +131,7 @@ class TestModuleWait:
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
assert koji_session.newRepo.called
@@ -161,14 +143,13 @@ class TestModuleWait:
@patch("module_build_service.resolver.DBResolver")
@patch("module_build_service.resolver.GenericResolver")
def test_set_cg_build_koji_tag_fallback_to_default(
self, generic_resolver, resolver, create_builder, dbg, db_session
self, generic_resolver, resolver, create_builder, dbg
):
"""
Test that build.cg_build_koji_tag fallbacks to default tag.
"""
base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")
scheduler_init_data(db_session)
koji_session = mock.MagicMock()
koji_session.newRepo.return_value = 123456
@@ -196,7 +177,7 @@ class TestModuleWait:
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
module_build = ModuleBuild.get_by_id(db_session, 2)
assert module_build.cg_build_koji_tag == "modular-updates-candidate"
@@ -229,14 +210,12 @@ class TestModuleWait:
dbg,
koji_cg_tag_build,
expected_cg_koji_build_tag,
db_session,
):
"""
Test that build.cg_build_koji_tag is set.
"""
base_mmd = Modulemd.ModuleStreamV2.new("base-runtime", "f27")
scheduler_init_data(db_session)
koji_session = mock.MagicMock()
koji_session.newRepo.return_value = 123456
@@ -269,7 +248,7 @@ class TestModuleWait:
msg_id=None, module_build_id=2, module_build_state="some state"
)
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg
config=conf, msg=msg
)
module_build = ModuleBuild.get_by_id(db_session, 2)
assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

View File

@@ -9,6 +9,7 @@ import mock
import koji
from module_build_service.scheduler.producer import MBSProducer
from module_build_service.messaging import KojiTagChange
from module_build_service.db_session import db_session
import six.moves.queue as queue
from datetime import datetime, timedelta
@@ -39,7 +40,7 @@ class TestPoller:
@pytest.mark.parametrize("fresh", [True, False])
@patch("module_build_service.utils.batches.start_build_component")
def test_process_paused_module_builds(
self, start_build_component, create_builder, global_consumer, dbg, fresh, db_session
self, start_build_component, create_builder, global_consumer, dbg, fresh
):
"""
Tests general use-case of process_paused_module_builds.
@@ -67,8 +68,7 @@ class TestPoller:
poller = MBSProducer(hub)
poller.poll()
# Refresh our module_build object.
db_session.refresh(module_build)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
# If fresh is set, we expect the poller to not touch the module build since it's been less
# than 10 minutes of inactivity
@@ -93,7 +93,7 @@ class TestPoller:
@patch("module_build_service.utils.batches.start_build_component")
def test_process_paused_module_builds_with_new_repo_task(
self, start_build_component, create_builder, global_consumer, dbg, task_state,
expect_start_build_component, db_session
expect_start_build_component
):
"""
Tests general use-case of process_paused_module_builds.
@@ -122,8 +122,7 @@ class TestPoller:
poller = MBSProducer(hub)
poller.poll()
# Refresh our module_build object.
db_session.refresh(module_build)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
if expect_start_build_component:
expected_state = koji.BUILD_STATES["BUILDING"]
@@ -141,7 +140,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_retrigger_new_repo_on_failure(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
"""
        Tests that we call koji_session.newRepo when newRepo task failed.
@@ -176,7 +175,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_trigger_new_repo_when_succeeded(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
"""
        Tests that we do not call koji_session.newRepo when newRepo task
@@ -206,14 +205,13 @@ class TestPoller:
poller = MBSProducer(hub)
poller.poll()
# Refresh our module_build object.
db_session.refresh(module_build)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
assert not koji_session.newRepo.called
assert module_build.new_repo_task_id == 123456
def test_process_paused_module_builds_waiting_for_repo(
self, create_builder, global_consumer, dbg, db_session
self, create_builder, global_consumer, dbg
):
"""
Tests that process_paused_module_builds does not start new batch
@@ -238,8 +236,7 @@ class TestPoller:
poller = MBSProducer(hub)
poller.poll()
# Refresh our module_build object.
db_session.refresh(module_build)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
# Components should not be in building state
components = module_build.current_batch()
@@ -249,7 +246,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_old_build_targets_are_not_associated_with_any_module_builds(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
consumer = mock.MagicMock()
consumer.incoming = queue.Queue()
@@ -264,14 +261,14 @@ class TestPoller:
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db_session)
poller.delete_old_koji_targets(conf)
koji_session.deleteBuildTarget.assert_not_called()
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_dont_delete_base_module_build_target(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
module_build = models.ModuleBuild.get_by_id(db_session, 3)
@@ -289,14 +286,14 @@ class TestPoller:
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db_session)
poller.delete_old_koji_targets(conf)
koji_session.deleteBuildTarget.assert_not_called()
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_dont_delete_build_target_for_unfinished_module_builds(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
module_build = models.ModuleBuild.get_by_id(db_session, 3)
@@ -316,14 +313,14 @@ class TestPoller:
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db_session)
poller.delete_old_koji_targets(conf)
koji_session.deleteBuildTarget.assert_not_called()
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_only_delete_build_target_with_allowed_koji_tag_prefix(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)
# Only module build 1's build target should be deleted.
@@ -354,7 +351,7 @@ class TestPoller:
with patch.object(conf, "koji_target_delete_time", new=60):
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db_session)
poller.delete_old_koji_targets(conf)
koji_session.deleteBuildTarget.assert_called_once_with(1)
koji_session.krb_login.assert_called_once()
@@ -362,7 +359,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_cant_delete_build_target_if_not_reach_delete_time(
self, ClientSession, create_builder, global_consumer, dbg, db_session
self, ClientSession, create_builder, global_consumer, dbg
):
module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)
# Only module build 1's build target should be deleted.
@@ -389,13 +386,13 @@ class TestPoller:
# enough for test.
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db_session)
poller.delete_old_koji_targets(conf)
koji_session.deleteBuildTarget.assert_not_called()
@pytest.mark.parametrize("state", ["init", "wait"])
def test_process_waiting_module_build(
self, create_builder, global_consumer, dbg, state, db_session
self, create_builder, global_consumer, dbg, state
):
""" Test that processing old waiting module builds works. """
@@ -420,7 +417,7 @@ class TestPoller:
assert consumer.incoming.qsize() == 0
# Poll :)
poller.process_waiting_module_builds(db_session)
poller.process_waiting_module_builds()
assert consumer.incoming.qsize() == 1
@@ -430,7 +427,7 @@ class TestPoller:
@pytest.mark.parametrize("state", ["init", "wait"])
def test_process_waiting_module_build_not_old_enough(
self, create_builder, global_consumer, dbg, state, db_session
self, create_builder, global_consumer, dbg, state
):
""" Test that we do not process young waiting builds. """
@@ -455,13 +452,13 @@ class TestPoller:
assert consumer.incoming.qsize() == 0
# Poll :)
poller.process_waiting_module_builds(db_session)
poller.process_waiting_module_builds()
# Ensure we did *not* process the 9 minute-old build.
assert consumer.incoming.qsize() == 0
def test_process_waiting_module_build_none_found(
self, create_builder, global_consumer, dbg, db_session
self, create_builder, global_consumer, dbg
):
""" Test nothing happens when no module builds are waiting. """
@@ -476,12 +473,12 @@ class TestPoller:
assert consumer.incoming.qsize() == 0
# Poll :)
poller.process_waiting_module_builds(db_session)
poller.process_waiting_module_builds()
# Ensure we did *not* process any of the non-waiting builds.
assert consumer.incoming.qsize() == 0
def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg, db_session):
def test_cleanup_stale_failed_builds(self, create_builder, global_consumer, dbg):
""" Test that one of the two module builds gets to the garbage state when running
cleanup_stale_failed_builds.
"""
@@ -513,7 +510,7 @@ class TestPoller:
# Ensure the queue is empty before we start
assert consumer.incoming.qsize() == 0
poller.cleanup_stale_failed_builds(conf, db_session)
poller.cleanup_stale_failed_builds(conf)
db_session.refresh(module_build_two)
# Make sure module_build_one was transitioned to garbage
assert module_build_one.state == models.BUILD_STATES["garbage"]
@@ -537,7 +534,7 @@ class TestPoller:
])
def test_cleanup_stale_failed_builds_no_components(
self, create_builder, global_consumer, dbg, db_session
self, create_builder, global_consumer, dbg
):
""" Test that a module build without any components built gets to the garbage state when
running cleanup_stale_failed_builds.
@@ -566,7 +563,7 @@ class TestPoller:
# Ensure the queue is empty before we start
assert consumer.incoming.qsize() == 0
poller.cleanup_stale_failed_builds(conf, db_session)
poller.cleanup_stale_failed_builds(conf)
db_session.refresh(module_build_two)
# Make sure module_build_two was transitioned to garbage
assert module_build_two.state == models.BUILD_STATES["garbage"]
@@ -584,7 +581,7 @@ class TestPoller:
"test_state", [models.BUILD_STATES[state] for state in conf.cleanup_stuck_builds_states]
)
def test_cancel_stuck_module_builds(
self, create_builder, global_consumer, dbg, test_state, db_session
self, create_builder, global_consumer, dbg, test_state
):
module_build1 = models.ModuleBuild.get_by_id(db_session, 1)
@@ -612,7 +609,7 @@ class TestPoller:
assert consumer.incoming.qsize() == 0
poller.cancel_stuck_module_builds(conf, db_session)
poller.cancel_stuck_module_builds(conf)
module = db_session.query(models.ModuleBuild).filter_by(state=4).all()
assert len(module) == 1
@@ -623,8 +620,7 @@ class TestPoller:
@pytest.mark.parametrize("btime", (True, False))
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_sync_koji_build_tags(
self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime,
db_session
self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime
):
module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)
# Only module build 1's build target should be deleted.
@@ -660,7 +656,7 @@ class TestPoller:
assert consumer.incoming.qsize() == 0
poller.sync_koji_build_tags(conf, db_session)
poller.sync_koji_build_tags(conf)
assert consumer.incoming.qsize() == len(ret)
@@ -683,7 +679,7 @@ class TestPoller:
@pytest.mark.parametrize("greenwave_result", [True, False])
@patch("module_build_service.utils.greenwave.Greenwave.check_gating")
def test_poll_greenwave(
self, mock_gw, create_builder, global_consumer, dbg, greenwave_result, db_session
self, mock_gw, create_builder, global_consumer, dbg, greenwave_result
):
module_build1 = models.ModuleBuild.get_by_id(db_session, 1)
@@ -695,7 +691,7 @@ class TestPoller:
module_build3 = models.ModuleBuild.get_by_id(db_session, 3)
module_build3.state = models.BUILD_STATES["init"]
module_build4 = make_module_in_db("foo:1:1:1", {}, db_session=db_session)
module_build4 = make_module_in_db("foo:1:1:1", {})
module_build4.state = models.BUILD_STATES["done"]
module_build4.scratch = True
@@ -711,7 +707,7 @@ class TestPoller:
mock_gw.return_value = greenwave_result
poller.poll_greenwave(conf, db_session)
poller.poll_greenwave(conf)
mock_gw.assert_called_once()
modules = models.ModuleBuild.by_state(db_session, "ready")

View File

@@ -5,6 +5,7 @@ import mock
import module_build_service.messaging
import module_build_service.scheduler.handlers.repos
import module_build_service.models
from module_build_service.db_session import db_session
from module_build_service.models import ComponentBuild
from tests import conf, scheduler_init_data
@@ -12,16 +13,15 @@ from tests import conf, scheduler_init_data
class TestRepoDone:
@mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event")
def test_no_match(self, from_repo_done_event, db_session):
def test_no_match(self, from_repo_done_event):
""" Test that when a repo msg hits us and we have no match,
that we do nothing gracefully.
"""
scheduler_init_data(db_session)
scheduler_init_data()
from_repo_done_event.return_value = None
msg = module_build_service.messaging.KojiRepoChange(
"no matches for this...", "2016-some-nonexistent-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
@@ -48,18 +48,17 @@ class TestRepoDone:
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
def test_a_single_match(
self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, db_session
self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea
):
""" Test that when a repo msg hits us and we have a single match.
"""
scheduler_init_data(db_session)
scheduler_init_data()
get_session.return_value = mock.Mock(), "development"
build_fn.return_value = 1234, 1, "", None
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)
build_fn.assert_called_once_with(
artifact_name="tangerine",
source=(
@@ -94,12 +93,11 @@ class TestRepoDone:
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
def test_a_single_match_finalize(
self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer,
db_session
self, connect, build_fn, get_session, ready, list_tasks_fn, mock_gabt, mock_uea, finalizer
):
""" Test that when a repo msg hits us and we have a single match.
"""
scheduler_init_data(db_session, tangerine_state=1)
scheduler_init_data(tangerine_state=1)
get_session.return_value = mock.Mock(), "development"
build_fn.return_value = 1234, 1, "", None
@@ -120,8 +118,7 @@ class TestRepoDone:
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)
finalizer.assert_called_once()
@@ -150,19 +147,18 @@ class TestRepoDone:
"module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.buildroot_connect"
)
def test_a_single_match_build_fail(
self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea, db_session
self, connect, build_fn, config, ready, list_tasks_fn, mock_gabt, mock_uea
):
""" Test that when a KojiModuleBuilder.build fails, the build is
marked as failed with proper state_reason.
"""
scheduler_init_data(db_session)
scheduler_init_data()
config.return_value = mock.Mock(), "development"
build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)
build_fn.assert_called_once_with(
artifact_name="tangerine",
source=(
@@ -176,11 +172,11 @@ class TestRepoDone:
assert component_build.state_reason == "Failed to submit artifact tangerine to Koji"
@mock.patch("module_build_service.scheduler.handlers.repos.log.info")
def test_erroneous_regen_repo_received(self, mock_log_info, db_session):
def test_erroneous_regen_repo_received(self, mock_log_info):
""" Test that when an unexpected KojiRepoRegen message is received, the module doesn't
complete or go to the next build batch.
"""
scheduler_init_data(db_session, 1)
scheduler_init_data(1)
component_build = db_session.query(ComponentBuild).filter_by(package="tangerine").one()
component_build.tagged = False
@@ -189,8 +185,7 @@ class TestRepoDone:
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)
mock_log_info.assert_called_with(
"Ignoring repo regen, because not all components are tagged."
@@ -218,19 +213,18 @@ class TestRepoDone:
return_value={"build": [], "srpm-build": []},
)
def test_failed_component_build(
self, dbg, connect, build_fn, config, ready, list_tasks_fn, db_session
self, dbg, connect, build_fn, config, ready, list_tasks_fn
):
""" Test that when a KojiModuleBuilder.build fails, the build is
marked as failed with proper state_reason.
"""
scheduler_init_data(db_session, 3)
scheduler_init_data(3)
config.return_value = mock.Mock(), "development"
build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
module_build_service.scheduler.handlers.repos.done(config=conf, msg=msg)
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
assert module_build.state == module_build_service.models.BUILD_STATES["failed"]

View File

@@ -10,6 +10,7 @@ import module_build_service.scheduler.handlers.repos
import module_build_service.scheduler.handlers.tags
import module_build_service.models
from tests import conf
from module_build_service.db_session import db_session
import koji
@@ -18,7 +19,7 @@ import koji
class TestTagTagged:
@mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event")
def test_no_matching_module(self, from_tag_change_event, db_session):
def test_no_matching_module(self, from_tag_change_event):
""" Test that when a tag msg hits us and we have no match,
that we do nothing gracefully.
"""
@@ -26,9 +27,9 @@ class TestTagTagged:
msg = module_build_service.messaging.KojiTagChange(
"no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1")
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
def test_no_matching_artifact(self, db_session):
def test_no_matching_artifact(self):
""" Test that when a tag msg hits us and we have no match,
that we do nothing gracefully.
"""
@@ -39,7 +40,7 @@ class TestTagTagged:
"artifact-1.2-1",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -47,7 +48,7 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo(self, create_builder, koji_get_session, dbg, db_session):
def test_newrepo(self, create_builder, koji_get_session, dbg):
"""
Test that newRepo is called in the expected times.
"""
@@ -92,7 +93,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg
config=conf, msg=msg
)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
@@ -102,7 +103,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg
config=conf, msg=msg
)
# newRepo should not be called, because there are still components
@@ -117,7 +118,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg
config=conf, msg=msg
)
# newRepo should not be called, because the component has not been
@@ -132,7 +133,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should be called now - all components have been tagged.
koji_session.newRepo.assert_called_once_with(
@@ -152,7 +153,7 @@ class TestTagTagged:
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_still_building_components(
self, create_builder, koji_get_session, dbg, db_session
self, create_builder, koji_get_session, dbg
):
"""
Test that newRepo is called in the expected times.
@@ -188,7 +189,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -197,7 +198,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should not be called, because perl-List-Compare has not been
# built yet.
@@ -209,7 +210,7 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg, db_session):
def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg):
"""
Test that newRepo is called in the expected times.
"""
@@ -258,7 +259,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg
config=conf, msg=msg
)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
@@ -268,7 +269,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should be called now - all successfully built
# components have been tagged.
@@ -289,7 +290,7 @@ class TestTagTagged:
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_multiple_batches_tagged(
self, create_builder, koji_get_session, dbg, db_session
self, create_builder, koji_get_session, dbg
):
"""
Test that newRepo is called just once and only when all components
@@ -334,7 +335,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -343,7 +344,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should not be called, because there are still components
# to tag.
@@ -357,7 +358,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the second component to final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -366,7 +367,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should not be called, because there are still components
# to tag.
@@ -380,7 +381,7 @@ class TestTagTagged:
"module-build-macros-0.1-1.module+0+b0a1d1f7",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the component from first batch to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -389,7 +390,7 @@ class TestTagTagged:
"module-build-macros-0.1-1.module+0+b0a1d1f7",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should be called now - all components have been tagged.
koji_session.newRepo.assert_called_once_with(
@@ -408,7 +409,7 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg, db_session):
def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg):
"""
Test the component.build_time_only is respected in tag handler.
"""
@@ -461,7 +462,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
assert not koji_session.newRepo.called
# Tag the perl-List-Compare component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -471,7 +472,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -480,7 +481,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# newRepo should be called now - all successfully built
# components have been tagged.
@@ -510,7 +511,7 @@ class TestTagTagged:
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_not_duplicated(
self, create_builder, koji_get_session, dbg, task_state, expect_new_repo, db_session
self, create_builder, koji_get_session, dbg, task_state, expect_new_repo
):
"""
Test that newRepo is not called if a task is already in progress.
@@ -560,7 +561,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -569,7 +570,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the second component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -578,7 +579,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# Tag the second component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -587,7 +588,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, db_session=db_session, msg=msg)
config=conf, msg=msg)
# All components are tagged, newRepo should be called if there are no active tasks.
if expect_new_repo:

View File

@@ -13,7 +13,7 @@ class TestGreenwaveQuery():
clean_database()
@patch("module_build_service.utils.greenwave.requests")
def test_greenwave_query_decision(self, mock_requests, db_session):
def test_greenwave_query_decision(self, mock_requests):
resp_status = 200
resp_content = {
"applicable_policies": ["osci_compose_modules"],
@@ -43,7 +43,7 @@ class TestGreenwaveQuery():
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}],
db_session=db_session)
)
got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")
assert got_response == resp_content
@@ -138,7 +138,7 @@ class TestGreenwaveQuery():
@pytest.mark.parametrize("policies_satisfied", (True, False))
@patch("module_build_service.utils.greenwave.requests")
def test_greenwave_check_gating(self, mock_requests, policies_satisfied, db_session):
def test_greenwave_check_gating(self, mock_requests, policies_satisfied):
resp_status = 200
policies_content = {
"policies": [
@@ -165,7 +165,7 @@ class TestGreenwaveQuery():
"requires": {"platform": ["el8"]},
"buildrequires": {"platform": ["el8"]},
}],
db_session=db_session)
)
result = greenwave.check_gating(fake_build)
assert result == policies_satisfied

View File

@@ -112,7 +112,7 @@ class TestGetModulemdsFromUrsineContent:
clean_database()
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession, db_session):
def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):
koji_session = ClientSession.return_value
# No module koji_tag in ursine content yet. This will result in empty
@@ -123,12 +123,12 @@ class TestGetModulemdsFromUrsineContent:
"url": "http://example.com/repos/tag-4-build/latest/$arch/",
}]
modulemds = ursine.get_modulemds_from_ursine_content(db_session, "tag")
modulemds = ursine.get_modulemds_from_ursine_content("tag")
assert [] == modulemds
@patch.object(conf, "koji_tag_prefixes", new=["module"])
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_modulemds(self, ClientSession, db_session):
def test_get_modulemds(self, ClientSession):
koji_session = ClientSession.return_value
# Ensure to to get build tag for further query of ursine content.
@@ -166,15 +166,15 @@ class TestGetModulemdsFromUrsineContent:
mmd_name1s2020c = make_module_in_db(
"name1:s:2020:c",
xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}},
db_session=db_session)
)
mmd_name2s2021c = make_module_in_db(
"name2:s:2021:c",
xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}},
db_session=db_session)
)
koji_tag = "tag" # It's ok to use arbitrary tag name.
with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):
modulemds = ursine.get_modulemds_from_ursine_content(db_session, koji_tag)
modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)
test_nsvcs = [item.get_nsvc() for item in modulemds]
test_nsvcs.sort()
@@ -192,14 +192,14 @@ class TestRecordStreamCollisionModules:
@patch.object(conf, "base_module_names", new=["platform"])
@patch.object(ursine, "find_stream_collision_modules")
def test_nothing_changed_if_no_base_module_is_in_buildrequires(
self, find_stream_collision_modules, db_session
self, find_stream_collision_modules
):
xmd = {"mbs": {"buildrequires": {"modulea": {"stream": "master"}}}}
fake_mmd = make_module("name1:s:2020:c", xmd=xmd)
original_xmd = fake_mmd.get_xmd()
with patch.object(ursine, "log") as log:
ursine.handle_stream_collision_modules(db_session, fake_mmd)
ursine.handle_stream_collision_modules(fake_mmd)
assert 2 == log.info.call_count
find_stream_collision_modules.assert_not_called()
@@ -208,7 +208,7 @@ class TestRecordStreamCollisionModules:
@patch.object(conf, "base_module_names", new=["platform"])
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_mark_handled_even_if_no_modules_in_ursine_content(
self, get_modulemds_from_ursine_content, db_session
self, get_modulemds_from_ursine_content
):
xmd = {
"mbs": {
@@ -224,7 +224,7 @@ class TestRecordStreamCollisionModules:
get_modulemds_from_ursine_content.return_value = []
with patch.object(ursine, "log") as log:
ursine.handle_stream_collision_modules(db_session, fake_mmd)
ursine.handle_stream_collision_modules(fake_mmd)
assert 2 == log.info.call_count
# Ensure stream_collision_modules is set.
@@ -237,7 +237,7 @@ class TestRecordStreamCollisionModules:
@patch("module_build_service.resolver.GenericResolver.create")
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_add_collision_modules(
self, ClientSession, resolver_create, get_modulemds_from_ursine_content, db_session
self, ClientSession, resolver_create, get_modulemds_from_ursine_content
):
xmd = {
"mbs": {
@@ -255,7 +255,7 @@ class TestRecordStreamCollisionModules:
}
fake_mmd = make_module("name1:s:2020:c", xmd=xmd)
def mock_get_ursine_modulemds(db_session, koji_tag):
def mock_get_ursine_modulemds(koji_tag):
if koji_tag == "module-rhel-8.0-build":
return [
# This is the one
@@ -304,7 +304,7 @@ class TestRecordStreamCollisionModules:
koji_session = ClientSession.return_value
koji_session.listTaggedRPMS.side_effect = mock_listTaggedRPMS
ursine.handle_stream_collision_modules(db_session, fake_mmd)
ursine.handle_stream_collision_modules(fake_mmd)
xmd = fake_mmd.get_xmd()
buildrequires = xmd["mbs"]["buildrequires"]
@@ -326,13 +326,13 @@ class TestFindStreamCollisionModules:
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_no_modulemds_found_from_ursine_content(
self, get_modulemds_from_ursine_content, db_session
self, get_modulemds_from_ursine_content
):
get_modulemds_from_ursine_content.return_value = []
assert not ursine.find_stream_collision_modules(db_session, {}, "koji_tag")
assert not ursine.find_stream_collision_modules({}, "koji_tag")
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_no_collisions_found(self, get_modulemds_from_ursine_content, db_session):
def test_no_collisions_found(self, get_modulemds_from_ursine_content):
xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}
get_modulemds_from_ursine_content.return_value = [
make_module("moduler:1:1:c1"),
@@ -340,10 +340,10 @@ class TestFindStreamCollisionModules:
make_module("modulet:3:1:c3"),
]
assert [] == ursine.find_stream_collision_modules(
db_session, xmd_mbs_buildrequires, "koji_tag")
xmd_mbs_buildrequires, "koji_tag")
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_collision_modules_are_found(self, get_modulemds_from_ursine_content, db_session):
def test_collision_modules_are_found(self, get_modulemds_from_ursine_content):
xmd_mbs_buildrequires = {"modulea": {"stream": "master"}, "moduleb": {"stream": "10"}}
fake_modules = [
make_module("moduler:1:1:c1"),
@@ -353,5 +353,5 @@ class TestFindStreamCollisionModules:
get_modulemds_from_ursine_content.return_value = fake_modules
modules = ursine.find_stream_collision_modules(
db_session, xmd_mbs_buildrequires, "koji_tag")
xmd_mbs_buildrequires, "koji_tag")
assert [fake_modules[1].get_nsvc()] == modules

View File

@@ -14,6 +14,7 @@ import module_build_service.utils
import module_build_service.scm
from module_build_service import models, conf
from module_build_service.errors import ProgrammingError, ValidationError, UnprocessableEntity
from module_build_service.utils.reuse import get_reusable_module, get_reusable_component
from module_build_service.utils.general import load_mmd
from module_build_service.utils.submit import format_mmd
from tests import (
@@ -27,6 +28,7 @@ import mock
import koji
import pytest
import module_build_service.scheduler.handlers.components
from module_build_service.db_session import db_session
from module_build_service.builder.base import GenericBuilder
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
from module_build_service import Modulemd
@@ -79,7 +81,7 @@ class TestUtilsComponentReuse:
@pytest.mark.parametrize(
"changed_component", ["perl-List-Compare", "perl-Tangerine", "tangerine", None]
)
def test_get_reusable_component_different_component(self, changed_component, db_session):
def test_get_reusable_component_different_component(self, changed_component):
second_module_build = models.ModuleBuild.get_by_id(db_session, 3)
if changed_component:
mmd = second_module_build.mmd()
@@ -92,12 +94,9 @@ class TestUtilsComponentReuse:
db_session.add(second_module_changed_component)
db_session.commit()
plc_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-List-Compare")
pt_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-Tangerine")
tangerine_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "tangerine")
plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")
pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")
tangerine_rv = get_reusable_component(second_module_build, "tangerine")
if changed_component == "perl-List-Compare":
# perl-Tangerine can be reused even though a component in its batch has changed
@@ -121,7 +120,7 @@ class TestUtilsComponentReuse:
assert pt_rv.package == "perl-Tangerine"
assert tangerine_rv.package == "tangerine"
def test_get_reusable_component_different_rpm_macros(self, db_session):
def test_get_reusable_component_different_rpm_macros(self):
second_module_build = models.ModuleBuild.get_by_id(db_session, 3)
mmd = second_module_build.mmd()
buildopts = Modulemd.Buildopts()
@@ -130,18 +129,16 @@ class TestUtilsComponentReuse:
second_module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
plc_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-List-Compare")
plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")
assert plc_rv is None
pt_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-Tangerine")
pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")
assert pt_rv is None
@pytest.mark.parametrize("set_current_arch", [True, False])
@pytest.mark.parametrize("set_database_arch", [True, False])
def test_get_reusable_component_different_arches(
self, set_database_arch, set_current_arch, db_session
self, set_database_arch, set_current_arch
):
second_module_build = models.ModuleBuild.get_by_id(db_session, 3)
@@ -163,14 +160,11 @@ class TestUtilsComponentReuse:
second_module_changed_component.module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
tangerine = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "tangerine")
tangerine = get_reusable_component(second_module_build, "tangerine")
assert bool(tangerine is None) != bool(set_current_arch == set_database_arch)
@pytest.mark.parametrize("rebuild_strategy", models.ModuleBuild.rebuild_strategies.keys())
def test_get_reusable_component_different_buildrequires_stream(
self, rebuild_strategy, db_session
):
def test_get_reusable_component_different_buildrequires_stream(self, rebuild_strategy):
first_module_build = models.ModuleBuild.get_by_id(db_session, 2)
first_module_build.rebuild_strategy = rebuild_strategy
db_session.commit()
@@ -194,18 +188,15 @@ class TestUtilsComponentReuse:
second_module_build.rebuild_strategy = rebuild_strategy
db_session.commit()
plc_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-List-Compare")
pt_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-Tangerine")
tangerine_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "tangerine")
plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")
pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")
tangerine_rv = get_reusable_component(second_module_build, "tangerine")
assert plc_rv is None
assert pt_rv is None
assert tangerine_rv is None
def test_get_reusable_component_different_buildrequires(self, db_session):
def test_get_reusable_component_different_buildrequires(self):
second_module_build = models.ModuleBuild.get_by_id(db_session, 3)
mmd = second_module_build.mmd()
mmd.get_dependencies()[0].add_buildtime_stream("some_module", "master")
@@ -223,20 +214,17 @@ class TestUtilsComponentReuse:
xmd["mbs"]["buildrequires"])
db_session.commit()
plc_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-List-Compare")
plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")
assert plc_rv is None
pt_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "perl-Tangerine")
pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")
assert pt_rv is None
tangerine_rv = module_build_service.utils.get_reusable_component(
db_session, second_module_build, "tangerine")
tangerine_rv = get_reusable_component(second_module_build, "tangerine")
assert tangerine_rv is None
@patch("module_build_service.utils.submit.submit_module_build")
def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit, db_session):
def test_submit_module_build_from_yaml_with_skiptests(self, mock_submit):
"""
Tests local module build from a yaml file with the skiptests option
@@ -277,22 +265,22 @@ class TestUtils:
clean_database()
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_build_arches(self, ClientSession, db_session):
def test_get_build_arches(self, ClientSession):
session = ClientSession.return_value
session.getTag.return_value = {"arches": "ppc64le"}
mmd = load_mmd(read_staged_data("formatted_testmodule"))
r = module_build_service.utils.get_build_arches(db_session, mmd, conf)
r = module_build_service.utils.get_build_arches(mmd, conf)
assert r == ["ppc64le"]
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_build_arches_no_arch_set(self, ClientSession, db_session):
def test_get_build_arches_no_arch_set(self, ClientSession):
"""
When no architecture is set in Koji tag, fallback to conf.arches.
"""
session = ClientSession.return_value
session.getTag.return_value = {"arches": ""}
mmd = load_mmd(read_staged_data("formatted_testmodule"))
r = module_build_service.utils.get_build_arches(db_session, mmd, conf)
r = module_build_service.utils.get_build_arches(mmd, conf)
assert set(r) == set(conf.arches)
@patch(
@@ -300,17 +288,17 @@ class TestUtils:
new_callable=mock.PropertyMock,
return_value=["testmodule"],
)
def test_get_build_arches_koji_tag_arches(self, cfg, db_session):
def test_get_build_arches_koji_tag_arches(self, cfg):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
xmd = mmd.get_xmd()
xmd["mbs"]["koji_tag_arches"] = ["ppc64", "ppc64le"]
mmd.set_xmd(xmd)
r = module_build_service.utils.get_build_arches(db_session, mmd, conf)
r = module_build_service.utils.get_build_arches(mmd, conf)
assert r == ["ppc64", "ppc64le"]
@patch.object(conf, "base_module_arches", new={"platform:xx": ["x86_64", "i686"]})
def test_get_build_arches_base_module_override(self, db_session):
def test_get_build_arches_base_module_override(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
xmd = mmd.get_xmd()
mbs_options = xmd["mbs"] if "mbs" in xmd.keys() else {}
@@ -318,11 +306,11 @@ class TestUtils:
xmd["mbs"] = mbs_options
mmd.set_xmd(xmd)
r = module_build_service.utils.get_build_arches(db_session, mmd, conf)
r = module_build_service.utils.get_build_arches(mmd, conf)
assert r == ["x86_64", "i686"]
@pytest.mark.parametrize("context", ["c1", None])
def test_import_mmd_contexts(self, context, db_session):
def test_import_mmd_contexts(self, context):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
mmd.set_context(context)
@@ -340,7 +328,7 @@ class TestUtils:
assert mmd_context == models.DEFAULT_MODULE_CONTEXT
assert build.context == models.DEFAULT_MODULE_CONTEXT
def test_import_mmd_multiple_dependencies(self, db_session):
def test_import_mmd_multiple_dependencies(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
mmd.add_dependencies(mmd.get_dependencies()[0].copy())
@@ -349,7 +337,7 @@ class TestUtils:
module_build_service.utils.import_mmd(db_session, mmd)
assert str(e.value) == expected_error
def test_import_mmd_no_xmd_buildrequires(self, db_session):
def test_import_mmd_no_xmd_buildrequires(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
xmd = mmd.get_xmd()
del xmd["mbs"]["buildrequires"]
@@ -363,7 +351,7 @@ class TestUtils:
module_build_service.utils.import_mmd(db_session, mmd)
assert str(e.value) == expected_error
def test_import_mmd_minimal_xmd_from_local_repository(self, db_session):
def test_import_mmd_minimal_xmd_from_local_repository(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
xmd = mmd.get_xmd()
xmd["mbs"] = {}
@@ -386,7 +374,7 @@ class TestUtils:
("f-28", "fedora-28", "The disttag_marking cannot contain a dash"),
),
)
def test_import_mmd_base_module(self, stream, disttag_marking, error_msg, db_session):
def test_import_mmd_base_module(self, stream, disttag_marking, error_msg):
clean_database(add_platform_module=False)
mmd = load_mmd(read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), stream)
@@ -402,7 +390,7 @@ class TestUtils:
else:
module_build_service.utils.import_mmd(db_session, mmd)
def test_import_mmd_remove_dropped_virtual_streams(self, db_session):
def test_import_mmd_remove_dropped_virtual_streams(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
# Add some virtual streams
@@ -425,9 +413,7 @@ class TestUtils:
assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams)
assert 0 == db_session.query(models.VirtualStream).filter_by(name="f30").count()
def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules(
self, db_session
):
def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules(self):
mmd = load_mmd(read_staged_data("formatted_testmodule"))
# Add some virtual streams to this module metadata
xmd = mmd.get_xmd()
@@ -459,7 +445,7 @@ class TestUtils:
db_session.refresh(another_module_build)
assert ["f29", "f30"] == sorted(item.name for item in another_module_build.virtual_streams)
def test_get_rpm_release_mse(self, db_session):
def test_get_rpm_release_mse(self):
init_data(contexts=True)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
@@ -470,14 +456,14 @@ class TestUtils:
release_two = module_build_service.utils.get_rpm_release(db_session, build_two)
assert release_two == "module+2+17e35784"
def test_get_rpm_release_platform_stream(self, db_session):
scheduler_init_data(db_session, 1)
def test_get_rpm_release_platform_stream(self):
scheduler_init_data(1)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
release = module_build_service.utils.get_rpm_release(db_session, build_one)
assert release == "module+f28+2+814cfa39"
def test_get_rpm_release_platform_stream_override(self, db_session):
scheduler_init_data(db_session, 1)
def test_get_rpm_release_platform_stream_override(self):
scheduler_init_data(1)
# Set the disttag_marking override on the platform
platform = (
@@ -502,12 +488,12 @@ class TestUtils:
new_callable=mock.PropertyMock,
return_value=["build"],
)
def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn, db_session):
def test_get_rpm_release_metadata_br_stream_override(self, mock_admmn):
"""
Test that when a module buildrequires a module in conf.allowed_privileged_module_names,
and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.
"""
scheduler_init_data(db_session, 1)
scheduler_init_data(1)
metadata_mmd = load_mmd(read_staged_data("build_metadata_module"))
module_build_service.utils.import_mmd(db_session, metadata_mmd)
@@ -531,7 +517,7 @@ class TestUtils:
release = module_build_service.utils.get_rpm_release(db_session, build_one)
assert release == "module+product12+2+814cfa39"
def test_get_rpm_release_mse_scratch(self, db_session):
def test_get_rpm_release_mse_scratch(self):
init_data(contexts=True, scratch=True)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
@@ -542,19 +528,19 @@ class TestUtils:
release_two = module_build_service.utils.get_rpm_release(db_session, build_two)
assert release_two == "scrmod+2+17e35784"
def test_get_rpm_release_platform_stream_scratch(self, db_session):
scheduler_init_data(db_session, 1, scratch=True)
def test_get_rpm_release_platform_stream_scratch(self):
scheduler_init_data(1, scratch=True)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
release = module_build_service.utils.get_rpm_release(db_session, build_one)
assert release == "scrmod+f28+2+814cfa39"
@patch("module_build_service.utils.submit.get_build_arches")
def test_record_module_build_arches(self, get_build_arches, db_session):
def test_record_module_build_arches(self, get_build_arches):
get_build_arches.return_value = ["x86_64", "i686"]
scheduler_init_data(db_session, 1)
scheduler_init_data(1)
build = models.ModuleBuild.get_by_id(db_session, 2)
build.arches = []
module_build_service.utils.record_module_build_arches(build.mmd(), build, db_session)
module_build_service.utils.record_module_build_arches(build.mmd(), build)
arches = {arch.name for arch in build.arches}
assert arches == set(get_build_arches.return_value)
@@ -620,7 +606,7 @@ class TestUtils:
assert mmd_xmd == xmd
@pytest.mark.usefixtures("reuse_shared_userspace_init_data")
def test_get_reusable_component_shared_userspace_ordering(self, db_session):
def test_get_reusable_component_shared_userspace_ordering(self):
"""
        For modules with a lot of components per batch, there is a big chance that
the database will return them in different order than what we have for
@@ -629,9 +615,7 @@ class TestUtils:
"""
old_module = models.ModuleBuild.get_by_id(db_session, 2)
new_module = models.ModuleBuild.get_by_id(db_session, 3)
rv = module_build_service.utils.get_reusable_component(
db_session, new_module, "llvm", previous_module_build=old_module
)
rv = get_reusable_component(new_module, "llvm", previous_module_build=old_module)
assert rv.package == "llvm"
def test_validate_koji_tag_wrong_tag_arg_during_programming(self):
@@ -754,9 +738,7 @@ class TestUtils:
validate_koji_tag_priv_mod_name(builder, "abc")
@patch("module_build_service.scm.SCM")
def test_record_component_builds_duplicate_components(self, mocked_scm, db_session):
clean_database()
def test_record_component_builds_duplicate_components(self, mocked_scm):
# Mock for format_mmd to get components' latest ref
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
mocked_scm.return_value.get_latest.side_effect = [
@@ -795,14 +777,12 @@ class TestUtils:
format_mmd(mmd, module_build.scmurl)
with pytest.raises(UnprocessableEntity) as e:
module_build_service.utils.record_component_builds(
db_session, mmd, module_build, main_mmd=module_build.mmd())
mmd, module_build, main_mmd=module_build.mmd())
assert str(e.value) == error_msg
@patch("module_build_service.scm.SCM")
def test_record_component_builds_set_weight(self, mocked_scm, db_session):
clean_database()
def test_record_component_builds_set_weight(self, mocked_scm):
# Mock for format_mmd to get components' latest ref
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
mocked_scm.return_value.get_latest.side_effect = [
@@ -833,7 +813,8 @@ class TestUtils:
db_session.commit()
format_mmd(mmd, module_build.scmurl)
module_build_service.utils.record_component_builds(db_session, mmd, module_build)
module_build_service.utils.record_component_builds(mmd, module_build)
db_session.commit()
assert module_build.state == models.BUILD_STATES["init"]
db_session.refresh(module_build)
@@ -841,8 +822,7 @@ class TestUtils:
assert c.weight == 1.5
@patch("module_build_service.scm.SCM")
def test_record_component_builds_component_exists_already(self, mocked_scm, db_session):
clean_database()
def test_record_component_builds_component_exists_already(self, mocked_scm):
mocked_scm.return_value.commit = "620ec77321b2ea7b0d67d82992dda3e1d67055b4"
mocked_scm.return_value.get_latest.side_effect = [
"4ceea43add2366d8b8c5a622a2fb563b625b9abf",
@@ -878,7 +858,8 @@ class TestUtils:
db_session.commit()
format_mmd(mmd, module_build.scmurl)
module_build_service.utils.record_component_builds(db_session, mmd, module_build)
module_build_service.utils.record_component_builds(mmd, module_build)
db_session.commit()
mmd = original_mmd.copy("testmodule", "master")
@@ -887,7 +868,7 @@ class TestUtils:
ValidationError,
match=r"Component build .+ of module build .+ already exists in database"):
format_mmd(mmd, module_build.scmurl)
module_build_service.utils.record_component_builds(db_session, mmd, module_build)
module_build_service.utils.record_component_builds(mmd, module_build)
@patch("module_build_service.scm.SCM")
def test_format_mmd_arches(self, mocked_scm):
@@ -929,7 +910,7 @@ class TestUtils:
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.ThreadPool")
def test_format_mmd_update_time_modified(self, tp, mocked_scm, db_session):
def test_format_mmd_update_time_modified(self, tp, mocked_scm):
init_data()
build = models.ModuleBuild.get_by_id(db_session, 2)
@@ -989,14 +970,14 @@ class TestUtils:
is_eol = module_build_service.utils.submit._is_eol_in_pdc("mariadb", "10.1")
assert is_eol
def test_get_prefixed_version_f28(self, db_session):
scheduler_init_data(db_session, 1)
def test_get_prefixed_version_f28(self):
scheduler_init_data(1)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
v = module_build_service.utils.submit.get_prefixed_version(build_one.mmd())
assert v == 2820180205135154
def test_get_prefixed_version_fl701(self, db_session):
scheduler_init_data(db_session, 1)
def test_get_prefixed_version_fl701(self):
scheduler_init_data(1)
build_one = models.ModuleBuild.get_by_id(db_session, 2)
mmd = build_one.mmd()
xmd = mmd.get_xmd()
@@ -1006,13 +987,13 @@ class TestUtils:
assert v == 7000120180205135154
@patch("module_build_service.utils.mse.generate_expanded_mmds")
def test_submit_build_new_mse_build(self, generate_expanded_mmds, db_session):
def test_submit_build_new_mse_build(self, generate_expanded_mmds):
"""
Tests that finished build can be resubmitted in case the resubmitted
build adds new MSE build (it means there are new expanded
buildrequires).
"""
build = make_module_in_db("foo:stream:0:c1", db_session=db_session)
build = make_module_in_db("foo:stream:0:c1")
assert build.state == models.BUILD_STATES["ready"]
mmd1 = build.mmd()
@@ -1037,8 +1018,7 @@ class TestUtils:
new_callable=mock.PropertyMock,
return_value=["^private-.*"],
)
def test_submit_build_scratch_build_only_branches(
self, cfg, generate_expanded_mmds, db_session):
def test_submit_build_scratch_build_only_branches(self, cfg, generate_expanded_mmds):
"""
Tests the "scratch_build_only_branches" config option.
"""
@@ -1143,7 +1123,7 @@ class TestBatches:
DummyModuleBuilder.TAGGED_COMPONENTS = []
GenericBuilder.register_backend_class(KojiModuleBuilder)
def test_start_next_batch_build_reuse(self, default_buildroot_groups, db_session):
def test_start_next_batch_build_reuse(self, default_buildroot_groups):
"""
Tests that start_next_batch_build:
1) Increments module.batch.
@@ -1158,7 +1138,7 @@ class TestBatches:
builder = mock.MagicMock()
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should increase.
assert module_build.batch == 2
@@ -1177,7 +1157,7 @@ class TestBatches:
# the components just once.
for msg in further_work:
if type(msg) == module_build_service.messaging.KojiBuildChange:
module_build_service.scheduler.handlers.components.complete(conf, db_session, msg)
module_build_service.scheduler.handlers.components.complete(conf, msg)
# Since we have reused all the components in the batch, there should
# be fake KojiRepoChange message.
@@ -1188,7 +1168,7 @@ class TestBatches:
@patch("module_build_service.utils.batches.start_build_component")
def test_start_next_batch_build_reuse_some(
self, mock_sbc, default_buildroot_groups, db_session
self, mock_sbc, default_buildroot_groups
):
"""
Tests that start_next_batch_build:
@@ -1201,6 +1181,7 @@ class TestBatches:
"""
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 1
plc_component = models.ComponentBuild.from_component_name(
db_session, "perl-List-Compare", 3)
plc_component.ref = "5ceea46add2366d8b8c5a623a2fb563b625b9abd"
@@ -1209,7 +1190,7 @@ class TestBatches:
builder.recover_orphaned_artifact.return_value = []
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should increase.
assert module_build.batch == 2
@@ -1237,7 +1218,7 @@ class TestBatches:
return_value="all",
)
def test_start_next_batch_build_rebuild_strategy_all(
self, mock_rm, mock_sbc, default_buildroot_groups, db_session
self, mock_rm, mock_sbc, default_buildroot_groups
):
"""
Tests that start_next_batch_build can't reuse any components in the batch because the
@@ -1250,7 +1231,7 @@ class TestBatches:
builder = mock.MagicMock()
builder.recover_orphaned_artifact.return_value = []
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should increase.
assert module_build.batch == 2
@@ -1259,7 +1240,7 @@ class TestBatches:
# Make sure that both components in the batch were submitted
assert len(mock_sbc.mock_calls) == 2
def test_start_build_component_failed_state(self, default_buildroot_groups, db_session):
def test_start_build_component_failed_state(self, default_buildroot_groups):
"""
        Tests whether an exception occurring while building sets the state to failed
"""
@@ -1278,7 +1259,7 @@ class TestBatches:
return_value="only-changed",
)
def test_start_next_batch_build_rebuild_strategy_only_changed(
self, mock_rm, mock_sbc, default_buildroot_groups, db_session
self, mock_rm, mock_sbc, default_buildroot_groups
):
"""
Tests that start_next_batch_build reuses all unchanged components in the batch because the
@@ -1297,7 +1278,7 @@ class TestBatches:
builder = mock.MagicMock()
builder.recover_orphaned_artifact.return_value = []
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should increase
assert module_build.batch == 2
@@ -1327,7 +1308,7 @@ class TestBatches:
# Start the next build batch
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should increase
assert module_build.batch == 3
# Verify that tangerine was reused even though perl-Tangerine was rebuilt in the previous
@@ -1341,7 +1322,7 @@ class TestBatches:
@patch("module_build_service.utils.batches.start_build_component")
def test_start_next_batch_build_smart_scheduling(
self, mock_sbc, default_buildroot_groups, db_session
self, mock_sbc, default_buildroot_groups
):
"""
Tests that components with the longest build time will be scheduled first
@@ -1363,7 +1344,7 @@ class TestBatches:
builder = mock.MagicMock()
builder.recover_orphaned_artifact.return_value = []
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should increase.
assert module_build.batch == 2
@@ -1384,7 +1365,7 @@ class TestBatches:
assert mock_sbc.mock_calls == expected_calls
@patch("module_build_service.utils.batches.start_build_component")
def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups, db_session):
def test_start_next_batch_continue(self, mock_sbc, default_buildroot_groups):
"""
        Tests that start_next_batch_build does not start a new batch when
there are unbuilt components in the current one.
@@ -1399,7 +1380,7 @@ class TestBatches:
builder = mock.MagicMock()
further_work = module_build_service.utils.start_next_batch_build(
conf, module_build, db_session, builder)
conf, module_build, builder)
# Batch number should not increase.
assert module_build.batch == 2
@@ -1408,13 +1389,14 @@ class TestBatches:
# No further work should be returned
assert len(further_work) == 0
def test_start_next_batch_build_repo_building(self, default_buildroot_groups, db_session):
def test_start_next_batch_build_repo_building(self, default_buildroot_groups):
"""
Test that start_next_batch_build does not start new batch when
builder.buildroot_ready() returns False.
"""
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 1
db_session.commit()
builder = mock.MagicMock()
builder.buildroot_ready.return_value = False
@@ -1438,16 +1420,16 @@ class TestLocalBuilds:
def teardown_method(self):
clean_database()
def test_load_local_builds_name(self, conf_system, conf_resultsdir, db_session):
module_build_service.utils.load_local_builds(db_session, "testmodule")
def test_load_local_builds_name(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("testmodule")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith(
"/module-testmodule-master-20170816080816/results")
def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir, db_session):
module_build_service.utils.load_local_builds(db_session, "testmodule:master")
def test_load_local_builds_name_stream(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("testmodule:master")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
@@ -1455,14 +1437,14 @@ class TestLocalBuilds:
"/module-testmodule-master-20170816080816/results")
def test_load_local_builds_name_stream_non_existing(
self, conf_system, conf_resultsdir, db_session
self, conf_system, conf_resultsdir
):
with pytest.raises(RuntimeError):
module_build_service.utils.load_local_builds(db_session, "testmodule:x")
module_build_service.utils.load_local_builds("testmodule:x")
models.ModuleBuild.local_modules(db_session)
def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir, db_session):
module_build_service.utils.load_local_builds(db_session, "testmodule:master:20170816080815")
def test_load_local_builds_name_stream_version(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("testmodule:master:20170816080815")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
@@ -1470,21 +1452,21 @@ class TestLocalBuilds:
"/module-testmodule-master-20170816080815/results")
def test_load_local_builds_name_stream_version_non_existing(
self, conf_system, conf_resultsdir, db_session
self, conf_system, conf_resultsdir
):
with pytest.raises(RuntimeError):
module_build_service.utils.load_local_builds(db_session, "testmodule:master:123")
module_build_service.utils.load_local_builds("testmodule:master:123")
models.ModuleBuild.local_modules(db_session)
def test_load_local_builds_platform(self, conf_system, conf_resultsdir, db_session):
module_build_service.utils.load_local_builds(db_session, "platform")
def test_load_local_builds_platform(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("platform")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
assert local_modules[0].koji_tag.endswith("/module-platform-f28-3/results")
def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir, db_session):
module_build_service.utils.load_local_builds(db_session, "platform:f28")
def test_load_local_builds_platform_f28(self, conf_system, conf_resultsdir):
module_build_service.utils.load_local_builds("platform:f28")
local_modules = models.ModuleBuild.local_modules(db_session)
assert len(local_modules) == 1
@@ -1498,8 +1480,8 @@ class TestOfflineLocalBuilds:
def teardown_method(self):
clean_database()
def test_import_fake_base_module(self, db_session):
module_build_service.utils.import_fake_base_module(db_session, "platform:foo:1:000000")
def test_import_fake_base_module(self):
module_build_service.utils.import_fake_base_module("platform:foo:1:000000")
module_build = models.ModuleBuild.get_build_from_nsvc(
db_session, "platform", "foo", 1, "000000")
assert module_build
@@ -1519,7 +1501,7 @@ class TestOfflineLocalBuilds:
assert set(mmd.get_profile_names()) == {"buildroot", "srpm-buildroot"}
@patch("module_build_service.utils.general.open", create=True, new_callable=mock.mock_open)
def test_import_builds_from_local_dnf_repos(self, patched_open, db_session):
def test_import_builds_from_local_dnf_repos(self, patched_open):
with patch("dnf.Base") as dnf_base:
repo = mock.MagicMock()
repo.repofile = "/etc/yum.repos.d/foo.repo"
@@ -1529,7 +1511,7 @@ class TestOfflineLocalBuilds:
base.repos = {"reponame": repo}
patched_open.return_value.readlines.return_value = ("FOO=bar", "PLATFORM_ID=platform:x")
module_build_service.utils.import_builds_from_local_dnf_repos(db_session)
module_build_service.utils.import_builds_from_local_dnf_repos()
base.read_all_repos.assert_called_once()
repo.load.assert_called_once()
@@ -1544,10 +1526,9 @@ class TestOfflineLocalBuilds:
db_session, "platform", "x", 1, "000000")
assert module_build
def test_import_builds_from_local_dnf_repos_platform_id(self, db_session):
def test_import_builds_from_local_dnf_repos_platform_id(self):
with patch("dnf.Base"):
module_build_service.utils.import_builds_from_local_dnf_repos(
db_session, platform_id="platform:y")
module_build_service.utils.import_builds_from_local_dnf_repos("platform:y")
module_build = models.ModuleBuild.get_build_from_nsvc(
db_session, "platform", "y", 1, "000000")
@@ -1557,7 +1538,7 @@ class TestOfflineLocalBuilds:
@pytest.mark.usefixtures("reuse_component_init_data")
class TestUtilsModuleReuse:
def test_get_reusable_module_when_reused_module_not_set(self, db_session):
def test_get_reusable_module_when_reused_module_not_set(self):
module = db_session.query(models.ModuleBuild)\
.filter_by(name="testmodule")\
.order_by(models.ModuleBuild.id.desc())\
@@ -1567,13 +1548,12 @@ class TestUtilsModuleReuse:
assert not module.reused_module
reusable_module = module_build_service.utils.get_reusable_module(
db_session, module)
reusable_module = get_reusable_module(module)
assert module.reused_module
assert reusable_module.id == module.reused_module_id
def test_get_reusable_module_when_reused_module_already_set(self, db_session):
def test_get_reusable_module_when_reused_module_already_set(self):
modules = db_session.query(models.ModuleBuild)\
.filter_by(name="testmodule")\
.order_by(models.ModuleBuild.id.desc())\
@@ -1587,8 +1567,7 @@ class TestUtilsModuleReuse:
assert build_module.reused_module
assert reused_module == build_module.reused_module
reusable_module = module_build_service.utils.get_reusable_module(
db_session, build_module)
reusable_module = get_reusable_module(build_module)
assert build_module.reused_module
assert reusable_module.id == build_module.reused_module_id
@@ -1599,7 +1578,7 @@ class TestUtilsModuleReuse:
"module_build_service.config.Config.allow_only_compatible_base_modules",
new_callable=mock.PropertyMock,
)
def test_get_reusable_module_use_latest_build(self, cfg, db_session, allow_ocbm):
def test_get_reusable_module_use_latest_build(self, cfg, allow_ocbm):
"""
Test that the `get_reusable_module` tries to reuse the latest module in case when
multiple modules can be reused allow_only_compatible_base_modules is True.
@@ -1654,8 +1633,7 @@ class TestUtilsModuleReuse:
.one()
db_session.commit()
reusable_module = module_build_service.utils.get_reusable_module(
db_session, module)
reusable_module = get_reusable_module(module)
if allow_ocbm:
assert reusable_module.id == latest_module.id
@@ -1675,7 +1653,7 @@ class TestUtilsModuleReuse:
new_callable=mock.PropertyMock, return_value="koji"
)
def test_get_reusable_module_koji_resolver(
self, resolver, ClientSession, cfg, db_session, allow_ocbm):
self, resolver, ClientSession, cfg, allow_ocbm):
"""
Test that get_reusable_module works with KojiResolver.
"""
@@ -1740,7 +1718,6 @@ class TestUtilsModuleReuse:
.filter_by(state=models.BUILD_STATES["build"])\
.one()
reusable_module = module_build_service.utils.get_reusable_module(
db_session, module)
reusable_module = get_reusable_module(module)
assert reusable_module.id == latest_module.id

View File

@@ -5,6 +5,7 @@ import pytest
import module_build_service.utils
from module_build_service import Modulemd, models
from module_build_service.db_session import db_session
from module_build_service.errors import StreamAmbigous
from tests import clean_database, make_module_in_db, init_data, read_staged_data
@@ -29,13 +30,13 @@ class TestUtilsModuleStreamExpansion:
nsvcs = [m.get_nsvc() for m in modules]
return nsvcs
def _generate_default_modules(self, db_session):
def _generate_default_modules(self):
"""
Generates gtk:1, gtk:2, foo:1 and foo:2 modules requiring the
platform:f28 and platform:f29 modules.
"""
platform_f28 = make_module_in_db("platform:f28:0:c10", db_session=db_session)
platform_f29 = make_module_in_db("platform:f29:0:c11", db_session=db_session)
platform_f28 = make_module_in_db("platform:f28:0:c10")
platform_f29 = make_module_in_db("platform:f29:0:c11")
f28_deps = [{
"requires": {"platform": ["f28"]},
"buildrequires": {"platform": ["f28"]},
@@ -44,18 +45,18 @@ class TestUtilsModuleStreamExpansion:
"requires": {"platform": ["f29"]},
"buildrequires": {"platform": ["f29"]},
}]
make_module_in_db("gtk:1:0:c2", f28_deps, base_module=platform_f28, db_session=db_session)
make_module_in_db("gtk:1:0:c3", f29_deps, base_module=platform_f29, db_session=db_session)
make_module_in_db("gtk:2:0:c4", f28_deps, base_module=platform_f28, db_session=db_session)
make_module_in_db("gtk:2:0:c5", f29_deps, base_module=platform_f29, db_session=db_session)
make_module_in_db("foo:1:0:c2", f28_deps, base_module=platform_f28, db_session=db_session)
make_module_in_db("foo:1:0:c3", f29_deps, base_module=platform_f29, db_session=db_session)
make_module_in_db("foo:2:0:c4", f28_deps, base_module=platform_f28, db_session=db_session)
make_module_in_db("foo:2:0:c5", f29_deps, base_module=platform_f29, db_session=db_session)
make_module_in_db("app:1:0:c6", f29_deps, base_module=platform_f29, db_session=db_session)
make_module_in_db("gtk:1:0:c2", f28_deps, base_module=platform_f28)
make_module_in_db("gtk:1:0:c3", f29_deps, base_module=platform_f29)
make_module_in_db("gtk:2:0:c4", f28_deps, base_module=platform_f28)
make_module_in_db("gtk:2:0:c5", f29_deps, base_module=platform_f29)
make_module_in_db("foo:1:0:c2", f28_deps, base_module=platform_f28)
make_module_in_db("foo:1:0:c3", f29_deps, base_module=platform_f29)
make_module_in_db("foo:2:0:c4", f28_deps, base_module=platform_f28)
make_module_in_db("foo:2:0:c5", f29_deps, base_module=platform_f29)
make_module_in_db("app:1:0:c6", f29_deps, base_module=platform_f29)
def test_generate_expanded_mmds_context(self, db_session):
self._generate_default_modules(db_session)
def test_generate_expanded_mmds_context(self):
self._generate_default_modules()
module_build = make_module_in_db(
"app:1:0:c1", [{
"requires": {"gtk": ["1", "2"]},
@@ -64,7 +65,7 @@ class TestUtilsModuleStreamExpansion:
"gtk": ["1", "2"]
},
}],
db_session=db_session)
)
mmds = module_build_service.utils.generate_expanded_mmds(
db_session, module_build.mmd())
contexts = {mmd.get_context() for mmd in mmds}
@@ -157,10 +158,10 @@ class TestUtilsModuleStreamExpansion:
],
)
def test_generate_expanded_mmds_buildrequires(
self, module_deps, stream_ambigous, expected_xmd, expected_buildrequires, db_session
self, module_deps, stream_ambigous, expected_xmd, expected_buildrequires
):
self._generate_default_modules(db_session)
module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)
self._generate_default_modules()
module_build = make_module_in_db("app:1:0:c1", module_deps)
# Check that generate_expanded_mmds raises an exception if stream is ambigous
# and also that it does not raise an exception otherwise.
@@ -250,9 +251,9 @@ class TestUtilsModuleStreamExpansion:
),
],
)
def test_generate_expanded_mmds_requires(self, module_deps, expected, db_session):
self._generate_default_modules(db_session)
module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)
def test_generate_expanded_mmds_requires(self, module_deps, expected):
self._generate_default_modules()
module_build = make_module_in_db("app:1:0:c1", module_deps)
mmds = module_build_service.utils.generate_expanded_mmds(db_session, module_build.mmd())
requires_per_mmd = set()
@@ -342,55 +343,55 @@ class TestUtilsModuleStreamExpansion:
),
],
)
def test_get_required_modules_simple(self, module_deps, expected, db_session):
module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)
self._generate_default_modules(db_session)
def test_get_required_modules_simple(self, module_deps, expected):
module_build = make_module_in_db("app:1:0:c1", module_deps)
self._generate_default_modules()
nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)
assert set(nsvcs) == set(expected)
def _generate_default_modules_recursion(self, db_session):
def _generate_default_modules_recursion(self):
"""
Generates the gtk:1 module requiring foo:1 module requiring bar:1
and lorem:1 modules which require base:f29 module requiring
platform:f29 module :).
"""
base_module = make_module_in_db("platform:f29:0:c11", db_session=db_session)
base_module = make_module_in_db("platform:f29:0:c11")
make_module_in_db(
"gtk:1:0:c2",
[{"requires": {"foo": ["unknown"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"gtk:1:1:c2",
[{"requires": {"foo": ["1"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"foo:1:0:c2",
[{"requires": {"bar": ["unknown"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"foo:1:1:c2",
[{"requires": {"bar": ["1"], "lorem": ["1"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"bar:1:0:c2",
[{"requires": {"base": ["unknown"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"bar:1:1:c2",
[{"requires": {"base": ["f29"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"lorem:1:0:c2",
[{"requires": {"base": ["unknown"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"lorem:1:1:c2",
[{"requires": {"base": ["f29"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
make_module_in_db(
"base:f29:0:c3",
[{"requires": {"platform": ["f29"]}, "buildrequires": {}}],
base_module=base_module, db_session=db_session)
base_module=base_module)
@pytest.mark.parametrize(
"module_deps,expected",
@@ -418,13 +419,13 @@ class TestUtilsModuleStreamExpansion:
),
],
)
def test_get_required_modules_recursion(self, module_deps, expected, db_session):
module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)
self._generate_default_modules_recursion(db_session)
def test_get_required_modules_recursion(self, module_deps, expected):
module_build = make_module_in_db("app:1:0:c1", module_deps)
self._generate_default_modules_recursion()
nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)
assert set(nsvcs) == set(expected)
def _generate_default_modules_modules_multiple_stream_versions(self, db_session):
def _generate_default_modules_modules_multiple_stream_versions(self):
"""
Generates the gtk:1 module requiring foo:1 module requiring bar:1
and lorem:1 modules which require base:f29 module requiring
@@ -436,17 +437,17 @@ class TestUtilsModuleStreamExpansion:
}]
f290000 = make_module_in_db(
"platform:f29.0.0:0:c11", db_session=db_session, virtual_streams=["f29"])
make_module_in_db("gtk:1:0:c2", f29_dep, base_module=f290000, db_session=db_session)
"platform:f29.0.0:0:c11", virtual_streams=["f29"])
make_module_in_db("gtk:1:0:c2", f29_dep, base_module=f290000)
f290100 = make_module_in_db(
"platform:f29.1.0:0:c11", db_session=db_session, virtual_streams=["f29"])
make_module_in_db("gtk:1:1:c2", f29_dep, base_module=f290100, db_session=db_session)
make_module_in_db("gtk:1:2:c2", f29_dep, base_module=f290100, db_session=db_session)
"platform:f29.1.0:0:c11", virtual_streams=["f29"])
make_module_in_db("gtk:1:1:c2", f29_dep, base_module=f290100)
make_module_in_db("gtk:1:2:c2", f29_dep, base_module=f290100)
f290200 = make_module_in_db(
"platform:f29.2.0:0:c11", db_session=db_session, virtual_streams=["f29"])
make_module_in_db("gtk:1:3:c2", f29_dep, base_module=f290200, db_session=db_session)
"platform:f29.2.0:0:c11", virtual_streams=["f29"])
make_module_in_db("gtk:1:3:c2", f29_dep, base_module=f290200)
@pytest.mark.parametrize(
"module_deps,expected",
@@ -460,13 +461,13 @@ class TestUtilsModuleStreamExpansion:
)
],
)
def test_get_required_modules_stream_versions(self, module_deps, expected, db_session):
module_build = make_module_in_db("app:1:0:c1", module_deps, db_session=db_session)
self._generate_default_modules_modules_multiple_stream_versions(db_session)
def test_get_required_modules_stream_versions(self, module_deps, expected):
module_build = make_module_in_db("app:1:0:c1", module_deps)
self._generate_default_modules_modules_multiple_stream_versions()
nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)
assert set(nsvcs) == set(expected)
def test__get_base_module_mmds(self, db_session):
def test__get_base_module_mmds(self):
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=True)
mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2.yaml"))
@@ -490,7 +491,7 @@ class TestUtilsModuleStreamExpansion:
assert actual == expected
@pytest.mark.parametrize("virtual_streams", (None, ["f29"], ["lp29"]))
def test__get_base_module_mmds_virtual_streams(self, virtual_streams, db_session):
def test__get_base_module_mmds_virtual_streams(self, virtual_streams):
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=True)
mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2"))
@@ -502,9 +503,7 @@ class TestUtilsModuleStreamExpansion:
mmd.remove_dependencies(deps)
mmd.add_dependencies(new_deps)
make_module_in_db(
"platform:lp29.1.1:12:c11",
db_session=db_session, virtual_streams=virtual_streams)
make_module_in_db("platform:lp29.1.1:12:c11", virtual_streams=virtual_streams)
mmds = module_build_service.utils.mse.get_base_module_mmds(db_session, mmd)
if virtual_streams == ["f29"]:
@@ -528,7 +527,7 @@ class TestUtilsModuleStreamExpansion:
"module_build_service.config.Config.allow_only_compatible_base_modules",
new_callable=PropertyMock, return_value=False
)
def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg, db_session):
def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg):
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=["foo28", "foo29", "foo30"])

View File

@@ -3,6 +3,7 @@
import json
from datetime import datetime
from functools import partial
from sqlalchemy.orm import load_only
import module_build_service.scm
@@ -20,9 +21,10 @@ import sqlalchemy
from tests import app, init_data, clean_database, staged_data_filename, make_module_in_db
from tests import read_staged_data, time_assert
from tests.test_scm import base_dir as scm_base_dir
from module_build_service.db_session import db_session
from module_build_service.errors import UnprocessableEntity
from module_build_service.models import ModuleBuild, BUILD_STATES, ComponentBuild
from module_build_service import db, version
from module_build_service import version
import module_build_service.config as mbs_config
import module_build_service.scheduler.handlers.modules
import module_build_service.utils.submit
@@ -364,13 +366,13 @@ class TestViews:
checking_build_id = 3
# Get component build ids dynamically rather than hardcode inside expected output.
component_builds = sorted(
cb.id for cb in ModuleBuild.query.get(checking_build_id).component_builds
)
component_build_ids = db_session.query(ComponentBuild).filter(
ComponentBuild.module_id == checking_build_id
).order_by(ComponentBuild.id).options(load_only("id")).all()
expected = [
{
"component_builds": component_builds,
"component_builds": [cb.id for cb in component_build_ids],
"context": "3a4057d2",
"id": checking_build_id,
"koji_tag": "module-nginx-1.2",
@@ -556,14 +558,13 @@ class TestViews:
assert data["state_trace"][0]["state_name"] == "wait"
assert data["state_url"], "/module-build-service/1/component-builds/3"
def test_query_component_builds_trace_is_serialized_with_none_state(self, db_session):
def test_query_component_builds_trace_is_serialized_with_none_state(self):
# Beside the module builds and their component builds created already
# in setup_method, some new component builds with None state must be
# created for this test to ensure the extended_json works well to
# serialize component build trace correctly.
module_build = make_module_in_db(
"cool-module:10:201907291454:c1", db_session=db_session)
module_build = make_module_in_db("cool-module:10:201907291454:c1")
component_release = get_rpm_release(db_session, module_build)
# No state is set.
@@ -775,10 +776,9 @@ class TestViews:
assert total == 5
def test_query_builds_order_by(self):
build = db.session.query(module_build_service.models.ModuleBuild).filter_by(id=2).one()
build = ModuleBuild.get_by_id(db_session, 2)
build.name = "candy"
db.session.add(build)
db.session.commit()
db_session.commit()
rv = self.client.get("/module-build-service/1/module-builds/?per_page=10&order_by=name")
items = json.loads(rv.data)["items"]
assert items[0]["name"] == "candy"
@@ -786,12 +786,11 @@ class TestViews:
def test_query_builds_order_by_multiple(self):
init_data(data_size=1, multiple_stream_versions=True)
platform_f28 = module_build_service.models.ModuleBuild.get_by_id(db.session, 1)
platform_f28 = ModuleBuild.get_by_id(db_session, 1)
platform_f28.version = "150"
db.session.add(platform_f28)
db.session.commit()
db_session.commit()
# Simply assert the order of all module builds
page_size = ModuleBuild.query.count()
page_size = db_session.query(ModuleBuild).count()
rv = self.client.get(
"/module-build-service/1/module-builds/?order_desc_by=stream_version"
"&order_desc_by=version&per_page={}".format(page_size)
@@ -800,7 +799,7 @@ class TestViews:
actual_ids = [item["id"] for item in items]
expected_ids = [
build.id for build in ModuleBuild.query.order_by(
build.id for build in db_session.query(ModuleBuild).order_by(
ModuleBuild.stream_version.desc(),
sqlalchemy.cast(ModuleBuild.version, sqlalchemy.BigInteger).desc()
).all()
@@ -857,8 +856,8 @@ class TestViews:
def test_query_base_module_br_filters(self):
mmd = load_mmd(read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
import_mmd(db.session, mmd)
platform_f300103 = ModuleBuild.query.filter_by(stream="f30.1.3").one()
import_mmd(db_session, mmd)
platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
build = ModuleBuild(
name="testmodule",
stream="master",
@@ -877,8 +876,8 @@ class TestViews:
modulemd=read_staged_data("testmodule"),
)
build.buildrequires.append(platform_f300103)
db.session.add(build)
db.session.commit()
db_session.add(build)
db_session.commit()
# Query by NSVC
rv = self.client.get(
"/module-build-service/1/module-builds/?base_module_br=platform:f28:3:00000000")
@@ -973,7 +972,7 @@ class TestViews:
module_build_service.utils.load_mmd(data["modulemd"])
# Make sure the buildrequires entry was created
module = ModuleBuild.get_by_id(db.session, 8)
module = ModuleBuild.get_by_id(db_session, 8)
assert len(module.buildrequires) == 1
assert module.buildrequires[0].name == "platform"
assert module.buildrequires[0].stream == "f28"
@@ -1256,14 +1255,14 @@ class TestViews:
@patch("module_build_service.auth.get_user", return_value=other_user)
def test_cancel_build_already_failed(self, mocked_get_user):
module = ModuleBuild.query.filter_by(id=7).one()
module = ModuleBuild.get_by_id(db_session, 7)
module.state = 4
db.session.add(module)
db.session.commit()
db_session.commit()
rv = self.client.patch(
"/module-build-service/1/module-builds/7", data=json.dumps({"state": "failed"}))
data = json.loads(rv.data)
data = json.loads(rv.data)
assert data["status"] == 403
assert data["error"] == "Forbidden"
@@ -1434,7 +1433,7 @@ class TestViews:
rv = self.client.post("/module-build-service/1/module-builds/", data=json.dumps(data))
result = json.loads(rv.data)
build = ModuleBuild.query.filter(ModuleBuild.id == result["id"]).one()
build = ModuleBuild.get_by_id(db_session, result["id"])
assert (build.owner == result["owner"] == "foo") is True
@patch("module_build_service.auth.get_user", return_value=("svc_account", set()))
@@ -2039,7 +2038,7 @@ class TestViews:
# assertion later.
br_modulea = dict(stream="6", version="1", context="1234")
br_moduleb = dict(stream="10", version="1", context="5678")
build = ModuleBuild.query.first()
build = db_session.query(ModuleBuild).first()
mmd = build.mmd()
xmd = mmd.get_xmd()
mbs = xmd.setdefault("mbs", {})
@@ -2048,7 +2047,7 @@ class TestViews:
buildrequires["moduleb"] = br_moduleb
mmd.set_xmd(xmd)
build.modulemd = mmd_to_str(mmd)
db.session.commit()
db_session.commit()
rv = self.client.get("/module-build-service/1/module-builds/{}".format(build.id))
data = json.loads(rv.data)
@@ -2111,7 +2110,7 @@ class TestViews:
module_build_service.utils.load_mmd(data["modulemd"])
# Make sure the buildrequires entry was created
module = ModuleBuild.get_by_id(db.session, 8)
module = ModuleBuild.get_by_id(db_session, 8)
assert len(module.buildrequires) == 1
assert module.buildrequires[0].name == "platform"
assert module.buildrequires[0].stream == "f28"
@@ -2207,7 +2206,7 @@ class TestViews:
module_build_service.utils.load_mmd(data["modulemd"])
# Make sure the buildrequires entry was created
module = ModuleBuild.get_by_id(db.session, 8)
module = ModuleBuild.get_by_id(db_session, 8)
assert len(module.buildrequires) == 1
assert module.buildrequires[0].name == "platform"
assert module.buildrequires[0].stream == "f28"
@@ -2307,7 +2306,7 @@ class TestViews:
xmd = platform_mmd.get_xmd()
xmd["mbs"]["virtual_streams"] = ["el8"]
platform_mmd.set_xmd(xmd)
import_mmd(db.session, platform_mmd)
import_mmd(db_session, platform_mmd)
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -2350,7 +2349,7 @@ class TestViews:
# Create a platform for the override so the build submission succeeds
platform_mmd = load_mmd(read_staged_data('platform'))
platform_mmd = platform_mmd.copy(platform_mmd.get_module_name(), "product1.3")
import_mmd(db.session, platform_mmd)
import_mmd(db_session, platform_mmd)
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -2389,7 +2388,7 @@ class TestViews:
init_data(data_size=1, multiple_stream_versions=True)
platform_mmd = load_mmd(read_staged_data("platform"))
platform_mmd = platform_mmd.copy(platform_mmd.get_module_name(), "el8.0.0")
import_mmd(db.session, platform_mmd)
import_mmd(db_session, platform_mmd)
FakeSCM(
mocked_scm,
@@ -2450,7 +2449,7 @@ class TestViews:
xmd = mmd.get_xmd()
xmd["mbs"]["virtual_streams"] = ["el8"]
mmd.set_xmd(xmd)
import_mmd(db.session, mmd)
import_mmd(db_session, mmd)
# Use a testmodule that buildrequires platform:el8
FakeSCM(
@@ -2610,7 +2609,7 @@ class TestViews:
# Create the required platforms
for stream in ("el8.0.0", "el8.0.0.z", "el8.2.1", "el8.2.1.z"):
mmd = mmd.copy(mmd.get_module_name(), stream)
import_mmd(db.session, mmd)
import_mmd(db_session, mmd)
# Use a testmodule that buildrequires platform:el8.0.0 or platform:el8.2.1
FakeSCM(
@@ -2655,11 +2654,11 @@ class TestViews:
self, mocked_scm, mocked_get_user, reuse_components_from,
):
"""Test a build submission using the reuse_components_from parameter."""
module_to_reuse = db.session.query(ModuleBuild).get(7)
module_to_reuse = ModuleBuild.get_by_id(db_session, 7)
module_to_reuse.state = BUILD_STATES["ready"]
for c in module_to_reuse.component_builds:
c.state = koji.BUILD_STATES["COMPLETE"]
db.session.commit()
db_session.commit()
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -2765,11 +2764,11 @@ class TestLogMessageViews:
init_data(2)
self.module_id = 2
self.component_id = 1
self.module_build = db.session.query(ModuleBuild).filter_by(id=self.module_id).first()
self.module_build.log_message(db.session, "Build-1 msg")
self.module_build.log_message(db.session, "Build-2 msg")
self.module_build = ModuleBuild.get_by_id(db_session, self.module_id)
self.module_build.log_message(db_session, "Build-1 msg")
self.module_build.log_message(db_session, "Build-2 msg")
self.component_build = self.module_build.component_builds[0]
self.component_build.log_message(db.session, "Component-1 msg")
self.component_build.log_message(db_session, "Component-1 msg")
def test_view_log_messages_for_module_builds(self):
url = "/module-build-service/1/module-builds/{module_id}/messages".format(