Separate use of database sessions

This patch separates the use of database sessions in the different MBS
components so that they are not mixed together.

In general, MBS components can be divided into the REST API (implemented
with Flask), non-REST API code including the backend build workflow
(implemented as a fedmsg consumer on top of fedmsg-hub and running
independently), and the library code shared by them. As a result, there are
two kinds of database session used in MBS: one is created and managed by
Flask-SQLAlchemy, and the other is created directly from the SQLAlchemy
Session API. The goal of this patch is to ensure that the right session
object is used in the right place.

All the changes follow these rules:

* REST API related code uses the session object db.session created and
  managed by Flask-SQLAlchemy.
* Non-REST API related code uses the session object created with SQLAlchemy
  Session API. Function make_db_session does that.
* Shared code avoids creating a new session object as much as possible.
  Instead, it accepts an argument db_session.

The first two rules are applicable to tests as well.

Major changes:

* Switch tests back to run with a file-based SQLite database.
* make_session is renamed to make_db_session and SQLAlchemy connection pool
  options are applied for PostgreSQL backend.
* Frontend Flask related code uses db.session
* Code shared by the REST API and the backend build workflow accepts a
  SQLAlchemy session object as an argument. For example, a resolver class is
  constructed with a database session, and some functions accept a database
  session argument.
* Build workflow related code uses the session object returned from
  make_db_session and ensures db.session is not used.
* Only tests for views use db.session, and other tests use db_session fixture
  to access database.
* All arguments named session that are used for database access are renamed
  to db_session.
* Functions model_tests_init_data, reuse_component_init_data and
  reuse_shared_userspace_init_data, which create fixture data for
  tests, are converted into pytest fixtures; previously they were
  plain functions called inside setup_method or a test method. The
  reason for this conversion is to use the ``db_session`` fixture
  rather than create a new session. That also benefits the whole test
  suite by reducing the number of SQLAlchemy session objects.

Signed-off-by: Chenxiong Qi <cqi@redhat.com>
This commit is contained in:
Chenxiong Qi
2019-07-12 23:43:17 +08:00
parent 64698fbde8
commit 3878affa41
54 changed files with 2692 additions and 2454 deletions

View File

@@ -34,7 +34,7 @@ from module_build_service import db
from module_build_service.utils import get_rpm_release, import_mmd, mmd_to_str
from module_build_service.config import init_config
from module_build_service.models import (
ModuleBuild, ComponentBuild, VirtualStream, make_session, BUILD_STATES,
ModuleBuild, ComponentBuild, VirtualStream, make_db_session, BUILD_STATES,
)
from module_build_service import Modulemd
@@ -110,6 +110,17 @@ patch_zeromq_time_sleep()
def clean_database(add_platform_module=True, add_default_arches=True):
"""Initialize the test database
This function is responsible for dropping all the data in the database and
recreating all the tables from scratch.
Please note that, this function relies on database objects managed by
Flask-SQLAlchemy.
"""
# Ensure all pending transactions are committed and do not block subsequent
# DML on tables.
# TODO: Should the code be fixed that forget to commit?
db.session.commit()
db.drop_all()
db.create_all()
@@ -153,7 +164,7 @@ def init_data(data_size=10, contexts=False, multiple_stream_versions=None, scrat
# Just to possibly confuse tests by adding another base module.
mmd = mmd.copy("bootstrap", stream)
import_mmd(db.session, mmd)
with make_session(conf) as db_session:
with make_db_session(conf) as db_session:
_populate_data(db_session, data_size, contexts=contexts, scratch=scratch)
@@ -203,34 +214,35 @@ def _populate_data(db_session, data_size=10, contexts=False, scratch=False):
db_session.add(build_one)
db_session.commit()
build_one_component_release = get_rpm_release(build_one)
db_session.add(ComponentBuild(
package="nginx",
scmurl="git://pkgs.domain.local/rpms/nginx?"
"#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3",
format="rpms",
task_id=12312345 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="nginx-1.10.1-2.{0}".format(build_one_component_release),
batch=1,
module_id=2 + index * 3,
tagged=True,
tagged_in_final=True,
))
db_session.add(ComponentBuild(
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_nginx_1_2.src.rpm",
format="rpms",
task_id=12312321 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_one_component_release),
batch=2,
module_id=2 + index * 3,
tagged=True,
tagged_in_final=True,
))
build_one_component_release = get_rpm_release(db_session, build_one)
db_session.add_all([
ComponentBuild(
package="nginx",
scmurl="git://pkgs.domain.local/rpms/nginx?"
"#ga95886c8a443b36a9ce31abda1f9bed22f2f8c3",
format="rpms",
task_id=12312345 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="nginx-1.10.1-2.{0}".format(build_one_component_release),
batch=1,
module_id=2 + index * 3,
tagged=True,
tagged_in_final=True),
ComponentBuild(
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_nginx_1_2.src.rpm",
format="rpms",
task_id=12312321 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_one_component_release),
batch=2,
module_id=2 + index * 3,
tagged=True,
tagged_in_final=True)
])
db_session.commit()
build_two = ModuleBuild(
@@ -255,33 +267,32 @@ def _populate_data(db_session, data_size=10, contexts=False, scratch=False):
db_session.add(build_two)
db_session.commit()
build_two_component_release = get_rpm_release(build_two)
db_session.add(ComponentBuild(
package="postgresql",
scmurl="git://pkgs.domain.local/rpms/postgresql"
"?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3",
format="rpms",
task_id=2433433 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="postgresql-9.5.3-4.{0}".format(build_two_component_release),
batch=2,
module_id=3 + index * 3,
tagged=True,
tagged_in_final=True,
))
db_session.add(ComponentBuild(
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_postgresql_1_2.src.rpm",
format="rpms",
task_id=47383993 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_two_component_release),
batch=1,
module_id=3 + index * 3,
))
build_two_component_release = get_rpm_release(db_session, build_two)
db_session.add_all([
ComponentBuild(
package="postgresql",
scmurl="git://pkgs.domain.local/rpms/postgresql"
"?#dc95586c4a443b26a9ce38abda1f9bed22f2f8c3",
format="rpms",
task_id=2433433 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="postgresql-9.5.3-4.{0}".format(build_two_component_release),
batch=2,
module_id=3 + index * 3,
tagged=True,
tagged_in_final=True),
ComponentBuild(
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_postgresql_1_2.src.rpm",
format="rpms",
task_id=47383993 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_two_component_release),
batch=1,
module_id=3 + index * 3)
])
db_session.commit()
build_three = ModuleBuild(
@@ -304,34 +315,32 @@ def _populate_data(db_session, data_size=10, contexts=False, scratch=False):
db_session.add(build_three)
db_session.commit()
build_three_component_release = get_rpm_release(build_three)
db_session.add(ComponentBuild(
package="rubygem-rails",
scmurl="git://pkgs.domain.local/rpms/rubygem-rails"
"?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3",
format="rpms",
task_id=2433433 + index,
state=koji.BUILD_STATES["FAILED"],
nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release),
batch=2,
module_id=4 + index * 3,
))
db_session.add(ComponentBuild(
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_testmodule_1_2.src.rpm",
format="rpms",
task_id=47383993 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_three_component_release),
batch=1,
module_id=4 + index * 3,
tagged=True,
build_time_only=True,
))
build_three_component_release = get_rpm_release(db_session, build_three)
db_session.add_all([
ComponentBuild(
package="rubygem-rails",
scmurl="git://pkgs.domain.local/rpms/rubygem-rails"
"?#dd55886c4a443b26a9ce38abda1f9bed22f2f8c3",
format="rpms",
task_id=2433433 + index,
state=koji.BUILD_STATES["FAILED"],
nvr="postgresql-9.5.3-4.{0}".format(build_three_component_release),
batch=2,
module_id=4 + index * 3),
ComponentBuild(
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosWZUPeK/SRPMS/"
"module-build-macros-0.1-1.module_testmodule_1_2.src.rpm",
format="rpms",
task_id=47383993 + index,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-01-1.{0}".format(build_three_component_release),
batch=1,
module_id=4 + index * 3,
tagged=True,
build_time_only=True)
])
db_session.commit()
@@ -374,9 +383,9 @@ def scheduler_init_data(db_session, tangerine_state=None, scratch=False):
arch = db_session.query(module_build_service.models.ModuleArch).get(1)
module_build.arches.append(arch)
build_one_component_release = get_rpm_release(module_build)
build_one_component_release = get_rpm_release(db_session, module_build)
module_build_comp_builds = [
db_session.add_all([
module_build_service.models.ComponentBuild(
module_id=module_build.id,
package="perl-Tangerine",
@@ -434,308 +443,10 @@ def scheduler_init_data(db_session, tangerine_state=None, scratch=False):
tagged=True,
build_time_only=True,
),
]
for c in module_build_comp_builds:
db_session.add(c)
])
db_session.commit()
def reuse_component_init_data():
clean_database()
mmd = load_mmd(read_staged_data("formatted_testmodule"))
build_one = module_build_service.models.ModuleBuild(
name="testmodule",
stream="master",
version='20170109091357',
state=BUILD_STATES["ready"],
ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",
context="78e4a6fd",
koji_tag="module-testmodule-master-20170109091357-78e4a6fd",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
batch=3,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy="changed-and-after",
)
build_one_component_release = get_rpm_release(build_one)
mmd.set_version(int(build_one.version))
xmd = mmd.get_xmd()
xmd["mbs"]["scmurl"] = build_one.scmurl
xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"
mmd.set_xmd(xmd)
build_one.modulemd = mmd_to_str(mmd)
db.session.add(build_one)
db.session.commit()
db.session.refresh(build_one)
platform_br = module_build_service.models.ModuleBuild.get_by_id(db.session, 1)
build_one.buildrequires.append(platform_br)
arch = module_build_service.models.ModuleArch.query.get(1)
build_one.arches.append(arch)
build_one_comp_builds = [
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
task_id=90276227,
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release),
batch=2,
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
task_id=90276228,
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release),
batch=2,
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
task_id=90276315,
state=koji.BUILD_STATES["COMPLETE"],
nvr="tangerine-0.22-3.{0}".format(build_one_component_release),
batch=3,
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",
format="rpms",
task_id=90276181,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{0}".format(build_one_component_release),
batch=1,
tagged=True,
build_time_only=True,
),
]
for c in build_one_comp_builds:
db.session.add(c)
# Commit component builds added to build_one
db.session.commit()
build_two = module_build_service.models.ModuleBuild(
name="testmodule",
stream="master",
version='20170219191323',
state=BUILD_STATES["build"],
ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",
context="c40c156c",
koji_tag="module-testmodule-master-20170219191323-c40c156c",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a",
batch=1,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 19, 16, 8, 18),
time_modified=datetime(2017, 2, 19, 16, 8, 18),
rebuild_strategy="changed-and-after",
)
build_two_component_release = get_rpm_release(build_two)
mmd.set_version(int(build_one.version))
xmd = mmd.get_xmd()
xmd["mbs"]["scmurl"] = build_one.scmurl
xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"
mmd.set_xmd(xmd)
build_two.modulemd = mmd_to_str(mmd)
db.session.add(build_two)
db.session.commit()
db.session.refresh(build_two)
build_two.arches.append(arch)
build_two.buildrequires.append(platform_br)
build_two_comp_builds = [
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
batch=2,
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
),
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
batch=2,
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
),
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
batch=3,
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
),
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170219191323.src.rpm",
format="rpms",
task_id=90276186,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{0}".format(build_two_component_release),
batch=1,
tagged=True,
build_time_only=True,
),
]
for c in build_two_comp_builds:
db.session.add(c)
# Commit component builds added to build_two
db.session.commit()
def reuse_shared_userspace_init_data():
clean_database()
with make_session(conf) as session:
# Create shared-userspace-570, state is COMPLETE, all components
# are properly built.
mmd = load_mmd(read_staged_data("shared-userspace-570"))
module_build = module_build_service.models.ModuleBuild(
name=mmd.get_module_name(),
stream=mmd.get_stream_name(),
version=mmd.get_version(),
build_context="e046b867a400a06a3571f3c71142d497895fefbe",
runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",
state=BUILD_STATES["ready"],
modulemd=mmd_to_str(mmd),
koji_tag="module-shared-userspace-f26-20170601141014-75f92abb",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",
batch=16,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy="changed-and-after",
)
components = [
mmd.get_rpm_component(rpm)
for rpm in mmd.get_rpm_component_names()
]
components.sort(key=lambda x: x.get_buildorder())
previous_buildorder = None
batch = 1
for pkg in components:
# Increment the batch number when buildorder increases.
if previous_buildorder != pkg.get_buildorder():
previous_buildorder = pkg.get_buildorder()
batch += 1
pkgref = mmd.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]
full_url = pkg.get_repository() + "?#" + pkgref
module_build.component_builds.append(
module_build_service.models.ComponentBuild(
package=pkg.get_name(),
format="rpms",
scmurl=full_url,
batch=batch,
ref=pkgref,
state=1,
tagged=True,
tagged_in_final=True,
)
)
session.add(module_build)
session.commit()
# Create shared-userspace-577, state is WAIT, no component built
mmd2 = load_mmd(read_staged_data("shared-userspace-577"))
module_build = module_build_service.models.ModuleBuild(
name=mmd2.get_module_name(),
stream=mmd2.get_stream_name(),
version=mmd2.get_version(),
build_context="e046b867a400a06a3571f3c71142d497895fefbe",
runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",
state=BUILD_STATES["done"],
modulemd=mmd_to_str(mmd2),
koji_tag="module-shared-userspace-f26-20170605091544-75f92abb",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",
batch=0,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy="changed-and-after",
)
components2 = [
mmd2.get_rpm_component(rpm)
for rpm in mmd2.get_rpm_component_names()
]
# Store components to database in different order than for 570 to
# reproduce the reusing issue.
components2.sort(key=lambda x: len(x.get_name()))
components2.sort(key=lambda x: x.get_buildorder())
previous_buildorder = None
batch = 1
for pkg in components2:
# Increment the batch number when buildorder increases.
if previous_buildorder != pkg.get_buildorder():
previous_buildorder = pkg.get_buildorder()
batch += 1
pkgref = mmd2.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]
full_url = pkg.get_repository() + "?#" + pkgref
module_build.component_builds.append(
module_build_service.models.ComponentBuild(
package=pkg.get_name(), format="rpms", scmurl=full_url, batch=batch, ref=pkgref)
)
session.add(module_build)
session.commit()
def make_module(
db_session,
nsvc,
@@ -881,3 +592,25 @@ def make_module(
db_session.commit()
return module_build
def module_build_from_modulemd(yaml):
"""
Create a ModuleBuild object and return. It is not written into database.
Please commit by yourself if necessary.
"""
mmd = load_mmd(yaml)
build = ModuleBuild()
build.name = mmd.get_module_name()
build.stream = mmd.get_stream_name()
build.version = mmd.get_version()
build.state = BUILD_STATES["ready"]
build.modulemd = yaml
build.koji_tag = None
build.batch = 0
build.owner = "some_other_user"
build.time_submitted = datetime(2016, 9, 3, 12, 28, 33)
build.time_modified = datetime(2016, 9, 3, 12, 28, 40)
build.time_completed = None
build.rebuild_strategy = "changed-and-after"
return build

View File

@@ -18,14 +18,19 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import os
import koji
import os
import pytest
from datetime import datetime
import module_build_service
from module_build_service import conf
from module_build_service.models import make_session
from module_build_service.utils.general import mmd_to_str, load_mmd
from tests import read_staged_data
from module_build_service.models import make_db_session, BUILD_STATES
from module_build_service.utils.general import mmd_to_str, load_mmd, get_rpm_release
from tests import clean_database, read_staged_data, module_build_from_modulemd
BASE_DIR = os.path.dirname(__file__)
STAGED_DATA_DIR = os.path.join(BASE_DIR, "staged_data")
@@ -61,7 +66,321 @@ def platform_mmd():
return PLATFORM_MODULEMD
@pytest.fixture(scope="function")
@pytest.fixture()
def db_session():
with make_session(conf) as db_session:
with make_db_session(conf) as db_session:
yield db_session
@pytest.fixture()
def model_tests_init_data(db_session):
"""Initialize data for model tests
This is refactored from tests/test_models/__init__.py, which was able to be
called directly inside setup_method generally.
The reason to convert it to this fixture is to use fixture ``db_session``
rather than create a new one. That would also benefit the whole test suite
to reduce the number of SQLAlchemy session objects.
"""
clean_database()
model_test_data_dir = os.path.join(os.path.dirname(__file__), "test_models", "data")
for filename in os.listdir(model_test_data_dir):
with open(os.path.join(model_test_data_dir, filename), "r") as f:
yaml = f.read()
build = module_build_from_modulemd(yaml)
db_session.add(build)
db_session.commit()
@pytest.fixture()
def reuse_component_init_data(db_session):
clean_database()
mmd = load_mmd(read_staged_data("formatted_testmodule"))
build_one = module_build_service.models.ModuleBuild(
name="testmodule",
stream="master",
version='20170109091357',
state=BUILD_STATES["ready"],
ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",
context="78e4a6fd",
koji_tag="module-testmodule-master-20170109091357-78e4a6fd",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
batch=3,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy="changed-and-after",
)
build_one_component_release = get_rpm_release(db_session, build_one)
mmd.set_version(int(build_one.version))
xmd = mmd.get_xmd()
xmd["mbs"]["scmurl"] = build_one.scmurl
xmd["mbs"]["commit"] = "ff1ea79fc952143efeed1851aa0aa006559239ba"
mmd.set_xmd(xmd)
build_one.modulemd = mmd_to_str(mmd)
db_session.add(build_one)
db_session.commit()
db_session.refresh(build_one)
platform_br = module_build_service.models.ModuleBuild.get_by_id(db_session, 1)
build_one.buildrequires.append(platform_br)
arch = db_session.query(module_build_service.models.ModuleArch).get(1)
build_one.arches.append(arch)
db_session.add_all([
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
task_id=90276227,
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-Tangerine-0.23-1.{0}".format(build_one_component_release),
batch=2,
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
task_id=90276228,
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-List-Compare-0.53-5.{0}".format(build_one_component_release),
batch=2,
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
task_id=90276315,
state=koji.BUILD_STATES["COMPLETE"],
nvr="tangerine-0.22-3.{0}".format(build_one_component_release),
batch=3,
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
tagged=True,
tagged_in_final=True,
),
module_build_service.models.ComponentBuild(
module_id=build_one.id,
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170109091357.src.rpm",
format="rpms",
task_id=90276181,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{0}".format(build_one_component_release),
batch=1,
tagged=True,
build_time_only=True,
),
])
# Commit component builds added to build_one
db_session.commit()
build_two = module_build_service.models.ModuleBuild(
name="testmodule",
stream="master",
version='20170219191323',
state=BUILD_STATES["build"],
ref_build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
runtime_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb0",
build_context="ac4de1c346dcf09ce77d38cd4e75094ec1c08eb1",
context="c40c156c",
koji_tag="module-testmodule-master-20170219191323-c40c156c",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#55f4a0a",
batch=1,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 19, 16, 8, 18),
time_modified=datetime(2017, 2, 19, 16, 8, 18),
rebuild_strategy="changed-and-after",
)
build_two_component_release = get_rpm_release(db_session, build_two)
mmd.set_version(int(build_one.version))
xmd = mmd.get_xmd()
xmd["mbs"]["scmurl"] = build_one.scmurl
xmd["mbs"]["commit"] = "55f4a0a2e6cc255c88712a905157ab39315b8fd8"
mmd.set_xmd(xmd)
build_two.modulemd = mmd_to_str(mmd)
db_session.add(build_two)
db_session.commit()
db_session.refresh(build_two)
build_two.arches.append(arch)
build_two.buildrequires.append(platform_br)
db_session.add_all([
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="perl-Tangerine",
scmurl="https://src.fedoraproject.org/rpms/perl-Tangerine"
"?#4ceea43add2366d8b8c5a622a2fb563b625b9abf",
format="rpms",
batch=2,
ref="4ceea43add2366d8b8c5a622a2fb563b625b9abf",
),
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="perl-List-Compare",
scmurl="https://src.fedoraproject.org/rpms/perl-List-Compare"
"?#76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
format="rpms",
batch=2,
ref="76f9d8c8e87eed0aab91034b01d3d5ff6bd5b4cb",
),
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="tangerine",
scmurl="https://src.fedoraproject.org/rpms/tangerine"
"?#fbed359411a1baa08d4a88e0d12d426fbf8f602c",
format="rpms",
batch=3,
ref="fbed359411a1baa08d4a88e0d12d426fbf8f602c",
),
module_build_service.models.ComponentBuild(
module_id=build_two.id,
package="module-build-macros",
scmurl="/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-"
"macros-0.1-1.module_testmodule_master_20170219191323.src.rpm",
format="rpms",
task_id=90276186,
state=koji.BUILD_STATES["COMPLETE"],
nvr="module-build-macros-0.1-1.{0}".format(build_two_component_release),
batch=1,
tagged=True,
build_time_only=True,
),
])
db_session.commit()
@pytest.fixture()
def reuse_shared_userspace_init_data(db_session):
clean_database()
# Create shared-userspace-570, state is COMPLETE, all components
# are properly built.
mmd = load_mmd(read_staged_data("shared-userspace-570"))
module_build = module_build_service.models.ModuleBuild(
name=mmd.get_module_name(),
stream=mmd.get_stream_name(),
version=mmd.get_version(),
build_context="e046b867a400a06a3571f3c71142d497895fefbe",
runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",
state=BUILD_STATES["ready"],
modulemd=mmd_to_str(mmd),
koji_tag="module-shared-userspace-f26-20170601141014-75f92abb",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",
batch=16,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy="changed-and-after",
)
components = [
mmd.get_rpm_component(rpm)
for rpm in mmd.get_rpm_component_names()
]
components.sort(key=lambda x: x.get_buildorder())
previous_buildorder = None
batch = 1
for pkg in components:
# Increment the batch number when buildorder increases.
if previous_buildorder != pkg.get_buildorder():
previous_buildorder = pkg.get_buildorder()
batch += 1
pkgref = mmd.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]
full_url = pkg.get_repository() + "?#" + pkgref
module_build.component_builds.append(
module_build_service.models.ComponentBuild(
package=pkg.get_name(),
format="rpms",
scmurl=full_url,
batch=batch,
ref=pkgref,
state=1,
tagged=True,
tagged_in_final=True,
)
)
db_session.add(module_build)
db_session.commit()
# Create shared-userspace-577, state is WAIT, no component built
mmd2 = load_mmd(read_staged_data("shared-userspace-577"))
module_build = module_build_service.models.ModuleBuild(
name=mmd2.get_module_name(),
stream=mmd2.get_stream_name(),
version=mmd2.get_version(),
build_context="e046b867a400a06a3571f3c71142d497895fefbe",
runtime_context="50dd3eb5dde600d072e45d4120e1548ce66bc94a",
state=BUILD_STATES["done"],
modulemd=mmd_to_str(mmd2),
koji_tag="module-shared-userspace-f26-20170605091544-75f92abb",
scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#7fea453",
batch=0,
owner="Tom Brady",
time_submitted=datetime(2017, 2, 15, 16, 8, 18),
time_modified=datetime(2017, 2, 15, 16, 19, 35),
time_completed=datetime(2017, 2, 15, 16, 19, 35),
rebuild_strategy="changed-and-after",
)
components2 = [
mmd2.get_rpm_component(rpm)
for rpm in mmd2.get_rpm_component_names()
]
# Store components to database in different order than for 570 to
# reproduce the reusing issue.
components2.sort(key=lambda x: len(x.get_name()))
components2.sort(key=lambda x: x.get_buildorder())
previous_buildorder = None
batch = 1
for pkg in components2:
# Increment the batch number when buildorder increases.
if previous_buildorder != pkg.get_buildorder():
previous_buildorder = pkg.get_buildorder()
batch += 1
pkgref = mmd2.get_xmd()["mbs"]["rpms"][pkg.get_name()]["ref"]
full_url = pkg.get_repository() + "?#" + pkgref
module_build.component_builds.append(
module_build_service.models.ComponentBuild(
package=pkg.get_name(), format="rpms", scmurl=full_url, batch=batch, ref=pkgref)
)
db_session.add(module_build)
db_session.commit()

View File

@@ -35,7 +35,7 @@ import module_build_service.scheduler.consumer
import module_build_service.scheduler.handlers.repos
import module_build_service.utils
from module_build_service.errors import Forbidden
from module_build_service import db, models, conf, build_logs
from module_build_service import models, conf, build_logs
from module_build_service.scheduler import make_simple_stop_condition
from mock import patch, PropertyMock, Mock
@@ -50,7 +50,7 @@ from module_build_service.builder.base import GenericBuilder
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
from module_build_service.messaging import MBSModule
from tests import (
app, reuse_component_init_data, clean_database, read_staged_data, staged_data_filename
app, clean_database, read_staged_data, staged_data_filename
)
base_dir = dirname(dirname(__file__))
@@ -119,7 +119,8 @@ class FakeModuleBuilder(GenericBuilder):
on_get_task_info_cb = None
@module_build_service.utils.validate_koji_tag("tag_name")
def __init__(self, owner, module, config, tag_name, components):
def __init__(self, db_session, owner, module, config, tag_name, components):
self.db_session = db_session
self.module_str = module
self.tag_name = tag_name
self.config = config
@@ -227,7 +228,9 @@ class FakeModuleBuilder(GenericBuilder):
for nvr in artifacts:
# tag_artifacts received a list of NVRs, but the tag message expects the
# component name
artifact = models.ComponentBuild.query.filter_by(nvr=nvr).first().package
from sqlalchemy.orm import load_only
artifact = self.db_session.query(models.ComponentBuild).filter_by(
nvr=nvr).options(load_only("package")).first().package
self._send_tag(artifact, nvr, dest_tag=dest_tag)
@property
@@ -304,7 +307,8 @@ class FakeModuleBuilder(GenericBuilder):
def recover_orphaned_artifact(self, component_build):
msgs = []
if self.INSTANT_COMPLETE:
disttag = module_build_service.utils.get_rpm_release(component_build.module_build)
disttag = module_build_service.utils.get_rpm_release(
self.db_session, component_build.module_build)
# We don't know the version or release, so just use a random one here
nvr = "{0}-1.0-1.{1}".format(component_build.package, disttag)
component_build.state = koji.BUILD_STATES["COMPLETE"]
@@ -418,22 +422,27 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_get_task_info_cb = on_get_task_info_cb
self.p_check_gating = patch(
"module_build_service.utils.greenwave.Greenwave.check_gating",
return_value=True)
self.mock_check_gating = self.p_check_gating.start()
def teardown_method(self, test_method):
self.p_check_gating.stop()
FakeModuleBuilder.reset()
cleanup_moksha()
for i in range(20):
try:
os.remove(build_logs.path(i))
with models.make_db_session(conf) as db_session:
os.remove(build_logs.path(db_session, i))
except Exception:
pass
@pytest.mark.parametrize("mmd_version", [1, 2])
@patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build(
self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg,
hmsc, mmd_version, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, mmd_version, db_session
):
"""
Tests the build of testmodule.yaml using FakeModuleBuilder which
@@ -458,9 +467,10 @@ class TestBuild(BaseTestBuild):
module_build_id = data["id"]
# Check that components are tagged after the batch is built.
tag_groups = []
tag_groups.append(set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]))
tag_groups.append(set(["tangerine-1-1"]))
tag_groups = [
set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]),
set(["tangerine-1-1"]),
]
def on_finalize_cb(cls, succeeded):
assert succeeded is True
@@ -473,10 +483,11 @@ class TestBuild(BaseTestBuild):
# Check that the components are added to buildroot after the batch
# is built.
buildroot_groups = []
buildroot_groups.append(set(["module-build-macros-1-1"]))
buildroot_groups.append(set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]))
buildroot_groups.append(set(["tangerine-1-1"]))
buildroot_groups = [
set(["module-build-macros-1-1"]),
set(["perl-Tangerine-1-1", "perl-List-Compare-1-1"]),
set(["tangerine-1-1"]),
]
def on_buildroot_add_artifacts_cb(cls, artifacts, install):
assert buildroot_groups.pop(0) == set(artifacts)
@@ -487,7 +498,8 @@ class TestBuild(BaseTestBuild):
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in db_session.query(models.ComponentBuild).filter_by(
module_id=module_build_id).all():
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -497,7 +509,7 @@ class TestBuild(BaseTestBuild):
# All components has to be tagged, so tag_groups and buildroot_groups are empty...
assert tag_groups == []
assert buildroot_groups == []
module_build = models.ModuleBuild.query.get(module_build_id)
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module_build.module_builds_trace[0].state == models.BUILD_STATES["init"]
assert module_build.module_builds_trace[1].state == models.BUILD_STATES["wait"]
assert module_build.module_builds_trace[2].state == models.BUILD_STATES["build"]
@@ -506,17 +518,15 @@ class TestBuild(BaseTestBuild):
assert len(module_build.module_builds_trace) == 5
@pytest.mark.parametrize("gating_result", (True, False))
@patch("module_build_service.utils.greenwave.Greenwave.check_gating")
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_no_components(
self, mocked_scm, mocked_get_user, mocked_greenwave, conf_system, dbg,
hmsc, gating_result, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, gating_result, db_session
):
"""
Tests the build of a module with no components
"""
mocked_greenwave.return_value = gating_result
self.mock_check_gating.return_value = gating_result
FakeSCM(
mocked_scm,
"python3",
@@ -537,7 +547,7 @@ class TestBuild(BaseTestBuild):
self.run_scheduler(db_session)
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# Make sure no component builds were registered
assert len(module_build.component_builds) == 0
# Make sure the build is done
@@ -626,11 +636,13 @@ class TestBuild(BaseTestBuild):
data={"yaml": yaml_file},
)
data = json.loads(rv.data)
assert data["id"] == 2
module_build_id = data["id"]
assert module_build_id == 2
self.run_scheduler(db_session)
assert models.ModuleBuild.query.first().state == models.BUILD_STATES["ready"]
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module_build.state == models.BUILD_STATES["ready"]
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
@@ -684,7 +696,7 @@ class TestBuild(BaseTestBuild):
# Because we did not finished single component build and canceled the
# module build, all components and even the module itself should be in
# failed state with state_reason se to cancellation message.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
assert build.state == koji.BUILD_STATES["FAILED"]
assert build.state_reason == "Canceled by Homer J. Simpson."
assert build.module_build.state == models.BUILD_STATES["failed"]
@@ -723,7 +735,7 @@ class TestBuild(BaseTestBuild):
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -778,7 +790,7 @@ class TestBuild(BaseTestBuild):
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
# When this fails, it can mean that num_concurrent_builds
# threshold has been met.
@@ -901,7 +913,7 @@ class TestBuild(BaseTestBuild):
self.run_scheduler(db_session)
for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
# perl-Tangerine is expected to fail as configured in on_build_cb.
if c.package == "perl-Tangerine":
assert c.state == koji.BUILD_STATES["FAILED"]
@@ -961,7 +973,7 @@ class TestBuild(BaseTestBuild):
self.run_scheduler(db_session)
for c in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for c in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
# perl-Tangerine is expected to fail as configured in on_build_cb.
if c.package == "module-build-macros":
assert c.state == koji.BUILD_STATES["COMPLETE"]
@@ -980,6 +992,7 @@ class TestBuild(BaseTestBuild):
# there were failed components in batch 2.
assert c.module_build.batch == 2
@pytest.mark.usefixtures("reuse_component_init_data")
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_reuse_all(
@@ -989,8 +1002,6 @@ class TestBuild(BaseTestBuild):
Tests that we do not try building module-build-macros when reusing all
components in a module build.
"""
reuse_component_init_data()
def on_build_cb(cls, artifact_name, source):
raise ValueError("All components should be reused, not build.")
@@ -1024,7 +1035,9 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb
self.run_scheduler(db_session, msgs=[MBSModule("local module build", 3, 1)])
# Create a dedicated database session for scheduler to avoid hang
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])
reused_component_ids = {
"module-build-macros": None,
@@ -1035,7 +1048,7 @@ class TestBuild(BaseTestBuild):
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=3).all():
for build in models.ModuleBuild.get_by_id(db_session, 3).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -1043,6 +1056,7 @@ class TestBuild(BaseTestBuild):
]
assert build.reused_component_id == reused_component_ids[build.package]
@pytest.mark.usefixtures("reuse_component_init_data")
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_reuse_all_without_build_macros(
@@ -1052,17 +1066,21 @@ class TestBuild(BaseTestBuild):
Tests that we can reuse components even when the reused module does
not have module-build-macros component.
"""
reuse_component_init_data()
db_session.query(models.ComponentBuild).filter_by(package="module-build-macros").delete()
assert (
0 == db_session.query(models.ComponentBuild)
.filter_by(package="module-build-macros")
.count()
)
# Firstly, remove all existing module-build-macros component builds
macros_cb_query = db_session.query(models.ComponentBuild).filter_by(
package="module-build-macros")
db_session.query(models.ComponentBuildTrace).filter(
models.ComponentBuildTrace.component_id.in_(
[cb.id for cb in macros_cb_query.all()]
)
).delete(synchronize_session=False)
macros_cb_query.delete(synchronize_session=False)
db_session.commit()
# Just ensure the success of removal
assert 0 == macros_cb_query.count()
def on_build_cb(cls, artifact_name, source):
raise ValueError("All components should be reused, not build.")
@@ -1096,11 +1114,12 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb
self.run_scheduler(db_session, msgs=[MBSModule("local module build", 3, 1)])
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session, msgs=[MBSModule("local module build", 3, 1)])
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in db_session.query(models.ComponentBuild).filter_by(module_id=3).all():
for build in models.ModuleBuild.get_by_id(db_session, 3).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -1137,6 +1156,10 @@ class TestBuild(BaseTestBuild):
build_one.time_submitted = submitted_time
build_one.time_modified = now
build_one.rebuild_strategy = "changed-and-after"
db_session.add(build_one)
db_session.commit()
# It went from init, to wait, to build, and then failed
mbt_one = models.ModuleBuildTrace(
state_time=submitted_time, state=models.BUILD_STATES["init"]
@@ -1152,55 +1175,53 @@ class TestBuild(BaseTestBuild):
build_one.module_builds_trace.append(mbt_two)
build_one.module_builds_trace.append(mbt_three)
build_one.module_builds_trace.append(mbt_four)
# Successful component
component_one = models.ComponentBuild()
component_one.package = "perl-Tangerine"
component_one.format = "rpms"
component_one.scmurl = "https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master"
component_one.state = koji.BUILD_STATES["COMPLETE"]
component_one.nvr = "perl-Tangerine-0:0.22-2.module+0+d027b723"
component_one.batch = 2
component_one.module_id = 2
component_one.ref = "7e96446223f1ad84a26c7cf23d6591cd9f6326c6"
component_one.tagged = True
component_one.tagged_in_final = True
# Failed component
component_two = models.ComponentBuild()
component_two.package = "perl-List-Compare"
component_two.format = "rpms"
component_two.scmurl = \
"https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master"
component_two.state = koji.BUILD_STATES["FAILED"]
component_two.batch = 2
component_two.module_id = 2
# Component that isn't started yet
component_three = models.ComponentBuild()
component_three.package = "tangerine"
component_three.format = "rpms"
component_three.scmurl = "https://src.stg.fedoraproject.org/rpms/tangerine.git?#master"
component_three.batch = 3
component_three.module_id = 2
# module-build-macros
component_four = models.ComponentBuild()
component_four.package = "module-build-macros"
component_four.format = "rpms"
component_four.state = koji.BUILD_STATES["COMPLETE"]
component_four.scmurl = (
"/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."
"module_testmodule_master_20170109091357.src.rpm"
)
component_four.batch = 1
component_four.module_id = 2
component_four.tagged = True
component_four.build_time_only = True
db.session.add(build_one)
db.session.add(component_one)
db.session.add(component_two)
db.session.add(component_three)
db.session.add(component_four)
db.session.commit()
db.session.expire_all()
db_session.commit()
# Successful component
db_session.add_all([
models.ComponentBuild(
module_id=build_one.id,
package="perl-Tangerine",
format="rpms",
scmurl="https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master",
state=koji.BUILD_STATES["COMPLETE"],
nvr="perl-Tangerine-0:0.22-2.module+0+d027b723",
batch=2,
ref="7e96446223f1ad84a26c7cf23d6591cd9f6326c6",
tagged=True,
tagged_in_final=True),
# Failed component
models.ComponentBuild(
module_id=build_one.id,
package="perl-List-Compare",
format="rpms",
scmurl="https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master",
state=koji.BUILD_STATES["FAILED"],
batch=2),
# Component that isn't started yet
models.ComponentBuild(
module_id=build_one.id,
package="tangerine",
format="rpms",
scmurl="https://src.stg.fedoraproject.org/rpms/tangerine.git?#master",
batch=3),
# module-build-macros
models.ComponentBuild(
module_id=build_one.id,
package="module-build-macros",
format="rpms",
state=koji.BUILD_STATES["COMPLETE"],
scmurl=(
"/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."
"module_testmodule_master_20170109091357.src.rpm"
),
batch=1,
tagged=True,
build_time_only=True),
])
db_session.commit()
db_session.expire_all()
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
@@ -1216,28 +1237,27 @@ class TestBuild(BaseTestBuild):
data = json.loads(rv.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
components = (
models.ComponentBuild
.query
.filter_by(module_id=module_build_id, batch=2)
.order_by(models.ComponentBuild.id)
.all()
db_session.query(models.ComponentBuild)
.filter_by(module_id=module_build_id, batch=2)
.order_by(models.ComponentBuild.id)
.all()
)
# Make sure the build went from failed to wait
assert module_build.state == models.BUILD_STATES["wait"]
assert module_build.state_reason == "Resubmitted by Homer J. Simpson"
# Make sure the state was reset on the failed component
assert components[1].state is None
db.session.expire_all()
db_session.expire_all()
# Run the backend
self.run_scheduler(db_session)
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session)
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(
module_id=module_build_id).all():
for build in module_build.component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -1256,6 +1276,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.INSTANT_COMPLETE = True
now = datetime.utcnow()
submitted_time = now - timedelta(minutes=3)
# Create a module in the failed state
build_one = models.ModuleBuild()
build_one.name = "testmodule"
@@ -1276,6 +1297,9 @@ class TestBuild(BaseTestBuild):
build_one.time_submitted = submitted_time
build_one.time_modified = now
build_one.rebuild_strategy = "changed-and-after"
db_session.add(build_one)
db_session.commit()
# It went from init, to wait, to build, and then failed
mbt_one = models.ModuleBuildTrace(
state_time=submitted_time, state=models.BUILD_STATES["init"])
@@ -1288,46 +1312,45 @@ class TestBuild(BaseTestBuild):
build_one.module_builds_trace.append(mbt_two)
build_one.module_builds_trace.append(mbt_three)
build_one.module_builds_trace.append(mbt_four)
# Components that haven't started yet
component_one = models.ComponentBuild()
component_one.package = "perl-Tangerine"
component_one.format = "rpms"
component_one.scmurl = "https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master"
component_one.batch = 2
component_one.module_id = 2
component_two = models.ComponentBuild()
component_two.package = "perl-List-Compare"
component_two.format = "rpms"
component_two.scmurl = \
"https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master"
component_two.batch = 2
component_two.module_id = 2
component_three = models.ComponentBuild()
component_three.package = "tangerine"
component_three.format = "rpms"
component_three.scmurl = "https://src.stg.fedoraproject.org/rpms/tangerine.git?#master"
component_three.batch = 3
component_three.module_id = 2
# Failed module-build-macros
component_four = models.ComponentBuild()
component_four.package = "module-build-macros"
component_four.format = "rpms"
component_four.state = koji.BUILD_STATES["FAILED"]
component_four.scmurl = (
"/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."
"module_testmodule_master_20180205135154.src.rpm"
)
component_four.batch = 1
component_four.module_id = 2
component_four.build_time_only = True
db.session.add(build_one)
db.session.add(component_one)
db.session.add(component_two)
db.session.add(component_three)
db.session.add(component_four)
db.session.commit()
db.session.expire_all()
db_session.commit()
db_session.expire_all()
# Components that haven't started yet
db_session.add_all([
models.ComponentBuild(
module_id=build_one.id,
package="perl-Tangerine",
format="rpms",
scmurl="https://src.stg.fedoraproject.org/rpms/perl-Tangerine.git?#master",
batch=2),
models.ComponentBuild(
module_id=build_one.id,
package="perl-List-Compare",
format="rpms",
scmurl="https://src.stg.fedoraproject.org/rpms/perl-List-Compare.git?#master",
batch=2),
models.ComponentBuild(
module_id=build_one.id,
package="tangerine",
format="rpms",
scmurl="https://src.stg.fedoraproject.org/rpms/tangerine.git?#master",
batch=3),
# Failed module-build-macros
models.ComponentBuild(
module_id=build_one.id,
package="module-build-macros",
format="rpms",
state=koji.BUILD_STATES["FAILED"],
scmurl=(
"/tmp/module_build_service-build-macrosqr4AWH/SRPMS/module-build-macros-0.1-1."
"module_testmodule_master_20180205135154.src.rpm"
),
batch=1,
build_time_only=True)
])
db_session.commit()
FakeSCM(mocked_scm, "testmodule", "testmodule.yaml", "7fea453")
# Resubmit the failed module
@@ -1342,21 +1365,22 @@ class TestBuild(BaseTestBuild):
data = json.loads(rv.data)
module_build_id = data["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
# Make sure the build went from failed to wait
assert module_build.state == models.BUILD_STATES["wait"]
assert module_build.state_reason == "Resubmitted by Homer J. Simpson"
# Make sure the state was reset on the failed component
for c in module_build.component_builds:
assert c.state is None
db.session.expire_all()
db_session.expire_all()
# Run the backend
self.run_scheduler(db_session)
with models.make_db_session(conf) as scheduler_db_session:
self.run_scheduler(scheduler_db_session)
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
@@ -1374,7 +1398,7 @@ class TestBuild(BaseTestBuild):
FakeSCM(
mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4")
with patch("module_build_service.utils.submit.format_mmd") as mock_format_mmd:
with patch("module_build_service.scheduler.handlers.modules.format_mmd") as mock_format_mmd:
mock_format_mmd.side_effect = Forbidden("Custom component repositories aren't allowed.")
rv = self.client.post(
"/module-build-service/1/module-builds/",
@@ -1389,7 +1413,7 @@ class TestBuild(BaseTestBuild):
cleanup_moksha()
module_build_id = json.loads(rv.data)["id"]
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
module_build = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module_build.state == models.BUILD_STATES["failed"]
assert module_build.state_reason == "Custom component repositories aren't allowed."
assert len(module_build.module_builds_trace) == 2
@@ -1397,7 +1421,7 @@ class TestBuild(BaseTestBuild):
assert module_build.module_builds_trace[1].state == models.BUILD_STATES["failed"]
# Resubmit the failed module
rv = self.client.post(
self.client.post(
"/module-build-service/1/module-builds/",
data=json.dumps({
"branch": "master",
@@ -1410,7 +1434,8 @@ class TestBuild(BaseTestBuild):
module_build = models.ModuleBuild.query.filter_by(id=module_build_id).one()
components = (
models.ComponentBuild.query.filter_by(module_id=module_build_id, batch=2)
db_session.query(models.ComponentBuild)
.filter_by(module_id=module_build_id, batch=2)
.order_by(models.ComponentBuild.id)
.all()
)
@@ -1419,25 +1444,24 @@ class TestBuild(BaseTestBuild):
assert module_build.state_reason == "Resubmitted by Homer J. Simpson"
# Make sure there are no components
assert components == []
db.session.expire_all()
db_session.expire_all()
# Run the backend again
self.run_scheduler(db_session)
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],
models.BUILD_STATES["ready"],
]
@patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_build_resume_init_fail(
self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
):
"""
Tests that resuming the build fails when the build is in init state
@@ -1665,18 +1689,17 @@ class TestBuild(BaseTestBuild):
msg_id="a faked internal message", repo_tag=module.koji_tag + "-build"
)
]
db.session.expire_all()
db_session.expire_all()
# Stop after processing the seeded message
self.run_scheduler(db_session, msgs, lambda message: True)
# Make sure the module build didn't fail so that the poller can resume it later
module = db_session.query(models.ModuleBuild).get(module_build_id)
module = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module.state == models.BUILD_STATES["build"]
@patch("module_build_service.utils.greenwave.Greenwave.check_gating", return_value=True)
@patch("module_build_service.auth.get_user", return_value=user)
@patch("module_build_service.scm.SCM")
def test_submit_br_metadata_only_module(
self, mocked_scm, mocked_get_user, mock_greenwave, conf_system, dbg, hmsc, db_session
self, mocked_scm, mocked_get_user, conf_system, dbg, hmsc, db_session
):
"""
Test that when a build is submitted with a buildrequire without a Koji tag,
@@ -1685,7 +1708,7 @@ class TestBuild(BaseTestBuild):
metadata_mmd = module_build_service.utils.load_mmd(
read_staged_data("build_metadata_module")
)
module_build_service.utils.import_mmd(db.session, metadata_mmd)
module_build_service.utils.import_mmd(db_session, metadata_mmd)
FakeSCM(
mocked_scm,
@@ -1712,7 +1735,7 @@ class TestBuild(BaseTestBuild):
FakeModuleBuilder.on_buildroot_add_repos_cb = on_buildroot_add_repos_cb
self.run_scheduler(db_session)
module = db_session.query(models.ModuleBuild).get(module_build_id)
module = models.ModuleBuild.get_by_id(db_session, module_build_id)
assert module.state == models.BUILD_STATES["ready"]
@@ -1732,7 +1755,8 @@ class TestLocalBuild(BaseTestBuild):
cleanup_moksha()
for i in range(20):
try:
os.remove(build_logs.path(i))
with models.make_db_session(conf) as db_session:
os.remove(build_logs.path(db_session, i))
except Exception:
pass
@@ -1750,8 +1774,7 @@ class TestLocalBuild(BaseTestBuild):
"""
Tests local module build dependency.
"""
# with app.app_context():
module_build_service.utils.load_local_builds(["platform"])
module_build_service.utils.load_local_builds(db_session, ["platform"])
FakeSCM(
mocked_scm,
"testmodule",
@@ -1779,7 +1802,7 @@ class TestLocalBuild(BaseTestBuild):
# All components should be built and module itself should be in "done"
# or "ready" state.
for build in models.ComponentBuild.query.filter_by(module_id=module_build_id).all():
for build in models.ModuleBuild.get_by_id(db_session, module_build_id).component_builds:
assert build.state == koji.BUILD_STATES["COMPLETE"]
assert build.module_build.state in [
models.BUILD_STATES["done"],

View File

@@ -26,7 +26,7 @@ import module_build_service.models
import module_build_service.builder
import module_build_service.resolver
from tests import init_data, db
from tests import init_data
from module_build_service.builder import GenericBuilder
from mock import patch
@@ -35,11 +35,10 @@ from mock import patch
class TestGenericBuilder:
def setup_method(self, test_method):
init_data(1)
self.module = module_build_service.models.ModuleBuild.query.filter_by(id=1).one()
@patch("module_build_service.resolver.DBResolver")
@patch("module_build_service.resolver.GenericResolver")
def test_default_buildroot_groups_cache(self, generic_resolver, resolver):
@patch("module_build_service.builder.base.GenericResolver")
def test_default_buildroot_groups_cache(self, generic_resolver, resolver, db_session):
mbs_groups = {"buildroot": [], "srpm-buildroot": []}
resolver = mock.MagicMock()
@@ -48,27 +47,29 @@ class TestGenericBuilder:
expected_groups = {"build": [], "srpm-build": []}
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
# Call default_buildroot_groups, the result should be cached.
ret = GenericBuilder.default_buildroot_groups(db.session, self.module)
assert ret == expected_groups
resolver.resolve_profiles.assert_called_once()
resolver.resolve_profiles.reset_mock()
module = module_build_service.models.ModuleBuild.get_by_id(db_session, 1)
generic_resolver.create.return_value = resolver
# Call default_buildroot_groups, the result should be cached.
ret = GenericBuilder.default_buildroot_groups(db_session, module)
assert ret == expected_groups
resolver.resolve_profiles.assert_called_once()
resolver.resolve_profiles.reset_mock()
# Now try calling it again to verify resolve_profiles is not called,
# because it is cached.
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
ret = GenericBuilder.default_buildroot_groups(db.session, self.module)
assert ret == expected_groups
resolver.resolve_profiles.assert_not_called()
resolver.resolve_profiles.reset_mock()
generic_resolver.create.return_value = resolver
ret = GenericBuilder.default_buildroot_groups(db_session, module)
assert ret == expected_groups
resolver.resolve_profiles.assert_not_called()
resolver.resolve_profiles.reset_mock()
# And now try clearing the cache and call it again.
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
GenericBuilder.clear_cache(self.module)
ret = GenericBuilder.default_buildroot_groups(db.session, self.module)
assert ret == expected_groups
resolver.resolve_profiles.assert_called_once()
generic_resolver.create.return_value = resolver
GenericBuilder.clear_cache(module)
ret = GenericBuilder.default_buildroot_groups(db_session, module)
assert ret == expected_groups
resolver.resolve_profiles.assert_called_once()
def test_get_build_weights(self):
weights = GenericBuilder.get_build_weights(["httpd", "apr"])

View File

@@ -33,13 +33,13 @@ import module_build_service.messaging
import module_build_service.scheduler.handlers.repos
import module_build_service.models
import module_build_service.builder
from module_build_service import Modulemd, db
from module_build_service import Modulemd
from module_build_service.utils.general import mmd_to_str
import pytest
from mock import patch, MagicMock
from tests import conf, init_data, reuse_component_init_data, clean_database, make_module
from tests import conf, init_data, clean_database, make_module
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
@@ -109,7 +109,6 @@ class TestKojiBuilder:
self.config = mock.Mock()
self.config.koji_profile = conf.koji_profile
self.config.koji_repository_url = conf.koji_repository_url
self.module = module_build_service.models.ModuleBuild.query.filter_by(id=2).one()
self.p_read_config = patch(
"koji.read_config",
@@ -139,9 +138,12 @@ class TestKojiBuilder:
def test_recover_orphaned_artifact_when_tagged(self, db_session):
""" Test recover_orphaned_artifact when the artifact is found and tagged in both tags
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=[],
@@ -184,9 +186,12 @@ class TestKojiBuilder:
def test_recover_orphaned_artifact_when_untagged(self, db_session):
""" Tests recover_orphaned_artifact when the build is found but untagged
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=[],
@@ -227,9 +232,12 @@ class TestKojiBuilder:
def test_recover_orphaned_artifact_when_nothing_exists(self, db_session):
""" Test recover_orphaned_artifact when the build is not found
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=[],
@@ -255,13 +263,15 @@ class TestKojiBuilder:
assert builder.koji_session.tagBuild.call_count == 0
@patch("koji.util")
def test_buildroot_ready(self, mocked_kojiutil):
def test_buildroot_ready(self, mocked_kojiutil, db_session):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
attrs = {"checkForBuilds.return_value": None, "checkForBuilds.side_effect": IOError}
mocked_kojiutil.configure_mock(**attrs)
fake_kmb = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-nginx-1.2",
components=[],
@@ -273,21 +283,25 @@ class TestKojiBuilder:
assert mocked_kojiutil.checkForBuilds.call_count == 3
@pytest.mark.parametrize("blocklist", [False, True])
def test_tagging_already_tagged_artifacts(self, blocklist):
def test_tagging_already_tagged_artifacts(self, blocklist, db_session):
"""
Tests that buildroot_add_artifacts and tag_artifacts do not try to
tag already tagged artifacts
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
if blocklist:
mmd = self.module.mmd()
mmd = module_build.mmd()
xmd = mmd.get_xmd()
xmd["mbs_options"] = {"blocked_packages": ["foo", "bar", "new"]}
mmd.set_xmd(xmd)
self.module.modulemd = mmd_to_str(mmd)
module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-nginx-1.2",
components=[],
@@ -328,10 +342,12 @@ class TestKojiBuilder:
@patch.object(FakeKojiModuleBuilder, "get_session")
@patch.object(FakeKojiModuleBuilder, "_get_tagged_nvrs")
def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session):
def test_untagged_artifacts(self, mock_get_tagged_nvrs, mock_get_session, db_session):
"""
Tests that only tagged artifacts will be untagged
"""
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
mock_session = mock.Mock()
mock_session.getTag.side_effect = [
{"name": "foobar", "id": 1},
@@ -340,8 +356,9 @@ class TestKojiBuilder:
mock_get_session.return_value = mock_session
mock_get_tagged_nvrs.side_effect = [["foo", "bar"], ["foo"]]
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=[],
@@ -471,45 +488,51 @@ class TestKojiBuilder:
@pytest.mark.parametrize("custom_whitelist", [False, True])
@pytest.mark.parametrize("repo_include_all", [False, True])
def test_buildroot_connect(
self, custom_whitelist, blocklist, repo_include_all
self, custom_whitelist, blocklist, repo_include_all, db_session
):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
if blocklist:
mmd = self.module.mmd()
mmd = module_build.mmd()
xmd = mmd.get_xmd()
xmd["mbs_options"] = {"blocked_packages": ["foo", "nginx"]}
mmd.set_xmd(xmd)
self.module.modulemd = mmd_to_str(mmd)
module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
if custom_whitelist:
mmd = self.module.mmd()
mmd = module_build.mmd()
opts = Modulemd.Buildopts()
opts.add_rpm_to_whitelist("custom1")
opts.add_rpm_to_whitelist("custom2")
mmd.set_buildopts(opts)
self.module.modulemd = mmd_to_str(mmd)
module_build.modulemd = mmd_to_str(mmd)
else:
# Set some irrelevant buildopts options to test that KojiModuleBuilder
# is not confused by this.
mmd = self.module.mmd()
mmd = module_build.mmd()
opts = Modulemd.Buildopts()
opts.set_rpm_macros("%my_macro 1")
mmd.set_buildopts(opts)
self.module.modulemd = mmd_to_str(mmd)
module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
if repo_include_all is False:
mmd = self.module.mmd()
mmd = module_build.mmd()
xmd = mmd.get_xmd()
mbs_options = xmd["mbs_options"] if "mbs_options" in xmd.keys() else {}
mbs_options["repo_include_all"] = False
xmd["mbs_options"] = mbs_options
mmd.set_xmd(xmd)
self.module.modulemd = mmd_to_str(mmd)
module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
self.module.arches.append(module_build_service.models.ModuleArch(name="i686"))
module_build.arches.append(module_build_service.models.ModuleArch(name="i686"))
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=["nginx"],
@@ -577,17 +600,21 @@ class TestKojiBuilder:
assert session.editTag2.mock_calls == expected_calls
@pytest.mark.parametrize("blocklist", [False, True])
def test_buildroot_connect_create_tag(self, blocklist):
def test_buildroot_connect_create_tag(self, blocklist, db_session):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
if blocklist:
mmd = self.module.mmd()
mmd = module_build.mmd()
xmd = mmd.get_xmd()
xmd["mbs_options"] = {"blocked_packages": ["foo", "nginx"]}
mmd.set_xmd(xmd)
self.module.modulemd = mmd_to_str(mmd)
module_build.modulemd = mmd_to_str(mmd)
db_session.commit()
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=["nginx"],
@@ -610,13 +637,17 @@ class TestKojiBuilder:
assert session.packageListBlock.mock_calls == expected_calls
@pytest.mark.parametrize("scratch", [False, True])
def test_buildroot_connect_create_target(self, scratch):
def test_buildroot_connect_create_target(self, scratch, db_session):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
if scratch:
self.module.scratch = scratch
module_build.scratch = scratch
db_session.commit()
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-foo",
components=["nginx"],
@@ -641,7 +672,7 @@ class TestKojiBuilder:
assert session.createBuildTarget.mock_calls == expected_calls
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_built_rpms_in_module_build(self, ClientSession):
def test_get_built_rpms_in_module_build(self, ClientSession, db_session):
session = ClientSession.return_value
session.listTaggedRPMS.return_value = (
[
@@ -679,20 +710,23 @@ class TestKojiBuilder:
[],
)
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
# Module builds generated by init_data uses generic modulemd file and
# the module's name/stream/version/context does not have to match it.
# But for this test, we need it to match.
mmd = self.module.mmd()
self.module.name = mmd.get_module_name()
self.module.stream = mmd.get_stream_name()
self.module.version = mmd.get_version()
self.module.context = mmd.get_context()
db.session.commit()
mmd = module_build.mmd()
module_build.name = mmd.get_module_name()
module_build.stream = mmd.get_stream_name()
module_build.version = mmd.get_version()
module_build.context = mmd.get_context()
db_session.commit()
ret = KojiModuleBuilder.get_built_rpms_in_module_build(mmd)
assert set(ret) == set(["bar-2:1.30-4.el8+1308+551bfa71", "tar-2:1.30-4.el8+1308+551bfa71"])
session.assert_not_called()
@pytest.mark.usefixtures("reuse_component_init_data")
@pytest.mark.parametrize(
"br_filtered_rpms,expected",
(
@@ -775,7 +809,6 @@ class TestKojiBuilder:
},
],
)
reuse_component_init_data()
current_module = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
rv = KojiModuleBuilder._get_filtered_rpms_on_self_dep(current_module, br_filtered_rpms)
assert set(rv) == set(expected)
@@ -785,8 +818,9 @@ class TestKojiBuilder:
"cg_enabled,cg_devel_enabled", [(False, False), (True, False), (True, True)]
)
@mock.patch("module_build_service.builder.KojiModuleBuilder.KojiContentGenerator")
def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled):
self.module.state = 2
def test_finalize(self, mock_koji_cg_cls, cg_enabled, cg_devel_enabled, db_session):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
module_build.state = 2
with patch(
"module_build_service.config.Config.koji_enable_content_generator",
new_callable=mock.PropertyMock,
@@ -798,8 +832,9 @@ class TestKojiBuilder:
return_value=cg_devel_enabled,
):
builder = FakeKojiModuleBuilder(
owner=self.module.owner,
module=self.module,
db_session=db_session,
owner=module_build.owner,
module=module_build,
config=conf,
tag_name="module-nginx-1.2",
components=[],
@@ -825,17 +860,19 @@ class TestKojiBuilder:
@patch.dict("sys.modules", krbV=MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession):
builder = KojiModuleBuilder("owner", self.module, conf, "module-tag", [])
def test_ensure_builder_use_a_logged_in_koji_session(self, ClientSession, db_session):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
builder = KojiModuleBuilder(db_session, "owner", module_build, conf, "module-tag", [])
builder.koji_session.krb_login.assert_called_once()
@patch.dict("sys.modules", krbV=MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_module_build_arches(self, ClientSession):
def test_get_module_build_arches(self, ClientSession, db_session):
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
arches = "x86_64 i686 ppc64le aarch64 s390x"
session = ClientSession.return_value
session.getTag.return_value = {"arches": arches}
ret = KojiModuleBuilder.get_module_build_arches(self.module)
ret = KojiModuleBuilder.get_module_build_arches(module_build)
assert " ".join(ret) == arches

View File

@@ -8,7 +8,7 @@ from textwrap import dedent
import kobo.rpmlib
from module_build_service import conf
from module_build_service.models import ModuleBuild, ComponentBuild, make_session
from module_build_service.models import ModuleBuild, ComponentBuild
from module_build_service.builder.MockModuleBuilder import MockModuleBuilder
from module_build_service.utils import import_fake_base_module, mmd_to_str, load_mmd
from tests import clean_database, make_module, read_staged_data
@@ -23,7 +23,7 @@ class TestMockModuleBuilder:
clean_database()
shutil.rmtree(self.resultdir)
def _create_module_with_filters(self, session, batch, state):
def _create_module_with_filters(self, db_session, batch, state):
mmd = load_mmd(read_staged_data("testmodule-with-filters"))
# Set the name and stream
mmd = mmd.copy("mbs-testmodule", "test")
@@ -63,7 +63,7 @@ class TestMockModuleBuilder:
}
})
module = ModuleBuild.create(
session,
db_session,
conf,
name="mbs-testmodule",
stream="test",
@@ -74,8 +74,8 @@ class TestMockModuleBuilder:
)
module.koji_tag = "module-mbs-testmodule-test-20171027111452"
module.batch = batch
session.add(module)
session.commit()
db_session.add(module)
db_session.commit()
comp_builds = [
{
@@ -105,86 +105,82 @@ class TestMockModuleBuilder:
]
for build in comp_builds:
session.add(ComponentBuild(**build))
session.commit()
db_session.add(ComponentBuild(**build))
db_session.commit()
return module
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_filter_last_batch(self, *args):
with make_session(conf) as session:
module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])
def test_createrepo_filter_last_batch(self, db_session):
module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
"mcurlej", module, conf, module.koji_tag, module.component_builds
)
builder.resultsdir = self.resultdir
rpms = [
"ed-1.14.1-4.module+24957a32.x86_64.rpm",
"mksh-56b-1.module+24957a32.x86_64.rpm",
"module-build-macros-0.1-1.module+24957a32.noarch.rpm",
]
rpm_qf_output = dedent("""\
ed 0 1.14.1 4.module+24957a32 x86_64
mksh 0 56b-1 module+24957a32 x86_64
module-build-macros 0 0.1 1.module+24957a32 noarch
""")
with mock.patch("os.listdir", return_value=rpms):
with mock.patch("subprocess.check_output", return_value=rpm_qf_output):
builder._createrepo()
with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
pkglist = fd.read().strip()
rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]
assert "ed" not in rpm_names
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_not_last_batch(self):
with make_session(conf) as session:
module = self._create_module_with_filters(session, 2, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
"mcurlej", module, conf, module.koji_tag, module.component_builds
)
builder.resultsdir = self.resultdir
rpms = [
"ed-1.14.1-4.module+24957a32.x86_64.rpm",
"mksh-56b-1.module+24957a32.x86_64.rpm",
]
rpm_qf_output = dedent("""\
ed 0 1.14.1 4.module+24957a32 x86_64
mksh 0 56b-1 module+24957a32 x86_64
""")
with mock.patch("os.listdir", return_value=rpms):
with mock.patch("subprocess.check_output", return_value=rpm_qf_output):
builder._createrepo()
with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
pkglist = fd.read().strip()
rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]
assert "ed" in rpm_names
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_empty_rmp_list(self, *args):
with make_session(conf) as session:
module = self._create_module_with_filters(session, 3, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
"mcurlej", module, conf, module.koji_tag, module.component_builds)
builder.resultsdir = self.resultdir
rpms = []
with mock.patch("os.listdir", return_value=rpms):
builder = MockModuleBuilder(
db_session, "mcurlej", module, conf, module.koji_tag, module.component_builds
)
builder.resultsdir = self.resultdir
rpms = [
"ed-1.14.1-4.module+24957a32.x86_64.rpm",
"mksh-56b-1.module+24957a32.x86_64.rpm",
"module-build-macros-0.1-1.module+24957a32.noarch.rpm",
]
rpm_qf_output = dedent("""\
ed 0 1.14.1 4.module+24957a32 x86_64
mksh 0 56b-1 module+24957a32 x86_64
module-build-macros 0 0.1 1.module+24957a32 noarch
""")
with mock.patch("os.listdir", return_value=rpms):
with mock.patch("subprocess.check_output", return_value=rpm_qf_output):
builder._createrepo()
with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
pkglist = fd.read().strip()
assert not pkglist
with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
pkglist = fd.read().strip()
rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]
assert "ed" not in rpm_names
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_not_last_batch(self, db_session):
module = self._create_module_with_filters(db_session, 2, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
db_session, "mcurlej", module, conf, module.koji_tag, module.component_builds
)
builder.resultsdir = self.resultdir
rpms = [
"ed-1.14.1-4.module+24957a32.x86_64.rpm",
"mksh-56b-1.module+24957a32.x86_64.rpm",
]
rpm_qf_output = dedent("""\
ed 0 1.14.1 4.module+24957a32 x86_64
mksh 0 56b-1 module+24957a32 x86_64
""")
with mock.patch("os.listdir", return_value=rpms):
with mock.patch("subprocess.check_output", return_value=rpm_qf_output):
builder._createrepo()
with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
pkglist = fd.read().strip()
rpm_names = [kobo.rpmlib.parse_nvr(rpm)["name"] for rpm in pkglist.split("\n")]
assert "ed" in rpm_names
@mock.patch("module_build_service.conf.system", new="mock")
def test_createrepo_empty_rmp_list(self, db_session):
module = self._create_module_with_filters(db_session, 3, koji.BUILD_STATES["COMPLETE"])
builder = MockModuleBuilder(
db_session, "mcurlej", module, conf, module.koji_tag, module.component_builds)
builder.resultsdir = self.resultdir
rpms = []
with mock.patch("os.listdir", return_value=rpms):
builder._createrepo()
with open(os.path.join(self.resultdir, "pkglist"), "r") as fd:
pkglist = fd.read().strip()
assert not pkglist
class TestMockModuleBuilderAddRepos:
def setup_method(self, test_method):
clean_database(add_platform_module=False)
import_fake_base_module("platform:f29:1:000000")
@mock.patch("module_build_service.conf.system", new="mock")
@mock.patch(
@@ -203,6 +199,8 @@ class TestMockModuleBuilderAddRepos:
def test_buildroot_add_repos(
self, write_config, load_config, patched_open, base_module_repofiles, db_session
):
import_fake_base_module(db_session, "platform:f29:1:000000")
platform = ModuleBuild.get_last_build_in_stream(db_session, "platform", "f29")
foo = make_module(
db_session, "foo:1:1:1", {"platform": ["f29"]}, {"platform": ["f29"]})
@@ -215,7 +213,7 @@ class TestMockModuleBuilderAddRepos:
mock.mock_open(read_data="[fake]\nrepofile 3\n").return_value,
]
builder = MockModuleBuilder("user", app, conf, "module-app", [])
builder = MockModuleBuilder(db_session, "user", app, conf, "module-app", [])
dependencies = {
"repofile://": [platform.mmd()],

View File

@@ -71,7 +71,8 @@ class TestBuild:
# Ensure that there is no build log from other tests
try:
file_path = build_logs.path(self.cg.module)
with models.make_db_session(conf) as db_session:
file_path = build_logs.path(db_session, self.cg.module)
os.remove(file_path)
except OSError:
pass
@@ -88,7 +89,8 @@ class TestBuild:
import moksha.hub.reactor # noqa
try:
file_path = build_logs.path(self.cg.module)
with models.make_db_session(conf) as db_session:
file_path = build_logs.path(db_session, self.cg.module)
os.remove(file_path)
except OSError:
pass
@@ -135,7 +137,8 @@ class TestBuild:
expected_output = json.load(expected_output_file)
# create the build.log
build_logs.start(self.cg.module)
with models.make_db_session(conf) as db_session:
build_logs.start(db_session, self.cg.module)
build_logs.stop(self.cg.module)
self.cg.devel = devel

View File

@@ -60,16 +60,16 @@ class TestLogger:
MBSConsumer.current_module_build_id = None
shutil.rmtree(self.base)
def test_module_build_logs(self):
def test_module_build_logs(self, db_session):
"""
Tests that ModuleBuildLogs is logging properly to build log file.
"""
build = models.ModuleBuild.query.filter_by(id=2).one()
build = models.ModuleBuild.get_by_id(db_session, 2)
# Initialize logging, get the build log path and remove it to
# ensure we are not using some garbage from previous failed test.
self.build_log.start(build)
path = self.build_log.path(build)
self.build_log.start(db_session, build)
path = self.build_log.path(db_session, build)
assert path[len(self.base):] == "/build-2.log"
if os.path.exists(path):
os.unlink(path)
@@ -86,7 +86,7 @@ class TestLogger:
# Try logging with current_module_build_id set to 2 and then to 2.
# Only messages with current_module_build_id set to 2 should appear in
# the log.
self.build_log.start(build)
self.build_log.start(db_session, build)
MBSConsumer.current_module_build_id = 1
log.debug("ignore this test msg1")
log.info("ignore this test msg1")
@@ -119,13 +119,13 @@ class TestLogger:
data = f.read()
assert data.find("ignore this test msg3") == -1
def test_module_build_logs_name_format(self):
build = models.ModuleBuild.query.filter_by(id=2).one()
def test_module_build_logs_name_format(self, db_session):
build = models.ModuleBuild.get_by_id(db_session, 2)
log1 = ModuleBuildLogs("/some/path", "build-{id}.log")
assert log1.name(build) == "build-2.log"
assert log1.path(build) == "/some/path/build-2.log"
assert log1.name(db_session, build) == "build-2.log"
assert log1.path(db_session, build) == "/some/path/build-2.log"
log2 = ModuleBuildLogs("/some/path", "build-{name}-{stream}-{version}.log")
assert log2.name(build) == "build-nginx-1-2.log"
assert log2.path(build) == "/some/path/build-nginx-1-2.log"
assert log2.name(db_session, build) == "build-nginx-1-2.log"
assert log2.path(db_session, build) == "/some/path/build-nginx-1-2.log"

View File

@@ -18,20 +18,15 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import pytest
from mock import patch, mock_open, ANY
from mock import patch, mock_open, ANY, Mock
from module_build_service import app, conf
from module_build_service import app
from module_build_service.manage import retire, build_module_locally
from module_build_service.models import BUILD_STATES, ModuleBuild, make_session
from tests.test_models import clean_database, init_data
from module_build_service.models import BUILD_STATES, ModuleBuild
@pytest.mark.usefixtures("model_tests_init_data")
class TestMBSManage:
def setup_method(self, test_method):
init_data()
def teardown_method(self, test_method):
clean_database(False, False)
@pytest.mark.parametrize(
("identifier", "is_valid"),
@@ -64,34 +59,33 @@ class TestMBSManage:
),
)
@patch("module_build_service.manage.prompt_bool")
def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):
def test_retire_build(self, prompt_bool, overrides, identifier, changed_count, db_session):
prompt_bool.return_value = True
with make_session(conf) as session:
module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
# Verify our assumption of the amount of ModuleBuilds in database
assert len(module_builds) == 3
module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
# Verify our assumption of the amount of ModuleBuilds in database
assert len(module_builds) == 3
for x, build in enumerate(module_builds):
build.name = "spam"
build.stream = "eggs"
build.version = "ham"
build.context = str(x)
for x, build in enumerate(module_builds):
build.name = "spam"
build.stream = "eggs"
build.version = "ham"
build.context = str(x)
for attr, value in overrides.items():
setattr(module_builds[0], attr, value)
for attr, value in overrides.items():
setattr(module_builds[0], attr, value)
session.commit()
db_session.commit()
retire(identifier)
retired_module_builds = (
session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
)
retire(identifier)
retired_module_builds = (
db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
)
assert len(retired_module_builds) == changed_count
for x in range(changed_count):
assert retired_module_builds[x].id == module_builds[x].id
assert retired_module_builds[x].state == BUILD_STATES["garbage"]
assert len(retired_module_builds) == changed_count
for x in range(changed_count):
assert retired_module_builds[x].id == module_builds[x].id
assert retired_module_builds[x].state == BUILD_STATES["garbage"]
@pytest.mark.parametrize(
("confirm_prompt", "confirm_arg", "confirm_expected"),
@@ -104,25 +98,24 @@ class TestMBSManage:
)
@patch("module_build_service.manage.prompt_bool")
def test_retire_build_confirm_prompt(
self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected
self, prompt_bool, confirm_prompt, confirm_arg, confirm_expected, db_session
):
prompt_bool.return_value = confirm_prompt
with make_session(conf) as session:
module_builds = session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
# Verify our assumption of the amount of ModuleBuilds in database
assert len(module_builds) == 3
module_builds = db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["ready"]).all()
# Verify our assumption of the amount of ModuleBuilds in database
assert len(module_builds) == 3
for x, build in enumerate(module_builds):
build.name = "spam"
build.stream = "eggs"
for x, build in enumerate(module_builds):
build.name = "spam"
build.stream = "eggs"
session.commit()
db_session.commit()
retire("spam:eggs", confirm_arg)
retired_module_builds = (
session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
)
retire("spam:eggs", confirm_arg)
retired_module_builds = (
db_session.query(ModuleBuild).filter_by(state=BUILD_STATES["garbage"]).all()
)
expected_changed_count = 3 if confirm_expected else 0
assert len(retired_module_builds) == expected_changed_count
@@ -131,8 +124,13 @@ class TestMBSManage:
@patch("module_build_service.manage.submit_module_build_from_yaml")
@patch("module_build_service.scheduler.main")
@patch("module_build_service.manage.conf.set_item")
@patch("module_build_service.models.make_db_session")
def test_build_module_locally_set_stream(
self, conf_set_item, main, submit_module_build_from_yaml, patched_open):
self, make_db_session, conf_set_item, main, submit_module_build_from_yaml, patched_open
):
mock_db_session = Mock()
make_db_session.return_value.__enter__.return_value = mock_db_session
# build_module_locally changes database uri to a local SQLite database file.
# Restore the uri to original one in order to not impact the database
# session in subsequent tests.
@@ -144,5 +142,7 @@ class TestMBSManage:
app.config['SQLALCHEMY_DATABASE_URI'] = original_db_uri
submit_module_build_from_yaml.assert_called_once_with(
ANY, ANY, {"default_streams": {"platform": "el8"}, "local_build": True},
mock_db_session, ANY, ANY, {
"default_streams": {"platform": "el8"}, "local_build": True
},
skiptests=False, stream="foo")

View File

@@ -19,47 +19,3 @@
# SOFTWARE.
#
# Written by Ralph Bean <rbean@redhat.com>
import os
from datetime import datetime
import module_build_service
from module_build_service.utils import load_mmd
from tests import db, clean_database
from module_build_service.config import init_config
from module_build_service.models import ModuleBuild, BUILD_STATES
app = module_build_service.app
conf = init_config(app)
datadir = os.path.dirname(__file__) + "/data/"
def module_build_from_modulemd(yaml):
mmd = load_mmd(yaml)
build = ModuleBuild()
build.name = mmd.get_module_name()
build.stream = mmd.get_stream_name()
build.version = mmd.get_version()
build.state = BUILD_STATES["ready"]
build.modulemd = yaml
build.koji_tag = None
build.batch = 0
build.owner = "some_other_user"
build.time_submitted = datetime(2016, 9, 3, 12, 28, 33)
build.time_modified = datetime(2016, 9, 3, 12, 28, 40)
build.time_completed = None
build.rebuild_strategy = "changed-and-after"
return build
def init_data():
clean_database()
for filename in os.listdir(datadir):
with open(datadir + filename, "r") as f:
yaml = f.read()
build = module_build_from_modulemd(yaml)
db.session.add(build)
db.session.commit()

View File

@@ -24,47 +24,48 @@ import pytest
from mock import patch
from module_build_service import conf
from module_build_service.models import ComponentBuild, ModuleBuild, make_session
from module_build_service.models import ComponentBuild, ComponentBuildTrace, ModuleBuild
from module_build_service.utils.general import mmd_to_str, load_mmd
from tests import init_data as init_data_contexts, clean_database, make_module, read_staged_data
from tests.test_models import init_data, module_build_from_modulemd
from tests import module_build_from_modulemd
@pytest.mark.usefixtures("model_tests_init_data")
class TestModels:
def setup_method(self, test_method):
init_data()
def test_app_sqlalchemy_events(self):
with make_session(conf) as session:
component_build = ComponentBuild()
component_build.package = "before_models_committed"
component_build.scmurl = (
"git://pkgs.domain.local/rpms/before_models_committed?"
"#9999999999999999999999999999999999999999"
)
component_build.format = "rpms"
component_build.task_id = 999999999
component_build.state = 1
component_build.nvr = \
"before_models_committed-0.0.0-0.module_before_models_committed_0_0"
component_build.batch = 1
component_build.module_id = 1
def test_app_sqlalchemy_events(self, db_session):
component_build = ComponentBuild(
package="before_models_committed",
scmurl="git://pkgs.domain.local/rpms/before_models_committed?"
"#9999999999999999999999999999999999999999",
format="rpms",
task_id=999999999,
state=1,
nvr="before_models_committed-0.0.0-0.module_before_models_committed_0_0",
batch=1,
module_id=1,
)
session.add(component_build)
session.commit()
db_session.add(component_build)
db_session.commit()
with make_session(conf) as session:
c = session.query(ComponentBuild).filter(ComponentBuild.id == 1).one()
assert c.component_builds_trace[0].id == 1
assert c.component_builds_trace[0].component_id == 1
assert c.component_builds_trace[0].state == 1
assert c.component_builds_trace[0].state_reason is None
assert c.component_builds_trace[0].task_id == 999999999
component_builds_trace = db_session.query(ComponentBuildTrace).filter(
ComponentBuildTrace.component_id == component_build.id).one()
db_session.commit()
def test_context_functions(self):
assert component_builds_trace.id == 1
assert component_builds_trace.component_id == 1
assert component_builds_trace.state == 1
assert component_builds_trace.state_reason is None
assert component_builds_trace.task_id == 999999999
def test_context_functions(self, db_session):
""" Test that the build_context, runtime_context, and context hashes are correctly
determined"""
build = ModuleBuild.query.filter_by(id=1).one()
db_session.commit()
build = ModuleBuild.get_by_id(db_session, 1)
db_session.commit()
build.modulemd = read_staged_data("testmodule_dependencies")
(
build.ref_build_context,
@@ -89,8 +90,12 @@ class TestModels:
build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(i)
db_session.add(build)
db_session.commit()
build_one = ModuleBuild.get_by_id(db_session, 2)
assert build_one.siblings == [3, 4]
sibling_ids = build_one.siblings(db_session)
db_session.commit()
assert sibling_ids == [3, 4]
@pytest.mark.parametrize(
"stream,right_pad,expected",
@@ -111,100 +116,100 @@ class TestModels:
class TestModelsGetStreamsContexts:
def test_get_last_build_in_all_streams(self):
def test_get_last_build_in_all_streams(self, db_session):
init_data_contexts(contexts=True)
with make_session(conf) as session:
builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")
builds = sorted([
"%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds
])
assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]
builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")
builds = sorted([
"%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds
])
db_session.commit()
assert builds == ["nginx:%d:%d" % (i, i + 2) for i in range(10)]
def test_get_last_build_in_all_stream_last_version(self):
def test_get_last_build_in_all_stream_last_version(self, db_session):
init_data_contexts(contexts=False)
with make_session(conf) as session:
builds = ModuleBuild.get_last_build_in_all_streams(session, "nginx")
builds = [
"%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds
]
assert builds == ["nginx:1:11"]
builds = ModuleBuild.get_last_build_in_all_streams(db_session, "nginx")
builds = [
"%s:%s:%s" % (build.name, build.stream, str(build.version)) for build in builds
]
db_session.commit()
assert builds == ["nginx:1:11"]
def test_get_last_builds_in_stream(self):
def test_get_last_builds_in_stream(self, db_session):
init_data_contexts(contexts=True)
with make_session(conf) as session:
builds = ModuleBuild.get_last_builds_in_stream(session, "nginx", "1")
builds = [
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
]
assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]
builds = ModuleBuild.get_last_builds_in_stream(db_session, "nginx", "1")
builds = [
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
]
db_session.commit()
assert builds == ["nginx:1:3:d5a6c0fa", "nginx:1:3:795e97c1"]
def test_get_last_builds_in_stream_version_lte(self):
def test_get_last_builds_in_stream_version_lte(self, db_session):
init_data_contexts(1, multiple_stream_versions=True)
with make_session(conf) as session:
builds = ModuleBuild.get_last_builds_in_stream_version_lte(session, "platform", 290100)
builds = set([
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
])
assert builds == set(["platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"])
builds = ModuleBuild.get_last_builds_in_stream_version_lte(db_session, "platform", 290100)
builds = set([
"%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
for build in builds
])
db_session.commit()
assert builds == set(["platform:f29.0.0:3:00000000", "platform:f29.1.0:3:00000000"])
def test_get_last_builds_in_stream_version_lte_different_versions(self, db_session):
    """
    Tests that get_last_builds_in_stream_version_lte works in case the
    name:stream_ver modules have different versions.

    Post-patch version: modules are created directly with the db_session
    fixture instead of a make_session(conf) context manager.
    """
    clean_database(False)
    # Several platform streams; only the highest version per stream with
    # stream version <= 290200 should be returned below.
    make_module(
        db_session, "platform:f29.1.0:10:old_version", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.1.0:15:c11.another", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.1.0:15:c11", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.2.0:0:old_version", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.2.0:1:c11", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.3.0:15:old_version", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f29"])

    builds = ModuleBuild.get_last_builds_in_stream_version_lte(
        db_session, "platform", 290200)
    builds = set([
        "%s:%s:%s:%s" % (build.name, build.stream, str(build.version), build.context)
        for build in builds
    ])
    db_session.commit()
    assert builds == set([
        "platform:f29.1.0:15:c11",
        "platform:f29.1.0:15:c11.another",
        "platform:f29.2.0:1:c11",
    ])
def test_get_module_count(self, db_session):
    """get_module_count counts each name:stream:version:context once.

    Post-patch version: uses the db_session fixture instead of
    make_session(conf).
    """
    clean_database(False)
    make_module(db_session, "platform:f29.1.0:10:c11", {}, {})
    make_module(db_session, "platform:f29.1.0:10:c12", {}, {})

    count = ModuleBuild.get_module_count(db_session, name="platform")
    db_session.commit()
    assert count == 2
def test_add_virtual_streams_filter(self, db_session):
    """_add_virtual_streams_filter matches modules with any listed virtual stream.

    Post-patch version: uses the db_session fixture instead of
    make_session(conf).
    """
    clean_database(False)
    make_module(db_session, "platform:f29.1.0:10:c1", {}, {}, virtual_streams=["f29"])
    make_module(db_session, "platform:f29.1.0:15:c1", {}, {}, virtual_streams=["f29"])
    make_module(
        db_session, "platform:f29.3.0:15:old_version", {}, {},
        virtual_streams=["f28", "f29"])
    # This module only has virtual stream f30 and must be filtered out.
    make_module(db_session, "platform:f29.3.0:20:c11", {}, {}, virtual_streams=["f30"])

    query = db_session.query(ModuleBuild).filter_by(name="platform")
    query = ModuleBuild._add_virtual_streams_filter(db_session, query, ["f28", "f29"])
    count = query.count()
    db_session.commit()
    assert count == 3

View File

@@ -69,26 +69,26 @@ def test_standalone_metrics_server():
@mock.patch("module_build_service.monitor.builder_failed_counter.labels")
@mock.patch("module_build_service.monitor.builder_success_counter.inc")
def test_monitor_state_changing_success(succ_cnt, failed_cnt, db_session):
    """A build reaching the done state bumps only the success counter.

    Post-patch version: uses the db_session fixture and the new
    ModuleBuild.transition signature that takes the session first.
    """
    conf = mbs_config.Config(TestConfiguration)
    b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
    b.transition(db_session, conf, models.BUILD_STATES["wait"])
    b.transition(db_session, conf, models.BUILD_STATES["build"])
    b.transition(db_session, conf, models.BUILD_STATES["done"])
    db_session.commit()
    succ_cnt.assert_called_once()
    failed_cnt.assert_not_called()
@mock.patch("module_build_service.monitor.builder_failed_counter.labels")
@mock.patch("module_build_service.monitor.builder_success_counter.inc")
def test_monitor_state_changing_failure(succ_cnt, failed_cnt, db_session):
    """A build reaching the failed state bumps only the failure counter.

    Post-patch version: uses the db_session fixture and the new
    ModuleBuild.transition signature that takes the session first.
    """
    failure_type = "user"
    conf = mbs_config.Config(TestConfiguration)
    b = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
    b.transition(db_session, conf, models.BUILD_STATES["wait"])
    b.transition(db_session, conf, models.BUILD_STATES["build"])
    b.transition(db_session, conf, models.BUILD_STATES["failed"], failure_type=failure_type)
    db_session.commit()
    succ_cnt.assert_not_called()
    failed_cnt.assert_called_once_with(reason=failure_type)

View File

@@ -27,60 +27,58 @@ from mock import patch, PropertyMock
import pytest
import module_build_service.resolver as mbs_resolver
from module_build_service import app, conf, db, models, utils, Modulemd
from module_build_service import models, utils, Modulemd
from module_build_service.utils import import_mmd, mmd_to_str, load_mmd
from module_build_service.models import ModuleBuild
import tests
base_dir = os.path.join(os.path.dirname(__file__), "..")
@pytest.mark.usefixtures("reuse_component_init_data")
class TestDBModule:
def setup_method(self):
tests.reuse_component_init_data()
def test_get_buildrequired_modulemds(self, db_session):
    """The DB resolver returns modulemds buildrequired on a given platform NSVC.

    Post-patch version: operates on the db_session fixture and passes the
    session to GenericResolver.create.
    """
    mmd = load_mmd(tests.read_staged_data("platform"))
    mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
    import_mmd(db_session, mmd)
    platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
    mmd = tests.make_module(db_session,
                            "testmodule:master:20170109091357:123",
                            store_to_db=False)
    # Store a ready (state=5) testmodule build that buildrequires the
    # imported platform:f30.1.3 stream.
    build = ModuleBuild(
        name="testmodule",
        stream="master",
        version=20170109091357,
        state=5,
        build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",
        runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",
        context="7c29193d",
        koji_tag="module-testmodule-master-20170109091357-7c29193d",
        scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
        batch=3,
        owner="Dr. Pepper",
        time_submitted=datetime(2018, 11, 15, 16, 8, 18),
        time_modified=datetime(2018, 11, 15, 16, 19, 35),
        rebuild_strategy="changed-and-after",
        modulemd=mmd_to_str(mmd),
    )
    build.buildrequires.append(platform_f300103)
    db_session.add(build)
    db_session.commit()

    platform_nsvc = platform_f300103.mmd().get_nsvc()
    resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
    result = resolver.get_buildrequired_modulemds("testmodule", "master", platform_nsvc)
    nsvcs = set([m.get_nsvc() for m in result])
    assert nsvcs == set(["testmodule:master:20170109091357:123"])
@pytest.mark.parametrize("stream_versions", [False, True])
def test_get_compatible_base_module_modulemds_stream_versions(self, stream_versions):
def test_get_compatible_base_module_modulemds_stream_versions(
self, stream_versions, db_session
):
tests.init_data(1, multiple_stream_versions=True)
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
result = resolver.get_compatible_base_module_modulemds(
"platform", "f29.1.0", stream_version_lte=stream_versions, virtual_streams=["f29"],
states=[models.BUILD_STATES["ready"]])
@@ -109,7 +107,7 @@ class TestDBModule:
mmd.set_xmd(xmd)
module.modulemd = mmd_to_str(mmd)
db_session.commit()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
result = resolver.get_module_build_dependencies(
"testmodule", "master", "20170109091357", "78e4a6fd").keys()
assert set(result) == expected
@@ -142,7 +140,7 @@ class TestDBModule:
db_session.commit()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
result = resolver.get_module_build_dependencies(
"testmodule2", "master", "20180123171545", "c40c156c").keys()
assert set(result) == set(["module-f28-build"])
@@ -155,25 +153,26 @@ class TestDBModule:
new_callable=PropertyMock,
return_value=tests.staged_data_filename("local_builds"),
)
def test_get_module_build_dependencies_recursive_requires(
    self, resultdir, conf_system, db_session
):
    """
    Tests that it returns the requires of the buildrequires recursively

    Post-patch version: load_local_builds and local_modules take the
    db_session fixture; the app.app_context() wrapper is no longer needed.
    """
    utils.load_local_builds(db_session, ["platform", "parent", "child", "testmodule"])
    build = models.ModuleBuild.local_modules(db_session, "child", "master")
    resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
    result = resolver.get_module_build_dependencies(mmd=build[0].mmd()).keys()

    local_path = tests.staged_data_filename("local_builds")
    expected = [os.path.join(local_path, "module-parent-master-20170816080815/results")]
    assert set(result) == set(expected)
def test_resolve_requires(self, db_session):
build = models.ModuleBuild.get_by_id(db_session, 2)
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
result = resolver.resolve_requires(
[":".join([build.name, build.stream, build.version, build.context])]
)
@@ -193,7 +192,7 @@ class TestDBModule:
Tests that the profiles get resolved recursively
"""
mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="db")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
expected = {
"buildroot": set([
@@ -246,26 +245,26 @@ class TestDBModule:
"""
Test that profiles get resolved recursively on local builds
"""
utils.load_local_builds(["platform"])
utils.load_local_builds(db_session, ["platform"])
mmd = models.ModuleBuild.get_by_id(db_session, 2).mmd()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}
assert result == expected
def test_get_latest_with_virtual_stream(self, db_session):
    """The newest stream carrying virtual stream f29 is returned."""
    tests.init_data(1, multiple_stream_versions=True)
    resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
    mmd = resolver.get_latest_with_virtual_stream("platform", "f29")
    assert mmd
    assert mmd.get_stream_name() == "f29.2.0"
def test_get_latest_with_virtual_stream_none(self, db_session):
    """An unknown virtual stream yields no modulemd."""
    resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
    mmd = resolver.get_latest_with_virtual_stream("platform", "doesnotexist")
    assert not mmd
def test_get_module_count(self, db_session):
    """The DB resolver counts exactly one platform:f28 build in fixtures."""
    resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="db")
    count = resolver.get_module_count(name="platform", stream="f28")
    assert count == 1

View File

@@ -20,27 +20,22 @@
#
# Written by Jan Kaluza <jkaluza@redhat.com>
import os
import pytest
from datetime import datetime
import module_build_service.resolver as mbs_resolver
from module_build_service import db
from module_build_service.utils.general import import_mmd, mmd_to_str, load_mmd
from module_build_service.models import ModuleBuild
import tests
base_dir = os.path.join(os.path.dirname(__file__), "..")
@pytest.mark.usefixtures("reuse_component_init_data")
class TestLocalResolverModule:
def setup_method(self):
tests.reuse_component_init_data()
def test_get_buildrequired_modulemds(self):
def test_get_buildrequired_modulemds(self, db_session):
mmd = load_mmd(tests.read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), "f8")
import_mmd(db.session, mmd)
import_mmd(db_session, mmd)
platform_f8 = ModuleBuild.query.filter_by(stream="f8").one()
mmd = mmd.copy("testmodule", "master")
mmd.set_version(20170109091357)
@@ -62,10 +57,10 @@ class TestLocalResolverModule:
rebuild_strategy="changed-and-after",
modulemd=mmd_to_str(mmd),
)
db.session.add(build)
db.session.commit()
db_session.add(build)
db_session.commit()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="local")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="local")
result = resolver.get_buildrequired_modulemds(
"testmodule", "master", platform_f8.mmd().get_nsvc())
nsvcs = set([m.get_nsvc() for m in result])

View File

@@ -18,24 +18,18 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
from mock import patch, PropertyMock, Mock, call
import module_build_service.resolver as mbs_resolver
import module_build_service.utils
from module_build_service import conf, models
from module_build_service.utils.general import mmd_to_str
import module_build_service.models
import tests
base_dir = os.path.join(os.path.dirname(__file__), "..")
class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e):
def test_get_module_modulemds_nsvc(self, mock_session, testmodule_mmd_9c690d0e, db_session):
""" Tests for querying a module from mbs """
mock_res = Mock()
mock_res.ok.return_value = True
@@ -54,7 +48,7 @@ class TestMBSModule:
mock_session.get.return_value = mock_res
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
module_mmds = resolver.get_module_modulemds(
"testmodule", "master", "20180205135154", "9c690d0e", virtual_streams=["f28"]
)
@@ -81,7 +75,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_modulemds_partial(
self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed
self, mock_session, testmodule_mmd_9c690d0e, testmodule_mmd_c2c572ed, db_session
):
""" Test for querying MBS without the context of a module """
@@ -110,7 +104,7 @@ class TestMBSModule:
}
mock_session.get.return_value = mock_res
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
ret = resolver.get_module_modulemds("testmodule", "master", version)
nsvcs = set(
m.get_nsvc()
@@ -136,7 +130,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_build_dependencies(
self, mock_session, platform_mmd, testmodule_mmd_9c690d0e
self, mock_session, platform_mmd, testmodule_mmd_9c690d0e, db_session
):
"""
Tests that we return just direct build-time dependencies of testmodule.
@@ -173,7 +167,7 @@ class TestMBSModule:
mock_session.get.return_value = mock_res
expected = set(["module-f28-build"])
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.get_module_build_dependencies(
"testmodule", "master", "20180205135154", "9c690d0e").keys()
@@ -213,7 +207,7 @@ class TestMBSModule:
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_build_dependencies_empty_buildrequires(
self, mock_session, testmodule_mmd_9c690d0e
self, mock_session, testmodule_mmd_9c690d0e, db_session
):
mmd = module_build_service.utils.load_mmd(testmodule_mmd_9c690d0e)
@@ -246,7 +240,7 @@ class TestMBSModule:
expected = set()
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.get_module_build_dependencies(
"testmodule", "master", "20180205135154", "9c690d0e"
).keys()
@@ -266,7 +260,9 @@ class TestMBSModule:
assert set(result) == expected
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_resolve_profiles(self, mock_session, formatted_testmodule_mmd, platform_mmd):
def test_resolve_profiles(
self, mock_session, formatted_testmodule_mmd, platform_mmd, db_session
):
mock_res = Mock()
mock_res.ok.return_value = True
@@ -284,7 +280,7 @@ class TestMBSModule:
}
mock_session.get.return_value = mock_res
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.resolve_profiles(
formatted_testmodule_mmd, ("buildroot", "srpm-buildroot")
)
@@ -351,55 +347,54 @@ class TestMBSModule:
return_value=tests.staged_data_filename("local_builds")
)
def test_resolve_profiles_local_module(
self, local_builds, conf_system, formatted_testmodule_mmd
self, local_builds, conf_system, formatted_testmodule_mmd, db_session
):
tests.clean_database()
with tests.app.app_context():
module_build_service.utils.load_local_builds(["platform"])
module_build_service.utils.load_local_builds(db_session, ["platform"])
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
result = resolver.resolve_profiles(
formatted_testmodule_mmd, ("buildroot", "srpm-buildroot"))
expected = {"buildroot": set(["foo"]), "srpm-buildroot": set(["bar"])}
assert result == expected
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_empty_buildrequired_modulemds(self, request_session, db_session):
    """An empty MBS response yields an empty buildrequired modulemd list."""
    resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
    request_session.get.return_value = Mock(ok=True)
    request_session.get.return_value.json.return_value = {"items": [], "meta": {"next": None}}

    result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")

    assert [] == result
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_buildrequired_modulemds(self, mock_session):
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
def test_get_buildrequired_modulemds(self, mock_session, db_session):
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
mock_session.get.return_value = Mock(ok=True)
with models.make_session(conf) as db_session:
mock_session.get.return_value.json.return_value = {
"items": [
{
"name": "nodejs",
"stream": "10",
"version": 1,
"context": "c1",
"modulemd": mmd_to_str(
tests.make_module(db_session, "nodejs:10:1:c1", store_to_db=False),
),
},
{
"name": "nodejs",
"stream": "10",
"version": 2,
"context": "c1",
"modulemd": mmd_to_str(
tests.make_module(db_session, "nodejs:10:2:c1", store_to_db=False),
),
},
],
"meta": {"next": None},
}
mock_session.get.return_value.json.return_value = {
"items": [
{
"name": "nodejs",
"stream": "10",
"version": 1,
"context": "c1",
"modulemd": mmd_to_str(
tests.make_module(db_session, "nodejs:10:1:c1", store_to_db=False),
),
},
{
"name": "nodejs",
"stream": "10",
"version": 2,
"context": "c1",
"modulemd": mmd_to_str(
tests.make_module(db_session, "nodejs:10:2:c1", store_to_db=False),
),
},
],
"meta": {"next": None},
}
result = resolver.get_buildrequired_modulemds("nodejs", "10", "platform:el8:1:00000000")
@@ -411,7 +406,7 @@ class TestMBSModule:
assert "c1" == mmd.get_context()
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_module_count(self, mock_session):
def test_get_module_count(self, mock_session, db_session):
mock_res = Mock()
mock_res.ok.return_value = True
mock_res.json.return_value = {
@@ -420,7 +415,7 @@ class TestMBSModule:
}
mock_session.get.return_value = mock_res
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
count = resolver.get_module_count(name="platform", stream="f28")
assert count == 5
@@ -430,7 +425,7 @@ class TestMBSModule:
)
@patch("module_build_service.resolver.MBSResolver.requests_session")
def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd):
def test_get_latest_with_virtual_stream(self, mock_session, platform_mmd, db_session):
mock_res = Mock()
mock_res.ok.return_value = True
mock_res.json.return_value = {
@@ -447,7 +442,7 @@ class TestMBSModule:
}
mock_session.get.return_value = mock_res
resolver = mbs_resolver.GenericResolver.create(tests.conf, backend="mbs")
resolver = mbs_resolver.GenericResolver.create(db_session, tests.conf, backend="mbs")
mmd = resolver.get_latest_with_virtual_stream("platform", "virtualf28")
assert mmd.get_module_name() == "platform"

View File

@@ -25,7 +25,7 @@ import pytest
from mock import call, patch, Mock
from sqlalchemy import func
from module_build_service import conf, db
from module_build_service import conf
from module_build_service.models import BUILD_STATES, ModuleBuild
from module_build_service.scheduler.consumer import MBSConsumer
from module_build_service.scheduler.handlers.greenwave import get_corresponding_module_build
@@ -40,10 +40,10 @@ class TestGetCorrespondingModuleBuild:
clean_database()
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_module_build_nvr_does_not_exist_in_koji(self, ClientSession, db_session):
    """No module build is found when Koji does not know the NVR."""
    ClientSession.return_value.getBuild.return_value = None
    assert get_corresponding_module_build(db_session, "n-v-r") is None
@pytest.mark.parametrize(
"build_info",
@@ -57,32 +57,34 @@ class TestGetCorrespondingModuleBuild:
],
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_cannot_find_module_build_id_from_build_info(
    self, ClientSession, build_info, db_session
):
    """No module build is found when Koji build info lacks the MBS build id."""
    ClientSession.return_value.getBuild.return_value = build_info
    assert get_corresponding_module_build(db_session, "n-v-r") is None
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_corresponding_module_build_id_does_not_exist_in_db(self, ClientSession, db_session):
    """No module build is found for an id beyond any stored build."""
    # max(id) + 1 is guaranteed not to exist in the database.
    fake_module_build_id, = db_session.query(func.max(ModuleBuild.id)).first()
    ClientSession.return_value.getBuild.return_value = {
        "extra": {"typeinfo": {"module": {"module_build_service_id": fake_module_build_id + 1}}}
    }
    assert get_corresponding_module_build(db_session, "n-v-r") is None
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_find_the_module_build(self, ClientSession, db_session):
    """The module build matching the id embedded in Koji typeinfo is found."""
    expected_module_build = (
        db_session.query(ModuleBuild).filter(ModuleBuild.name == "platform").first()
    )
    ClientSession.return_value.getBuild.return_value = {
        "extra": {"typeinfo": {"module": {"module_build_service_id": expected_module_build.id}}}
    }

    build = get_corresponding_module_build(db_session, "n-v-r")

    assert expected_module_build.id == build.id
    assert expected_module_build.name == build.name
@@ -92,9 +94,9 @@ class TestDecisionUpdateHandler:
"""Test handler decision_update"""
@patch("module_build_service.scheduler.handlers.greenwave.log")
def test_decision_context_is_not_match(self, log):
def test_decision_context_is_not_match(self, log, db_session):
msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing")
decision_update(conf, db.session, msg)
decision_update(conf, db_session, msg)
log.debug.assert_called_once_with(
'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"',
"msg-id-1",
@@ -102,14 +104,14 @@ class TestDecisionUpdateHandler:
)
@patch("module_build_service.scheduler.handlers.greenwave.log")
def test_not_satisfy_policies(self, log):
def test_not_satisfy_policies(self, log, db_session):
msg = Mock(
msg_id="msg-id-1",
decision_context="test_dec_context",
policies_satisfied=False,
subject_identifier="pkg-0.1-1.c1",
)
decision_update(conf, db.session, msg)
decision_update(conf, db_session, msg)
log.debug.assert_called_once_with(
"Skip to handle module build %s because it has not satisfied Greenwave policies.",
msg.subject_identifier,
@@ -117,21 +119,21 @@ class TestDecisionUpdateHandler:
@patch("module_build_service.messaging.publish")
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_transform_from_done_to_ready(self, ClientSession, publish):
def test_transform_from_done_to_ready(self, ClientSession, publish, db_session):
clean_database()
# This build should be queried and transformed to ready state
module_build = make_module(db.session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
module_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
module_build.transition(
conf, BUILD_STATES["done"], "Move to done directly for running test."
db_session, conf, BUILD_STATES["done"], "Move to done directly for running test."
)
db.session.commit()
db_session.commit()
# Assert this call below
first_publish_call = call(
service="mbs",
topic="module.state.change",
msg=module_build.json(show_tasks=False),
msg=module_build.json(db_session, show_tasks=False),
conf=conf,
)
@@ -153,9 +155,7 @@ class TestDecisionUpdateHandler:
consumer.consume(msg)
# Load module build again to check its state is moved correctly
module_build = (
db.session.query(ModuleBuild).filter(ModuleBuild.id == module_build.id).first())
db_session.refresh(module_build)
assert BUILD_STATES["ready"] == module_build.state
publish.assert_has_calls([
@@ -163,7 +163,7 @@ class TestDecisionUpdateHandler:
call(
service="mbs",
topic="module.state.change",
msg=module_build.json(show_tasks=False),
msg=module_build.json(db_session, show_tasks=False),
conf=conf,
),
])

View File

@@ -28,7 +28,7 @@ from tests.test_views.test_views import FakeSCM
import module_build_service.messaging
import module_build_service.scheduler.handlers.modules
from module_build_service import build_logs
from module_build_service.models import make_session, ModuleBuild, ComponentBuild
from module_build_service.models import make_db_session, ModuleBuild
from module_build_service.utils.general import mmd_to_str, load_mmd
@@ -41,13 +41,14 @@ class TestModuleInit:
mmd = mmd.copy("testmodule", "1")
scmurl = "git://pkgs.domain.local/modules/testmodule?#620ec77"
clean_database()
with make_session(conf) as session:
with make_db_session(conf) as session:
ModuleBuild.create(
session, conf, "testmodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")
def teardown_method(self, test_method):
    # Best-effort cleanup of the build log for module build 1; the log may
    # not exist for every test, hence the broad suppression.
    try:
        with make_db_session(conf) as db_session:
            path = build_logs.path(db_session, 1)
            os.remove(path)
    except Exception:
        pass
@@ -90,7 +91,7 @@ class TestModuleInit:
msg_id=None, module_build_id=2, module_build_state="init"
)
self.fn(config=conf, session=db_session, msg=msg)
self.fn(config=conf, db_session=db_session, msg=msg)
build = ModuleBuild.get_by_id(db_session, 2)
# Make sure the module entered the wait state
@@ -122,18 +123,18 @@ class TestModuleInit:
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_scm_not_available(self, get_build_arches, mocked_scm, db_session):
    """The build moves to the failed state when the git server is unreachable.

    Post-patch version: FakeSCM raises from get_latest via its own kwargs
    instead of a locally defined mocked_scm_get_latest helper.
    """
    FakeSCM(
        mocked_scm, "testmodule", "testmodule.yaml", "620ec77321b2ea7b0d67d82992dda3e1d67055b4",
        get_latest_raise=True,
        get_latest_error=RuntimeError("Failed in mocked_scm_get_latest")
    )
    msg = module_build_service.messaging.MBSModule(
        msg_id=None, module_build_id=2, module_build_state="init")
    self.fn(config=conf, db_session=db_session, msg=msg)
    build = ModuleBuild.get_by_id(db_session, 2)
    # Make sure the module entered the failed state
    # since the git server is not available
    assert build.state == 4, build.state
@@ -145,24 +146,25 @@ class TestModuleInit:
)
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_includedmodule(self, get_build_arches, mocked_scm, mocked_mod_allow_repo):
def test_init_includedmodule(
self, get_build_arches, mocked_scm, mocked_mod_allow_repo, db_session
):
FakeSCM(mocked_scm, "includedmodules", ["testmodule_init.yaml"])
includedmodules_yml_path = read_staged_data("includedmodules")
mmd = load_mmd(includedmodules_yml_path)
# Set the name and stream
mmd = mmd.copy("includedmodules", "1")
scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886"
with make_session(conf) as session:
ModuleBuild.create(
session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=3, module_build_state="init")
self.fn(config=conf, session=session, msg=msg)
build = ModuleBuild.query.filter_by(id=3).one()
ModuleBuild.create(
db_session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl")
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=3, module_build_state="init")
self.fn(config=conf, db_session=db_session, msg=msg)
build = ModuleBuild.get_by_id(db_session, 3)
assert build.state == 1
assert build.name == "includemodule"
batches = {}
for comp_build in ComponentBuild.query.filter_by(module_id=3).all():
for comp_build in build.component_builds:
batches[comp_build.package] = comp_build.batch
assert batches["perl-List-Compare"] == 2
assert batches["perl-Tangerine"] == 2
@@ -183,7 +185,7 @@ class TestModuleInit:
@patch("module_build_service.scm.SCM")
@patch("module_build_service.utils.submit.get_build_arches", return_value=["x86_64"])
def test_init_when_get_latest_raises(
self, get_build_arches, mocked_scm, mocked_from_module_event):
self, get_build_arches, mocked_scm, mocked_from_module_event, db_session):
FakeSCM(
mocked_scm,
"testmodule",
@@ -193,12 +195,13 @@ class TestModuleInit:
)
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="init")
with make_session(conf) as session:
build = session.query(ModuleBuild).filter_by(id=2).one()
mocked_from_module_event.return_value = build
self.fn(config=conf, session=session, msg=msg)
# Query the database again to make sure the build object is updated
session.refresh(build)
# Make sure the module entered the failed state
assert build.state == 4, build.state
assert "Failed to get the latest commit for" in build.state_reason
build = ModuleBuild.get_by_id(db_session, 2)
mocked_from_module_event.return_value = build
self.fn(config=conf, db_session=db_session, msg=msg)
# Query the database again to make sure the build object is updated
db_session.refresh(build)
# Make sure the module entered the failed state
assert build.state == 4, build.state
assert "Failed to get the latest commit for" in build.state_reason

View File

@@ -27,7 +27,7 @@ import module_build_service.scheduler.handlers.modules
import os
import koji
import pytest
from tests import conf, db, scheduler_init_data, read_staged_data
from tests import conf, scheduler_init_data, read_staged_data
import module_build_service.resolver
from module_build_service import build_logs, Modulemd
from module_build_service.utils.general import load_mmd
@@ -44,7 +44,8 @@ class TestModuleWait:
def teardown_method(self, test_method):
try:
path = build_logs.path(1)
with module_build_service.models.make_db_session(conf) as db_session:
path = build_logs.path(db_session, 1)
os.remove(path)
except Exception:
pass
@@ -79,8 +80,8 @@ class TestModuleWait:
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=1, module_build_state="some state")
with patch.object(module_build_service.resolver, "system_resolver"):
self.fn(config=self.config, session=self.session, msg=msg)
with patch("module_build_service.resolver.GenericResolver.create"):
self.fn(config=self.config, db_session=self.session, msg=msg)
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -115,20 +116,19 @@ class TestModuleWait:
resolver.backend = "db"
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, session=db_session, msg=msg)
koji_session.newRepo.assert_called_once_with("module-123-build")
generic_resolver.create.return_value = resolver
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg)
koji_session.newRepo.assert_called_once_with("module-123-build")
# When module-build-macros is reused, it still has to appear only
# once in database.
builds_count = (
db.session.query(ComponentBuild)
.filter_by(package="module-build-macros", module_id=2)
.count()
)
builds_count = db_session.query(ComponentBuild).filter_by(
package="module-build-macros", module_id=2).count()
assert builds_count == 1
@patch(
@@ -164,12 +164,14 @@ class TestModuleWait:
resolver.backend = "db"
resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357"
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, session=db_session, msg=msg)
assert koji_session.newRepo.called
generic_resolver.create.return_value = resolver
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg)
assert koji_session.newRepo.called
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -209,13 +211,15 @@ class TestModuleWait:
"module-bootstrap-tag": [base_mmd]
}
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, session=db_session, msg=msg)
module_build = ModuleBuild.query.filter_by(id=2).one()
assert module_build.cg_build_koji_tag == "modular-updates-candidate"
generic_resolver.create.return_value = resolver
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state")
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg)
module_build = ModuleBuild.get_by_id(db_session, 2)
assert module_build.cg_build_koji_tag == "modular-updates-candidate"
@pytest.mark.parametrize(
"koji_cg_tag_build,expected_cg_koji_build_tag",
@@ -280,12 +284,12 @@ class TestModuleWait:
"koji_cg_tag_build",
new=koji_cg_tag_build,
):
with patch.object(module_build_service.resolver, "system_resolver", new=resolver):
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state"
)
module_build_service.scheduler.handlers.modules.wait(
config=conf, session=db_session, msg=msg
)
module_build = ModuleBuild.query.filter_by(id=2).one()
assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag
generic_resolver.create.return_value = resolver
msg = module_build_service.messaging.MBSModule(
msg_id=None, module_build_id=2, module_build_state="some state"
)
module_build_service.scheduler.handlers.modules.wait(
config=conf, db_session=db_session, msg=msg
)
module_build = ModuleBuild.get_by_id(db_session, 2)
assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag

View File

@@ -22,7 +22,7 @@ import re
import pytest
from mock import patch
from module_build_service import models, conf
from tests import reuse_component_init_data, db, clean_database
from tests import clean_database
import mock
import koji
from module_build_service.scheduler.producer import MBSProducer
@@ -31,6 +31,7 @@ import six.moves.queue as queue
from datetime import datetime, timedelta
@pytest.mark.usefixtures("reuse_component_init_data")
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
return_value={"build": [], "srpm-build": []},
@@ -39,8 +40,6 @@ from datetime import datetime, timedelta
@patch("module_build_service.builder.GenericBuilder.create_from_module")
class TestPoller:
def setup_method(self, test_method):
reuse_component_init_data()
self.p_read_config = patch(
"koji.read_config",
return_value={
@@ -58,7 +57,7 @@ class TestPoller:
@pytest.mark.parametrize("fresh", [True, False])
@patch("module_build_service.utils.batches.start_build_component")
def test_process_paused_module_builds(
self, start_build_component, create_builder, global_consumer, dbg, fresh
self, start_build_component, create_builder, global_consumer, dbg, fresh, db_session
):
"""
Tests general use-case of process_paused_module_builds.
@@ -72,14 +71,14 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
# If fresh is set, then we simulate that activity just occurred 2 minutes ago on the build
if fresh:
module_build.time_modified = datetime.utcnow() - timedelta(minutes=2)
else:
module_build.time_modified = datetime.utcnow() - timedelta(days=5)
db.session.commit()
db_session.commit()
# Poll :)
hub = mock.MagicMock()
@@ -87,7 +86,7 @@ class TestPoller:
poller.poll()
# Refresh our module_build object.
module_build = models.ModuleBuild.query.get(3)
db_session.refresh(module_build)
# If fresh is set, we expect the poller to not touch the module build since it's been less
# than 10 minutes of inactivity
@@ -112,7 +111,7 @@ class TestPoller:
@patch("module_build_service.utils.batches.start_build_component")
def test_process_paused_module_builds_with_new_repo_task(
self, start_build_component, create_builder, global_consumer, dbg, task_state,
expect_start_build_component
expect_start_build_component, db_session
):
"""
Tests general use-case of process_paused_module_builds.
@@ -126,7 +125,7 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
module_build.time_modified = datetime.utcnow() - timedelta(days=5)
if task_state:
@@ -134,7 +133,7 @@ class TestPoller:
koji_session.getTaskInfo.return_value = {"state": task_state}
builder.koji_session = koji_session
module_build.new_repo_task_id = 123
db.session.commit()
db_session.commit()
# Poll :)
hub = mock.MagicMock()
@@ -142,7 +141,7 @@ class TestPoller:
poller.poll()
# Refresh our module_build object.
module_build = models.ModuleBuild.query.get(3)
db_session.refresh(module_build)
if expect_start_build_component:
expected_state = koji.BUILD_STATES["BUILDING"]
@@ -160,7 +159,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_retrigger_new_repo_on_failure(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
"""
Tests that we call koji_sesion.newRepo when newRepo task failed.
@@ -180,10 +179,10 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
module_build.new_repo_task_id = 123456
db.session.commit()
db_session.commit()
hub = mock.MagicMock()
poller = MBSProducer(hub)
@@ -195,7 +194,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_trigger_new_repo_when_succeeded(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
"""
Tests that we do not call koji_sesion.newRepo when newRepo task
@@ -216,23 +215,23 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
module_build.new_repo_task_id = 123456
db.session.commit()
db_session.commit()
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.poll()
# Refresh our module_build object.
module_build = models.ModuleBuild.query.get(3)
db_session.refresh(module_build)
assert not koji_session.newRepo.called
assert module_build.new_repo_task_id == 123456
def test_process_paused_module_builds_waiting_for_repo(
self, create_builder, global_consumer, dbg
self, create_builder, global_consumer, dbg, db_session
):
"""
Tests that process_paused_module_builds does not start new batch
@@ -247,10 +246,10 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
module_build.new_repo_task_id = 123456
db.session.commit()
db_session.commit()
# Poll :)
hub = mock.MagicMock()
@@ -258,7 +257,7 @@ class TestPoller:
poller.poll()
# Refresh our module_build object.
module_build = models.ModuleBuild.query.get(3)
db_session.refresh(module_build)
# Components should not be in building state
components = module_build.current_batch()
@@ -268,7 +267,7 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_old_build_targets_are_not_associated_with_any_module_builds(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
consumer = mock.MagicMock()
consumer.incoming = queue.Queue()
@@ -283,16 +282,16 @@ class TestPoller:
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db.session)
poller.delete_old_koji_targets(conf, db_session)
koji_session.deleteBuildTarget.assert_not_called()
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_dont_delete_base_module_build_target(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
koji_session = ClientSession.return_value
# No created module build has any of these tags.
@@ -308,16 +307,16 @@ class TestPoller:
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db.session)
poller.delete_old_koji_targets(conf, db_session)
koji_session.deleteBuildTarget.assert_not_called()
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_dont_delete_build_target_for_unfinished_module_builds(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
koji_session = ClientSession.return_value
# No created module build has any of these tags.
@@ -331,31 +330,32 @@ class TestPoller:
# should not be deleted.
for state in ["init", "wait", "build"]:
module_build.state = state
db.session.commit()
db_session.commit()
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db.session)
poller.delete_old_koji_targets(conf, db_session)
koji_session.deleteBuildTarget.assert_not_called()
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_only_delete_build_target_with_allowed_koji_tag_prefix(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()
module_build_3 = models.ModuleBuild.query.filter_by(id=3).one()
module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)
# Only module build 1's build target should be deleted.
module_build_2.koji_tag = "module-tag1"
module_build_2.state = models.BUILD_STATES["done"]
# Ensure to exceed the koji_target_delete_time easily later for deletion
module_build_2.time_completed = datetime.utcnow() - timedelta(hours=24)
module_build_3 = models.ModuleBuild.get_by_id(db_session, 3)
module_build_3.koji_tag = "f28"
db.session.commit()
db.session.refresh(module_build_2)
db.session.refresh(module_build_3)
db_session.commit()
db_session.refresh(module_build_2)
db_session.refresh(module_build_3)
koji_session = ClientSession.return_value
# No created module build has any of these tags.
@@ -372,7 +372,7 @@ class TestPoller:
with patch.object(conf, "koji_target_delete_time", new=60):
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db.session)
poller.delete_old_koji_targets(conf, db_session)
koji_session.deleteBuildTarget.assert_called_once_with(1)
koji_session.krb_login.assert_called_once()
@@ -380,17 +380,17 @@ class TestPoller:
@patch.dict("sys.modules", krbV=mock.MagicMock())
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_cant_delete_build_target_if_not_reach_delete_time(
self, ClientSession, create_builder, global_consumer, dbg
self, ClientSession, create_builder, global_consumer, dbg, db_session
):
module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()
module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)
# Only module build 1's build target should be deleted.
module_build_2.koji_tag = "module-tag1"
module_build_2.state = models.BUILD_STATES["done"]
# Ensure to exceed the koji_target_delete_time easily later for deletion
module_build_2.time_completed = datetime.utcnow() - timedelta(minutes=5)
db.session.commit()
db.session.refresh(module_build_2)
db_session.commit()
db_session.refresh(module_build_2)
koji_session = ClientSession.return_value
# No created module build has any of these tags.
@@ -407,12 +407,14 @@ class TestPoller:
# enough for test.
hub = mock.MagicMock()
poller = MBSProducer(hub)
poller.delete_old_koji_targets(conf, db.session)
poller.delete_old_koji_targets(conf, db_session)
koji_session.deleteBuildTarget.assert_not_called()
@pytest.mark.parametrize("state", ["init", "wait"])
def test_process_waiting_module_build(self, create_builder, global_consumer, dbg, state):
def test_process_waiting_module_build(
self, create_builder, global_consumer, dbg, state, db_session
):
""" Test that processing old waiting module builds works. """
consumer = mock.MagicMock()
@@ -424,27 +426,29 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.state = models.BUILD_STATES[state]
original = datetime.utcnow() - timedelta(minutes=11)
module_build.time_modified = original
db.session.commit()
db.session.refresh(module_build)
db_session.commit()
db_session.refresh(module_build)
# Ensure the queue is empty before we start.
assert consumer.incoming.qsize() == 0
# Poll :)
poller.process_waiting_module_builds(db.session)
poller.process_waiting_module_builds(db_session)
assert consumer.incoming.qsize() == 1
module_build = models.ModuleBuild.query.get(3)
db_session.refresh(module_build)
# ensure the time_modified was changed.
assert module_build.time_modified > original
@pytest.mark.parametrize("state", ["init", "wait"])
def test_process_waiting_module_build_not_old_enough(
self, create_builder, global_consumer, dbg, state
self, create_builder, global_consumer, dbg, state, db_session
):
""" Test that we do not process young waiting builds. """
@@ -457,23 +461,26 @@ class TestPoller:
# Change the batch to 2, so the module build is in state where
# it is not building anything, but the state is "build".
module_build = models.ModuleBuild.query.get(3)
module_build = models.ModuleBuild.get_by_id(db_session, 3)
module_build.state = models.BUILD_STATES[state]
original = datetime.utcnow() - timedelta(minutes=9)
module_build.time_modified = original
db.session.commit()
db.session.refresh(module_build)
db_session.commit()
db_session.refresh(module_build)
# Ensure the queue is empty before we start.
assert consumer.incoming.qsize() == 0
# Poll :)
poller.process_waiting_module_builds(db.session)
poller.process_waiting_module_builds(db_session)
# Ensure we did *not* process the 9 minute-old build.
assert consumer.incoming.qsize() == 0
def test_process_waiting_module_build_none_found(self, create_builder, global_consumer, dbg):
def test_process_waiting_module_build_none_found(
self, create_builder, global_consumer, dbg, db_session
):
""" Test nothing happens when no module builds are waiting. """
consumer = mock.MagicMock()
@@ -487,7 +494,7 @@ class TestPoller:
assert consumer.incoming.qsize() == 0
# Poll :)
poller.process_waiting_module_builds(db.session)
poller.process_waiting_module_builds(db_session)
# Ensure we did *not* process any of the non-waiting builds.
assert consumer.incoming.qsize() == 0
@@ -634,21 +641,23 @@ class TestPoller:
@pytest.mark.parametrize("btime", (True, False))
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_sync_koji_build_tags(
self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime
self, ClientSession, create_builder, global_consumer, dbg, tagged, tagged_in_final, btime,
db_session
):
module_build_2 = models.ModuleBuild.query.filter_by(id=2).one()
module_build_2 = models.ModuleBuild.get_by_id(db_session, 2)
# Only module build 1's build target should be deleted.
module_build_2.koji_tag = "module-tag1"
module_build_2.state = models.BUILD_STATES["build"]
if btime:
module_build_2.time_modified = datetime.utcnow() - timedelta(minutes=12)
c = module_build_2.current_batch()[0]
c.state = koji.BUILD_STATES["COMPLETE"]
c.tagged_in_final = False
c.tagged = False
db.session.commit()
db.session.refresh(module_build_2)
db_session.commit()
db_session.refresh(module_build_2)
koji_session = ClientSession.return_value
# No created module build has any of these tags.
@@ -668,7 +677,9 @@ class TestPoller:
poller = MBSProducer(hub)
assert consumer.incoming.qsize() == 0
poller.sync_koji_build_tags(conf, db.session)
poller.sync_koji_build_tags(conf, db_session)
assert consumer.incoming.qsize() == len(ret)
expected_msg_tags = []
@@ -714,18 +725,18 @@ class TestPoller:
mock_gw.return_value = greenwave_result
poller.poll_greenwave(conf, db.session)
poller.poll_greenwave(conf, db_session)
mock_gw.assert_called_once()
module = models.ModuleBuild.query.filter_by(state=models.BUILD_STATES["ready"]).all()
modules = models.ModuleBuild.by_state(db_session, "ready")
if greenwave_result:
assert len(module) == 2
assert set([m.id for m in module]) == {1, 2}
assert len(modules) == 2
assert set([m.id for m in modules]) == {1, 2}
else:
assert len(module) == 1
assert module[0].id == 1
module = models.ModuleBuild.query.filter_by(state=models.BUILD_STATES["done"]).all()
assert len(module) == 1
assert module[0].id == 2
assert re.match("Gating failed.*", module[0].state_reason)
assert len(modules) == 1
assert modules[0].id == 1
modules = models.ModuleBuild.by_state(db_session, "done")
assert len(modules) == 1
assert modules[0].id == 2
assert re.match("Gating failed.*", modules[0].state_reason)

View File

@@ -25,7 +25,8 @@ import mock
import module_build_service.messaging
import module_build_service.scheduler.handlers.repos
import module_build_service.models
from tests import conf, db, scheduler_init_data
from module_build_service.models import ComponentBuild
from tests import conf, scheduler_init_data
class TestRepoDone:
@@ -39,7 +40,8 @@ class TestRepoDone:
from_repo_done_event.return_value = None
msg = module_build_service.messaging.KojiRepoChange(
"no matches for this...", "2016-some-nonexistent-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
@mock.patch(
"module_build_service.builder.KojiModuleBuilder."
@@ -76,7 +78,8 @@ class TestRepoDone:
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
build_fn.assert_called_once_with(
artifact_name="tangerine",
source=(
@@ -137,7 +140,8 @@ class TestRepoDone:
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
finalizer.assert_called_once()
@@ -177,7 +181,8 @@ class TestRepoDone:
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
build_fn.assert_called_once_with(
artifact_name="tangerine",
source=(
@@ -196,14 +201,17 @@ class TestRepoDone:
complete or go to the next build batch.
"""
scheduler_init_data(db_session, 1)
component_build = db_session.query(ComponentBuild).filter_by(package="tangerine").one()
component_build.tagged = False
db_session.commit()
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
component_build = (
module_build_service.models.ComponentBuild.query.filter_by(package="tangerine").one())
component_build.tagged = False
db.session.add(component_build)
db.session.commit()
module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
mock_log_info.assert_called_with(
"Ignoring repo regen, because not all components are tagged."
)
@@ -241,7 +249,8 @@ class TestRepoDone:
msg = module_build_service.messaging.KojiRepoChange(
"some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build")
module_build_service.scheduler.handlers.repos.done(config=conf, session=db_session, msg=msg)
module_build_service.scheduler.handlers.repos.done(
config=conf, db_session=db_session, msg=msg)
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2)
assert module_build.state == module_build_service.models.BUILD_STATES["failed"]

View File

@@ -21,25 +21,24 @@
# Written by Jan Kaluza <jkaluza@redhat.com>
import mock
import pytest
from mock import patch
import module_build_service.messaging
import module_build_service.scheduler.handlers.repos
import module_build_service.scheduler.handlers.tags
import module_build_service.models
from tests import reuse_component_init_data
from tests import conf, db
from tests import conf
import koji
import pytest
@pytest.mark.usefixtures("reuse_component_init_data")
class TestTagTagged:
def setup_method(self, test_method):
reuse_component_init_data()
@mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event")
def test_no_matching_module(self, from_tag_change_event):
def test_no_matching_module(self, from_tag_change_event, db_session):
""" Test that when a tag msg hits us and we have no match,
that we do nothing gracefully.
"""
@@ -47,9 +46,9 @@ class TestTagTagged:
msg = module_build_service.messaging.KojiTagChange(
"no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1")
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
def test_no_matching_artifact(self):
def test_no_matching_artifact(self, db_session):
""" Test that when a tag msg hits us and we have no match,
that we do nothing gracefully.
"""
@@ -60,7 +59,7 @@ class TestTagTagged:
"artifact-1.2-1",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
@patch(
"module_build_service.builder.GenericBuilder.default_buildroot_groups",
@@ -68,7 +67,7 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo(self, create_builder, koji_get_session, dbg):
def test_newrepo(self, create_builder, koji_get_session, dbg, db_session):
"""
Test that newRepo is called in the expected times.
"""
@@ -86,7 +85,7 @@ class TestTagTagged:
}
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
# Set previous components as COMPLETE and tagged.
module_build.batch = 1
@@ -102,7 +101,8 @@ class TestTagTagged:
elif c.package == "perl-List-Compare":
c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
c.state = koji.BUILD_STATES["COMPLETE"]
db.session.commit()
db_session.commit()
# Tag the first component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -112,7 +112,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg
config=conf, db_session=db_session, msg=msg
)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
@@ -122,7 +122,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg
config=conf, db_session=db_session, msg=msg
)
# newRepo should not be called, because there are still components
@@ -137,7 +137,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg
config=conf, db_session=db_session, msg=msg
)
# newRepo should not be called, because the component has not been
@@ -152,15 +152,14 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should be called now - all components have been tagged.
koji_session.newRepo.assert_called_once_with(
"module-testmodule-master-20170219191323-c40c156c-build")
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
db_session.refresh(module_build)
# newRepo task_id should be stored in database, so we can check its
# status later in poller.
@@ -172,7 +171,9 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_still_building_components(self, create_builder, koji_get_session, dbg):
def test_newrepo_still_building_components(
self, create_builder, koji_get_session, dbg, db_session
):
"""
Test that newRepo is called in the expected times.
"""
@@ -190,13 +191,14 @@ class TestTagTagged:
}
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
component = module_build_service.models.ComponentBuild.query.filter_by(
component = db_session.query(module_build_service.models.ComponentBuild).filter_by(
package="perl-Tangerine", module_id=module_build.id).one()
component.state = koji.BUILD_STATES["BUILDING"]
component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
db.session.commit()
db_session.commit()
# Tag the perl-List-Compare component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -206,7 +208,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -215,7 +217,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should not be called, because perl-List-Compare has not been
# built yet.
@@ -227,7 +229,7 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg):
def test_newrepo_failed_components(self, create_builder, koji_get_session, dbg, db_session):
"""
Test that newRepo is called in the expected times.
"""
@@ -245,7 +247,7 @@ class TestTagTagged:
}
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
# Set previous components as COMPLETE and tagged.
module_build.batch = 1
@@ -255,15 +257,18 @@ class TestTagTagged:
c.tagged_in_final = True
module_build.batch = 2
component = module_build_service.models.ComponentBuild.query.filter_by(
component = db_session.query(module_build_service.models.ComponentBuild).filter_by(
package="perl-Tangerine", module_id=module_build.id).one()
component.state = koji.BUILD_STATES["FAILED"]
component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
component = module_build_service.models.ComponentBuild.query.filter_by(
component = db_session.query(module_build_service.models.ComponentBuild).filter_by(
package="perl-List-Compare", module_id=module_build.id).one()
component.state = koji.BUILD_STATES["COMPLETE"]
component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
db.session.commit()
db_session.commit()
# Tag the perl-List-Compare component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -273,7 +278,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg
config=conf, db_session=db_session, msg=msg
)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
@@ -283,7 +288,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should be called now - all successfully built
# components have been tagged.
@@ -291,8 +296,7 @@ class TestTagTagged:
"module-testmodule-master-20170219191323-c40c156c-build")
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
db_session.refresh(module_build)
# newRepo task_id should be stored in database, so we can check its
# status later in poller.
@@ -304,7 +308,9 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_multiple_batches_tagged(self, create_builder, koji_get_session, dbg):
def test_newrepo_multiple_batches_tagged(
self, create_builder, koji_get_session, dbg, db_session
):
"""
Test that newRepo is called just once and only when all components
are tagged even if we tag components from the multiple batches in the
@@ -324,19 +330,21 @@ class TestTagTagged:
}
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
module_build.batch = 2
mbm = module_build_service.models.ComponentBuild.query.filter_by(
mbm = db_session.query(module_build_service.models.ComponentBuild).filter_by(
module_id=3, package="module-build-macros").one()
mbm.tagged = False
db.session.add(mbm)
for c in module_build.current_batch():
if c.package == "perl-Tangerine":
c.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
elif c.package == "perl-List-Compare":
c.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
c.state = koji.BUILD_STATES["COMPLETE"]
db.session.commit()
db_session.commit()
# Tag the first component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -346,7 +354,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -355,7 +363,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should not be called, because there are still components
# to tag.
@@ -369,7 +377,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the second component to final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -378,7 +386,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should not be called, because there are still components
# to tag.
@@ -392,7 +400,7 @@ class TestTagTagged:
"module-build-macros-0.1-1.module+0+b0a1d1f7",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the component from first batch to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -401,15 +409,14 @@ class TestTagTagged:
"module-build-macros-0.1-1.module+0+b0a1d1f7",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should be called now - all components have been tagged.
koji_session.newRepo.assert_called_once_with(
"module-testmodule-master-20170219191323-c40c156c-build")
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
db_session.refresh(module_build)
# newRepo task_id should be stored in database, so we can check its
# status later in poller.
@@ -421,7 +428,7 @@ class TestTagTagged:
)
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg):
def test_newrepo_build_time_only(self, create_builder, koji_get_session, dbg, db_session):
"""
Test the component.build_time_only is respected in tag handler.
"""
@@ -439,7 +446,7 @@ class TestTagTagged:
}
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
# Set previous components as COMPLETE and tagged.
module_build.batch = 1
@@ -451,18 +458,20 @@ class TestTagTagged:
c.tagged_in_final = True
module_build.batch = 2
component = module_build_service.models.ComponentBuild.query.filter_by(
component = db_session.query(module_build_service.models.ComponentBuild).filter_by(
package="perl-Tangerine", module_id=module_build.id).one()
component.state = koji.BUILD_STATES["COMPLETE"]
component.build_time_only = True
component.tagged = False
component.tagged_in_final = False
component.nvr = "perl-Tangerine-0.23-1.module+0+d027b723"
component = module_build_service.models.ComponentBuild.query.filter_by(
component = db_session.query(module_build_service.models.ComponentBuild).filter_by(
package="perl-List-Compare", module_id=module_build.id).one()
component.state = koji.BUILD_STATES["COMPLETE"]
component.nvr = "perl-List-Compare-0.53-5.module+0+d027b723"
db.session.commit()
db_session.commit()
# Tag the perl-Tangerine component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -472,7 +481,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
assert not koji_session.newRepo.called
# Tag the perl-List-Compare component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -482,7 +491,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the perl-List-Compare component to final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -491,7 +500,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# newRepo should be called now - all successfully built
# components have been tagged.
@@ -499,8 +508,7 @@ class TestTagTagged:
"module-testmodule-master-20170219191323-c40c156c-build")
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
db_session.refresh(module_build)
# newRepo task_id should be stored in database, so we can check its
# status later in poller.
@@ -522,7 +530,7 @@ class TestTagTagged:
@patch("module_build_service.builder.KojiModuleBuilder.KojiModuleBuilder.get_session")
@patch("module_build_service.builder.GenericBuilder.create_from_module")
def test_newrepo_not_duplicated(
self, create_builder, koji_get_session, dbg, task_state, expect_new_repo
self, create_builder, koji_get_session, dbg, task_state, expect_new_repo, db_session
):
"""
Test that newRepo is not called if a task is already in progress.
@@ -541,7 +549,7 @@ class TestTagTagged:
}
create_builder.return_value = builder
module_build = module_build_service.models.ModuleBuild.query.get(3)
module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 3)
assert module_build
# Set previous components as COMPLETE and tagged.
@@ -562,7 +570,7 @@ class TestTagTagged:
if task_state is not None:
module_build.new_repo_task_id = 123456
db.session.commit()
db_session.commit()
# Tag the first component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
@@ -572,7 +580,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the first component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -581,7 +589,7 @@ class TestTagTagged:
"perl-Tangerine-0.23-1.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the second component to the buildroot.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -590,7 +598,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# Tag the second component to the final tag.
msg = module_build_service.messaging.KojiTagChange(
"id",
@@ -599,7 +607,7 @@ class TestTagTagged:
"perl-List-Compare-0.53-5.module+0+d027b723",
)
module_build_service.scheduler.handlers.tags.tagged(
config=conf, session=db.session, msg=msg)
config=conf, db_session=db_session, msg=msg)
# All components are tagged, newRepo should be called if there are no active tasks.
if expect_new_repo:
@@ -609,8 +617,7 @@ class TestTagTagged:
assert not koji_session.newRepo.called
# Refresh our module_build object.
db.session.expunge(module_build)
module_build = module_build_service.models.ModuleBuild.query.filter_by(id=3).one()
db_session.refresh(module_build)
# newRepo task_id should be stored in database, so we can check its
# status later in poller.

View File

@@ -24,8 +24,6 @@
import json
from mock import patch, Mock
import pytest
from module_build_service import conf
from module_build_service.models import make_session
from module_build_service.utils.greenwave import greenwave
from tests import clean_database, make_module
@@ -36,7 +34,7 @@ class TestGreenwaveQuery():
clean_database()
@patch("module_build_service.utils.greenwave.requests")
def test_greenwave_query_decision(self, mock_requests):
def test_greenwave_query_decision(self, mock_requests, db_session):
resp_status = 200
resp_content = {
"applicable_policies": ["osci_compose_modules"],
@@ -61,9 +59,8 @@ class TestGreenwaveQuery():
response.status_code = resp_status
mock_requests.post.return_value = response
with make_session(conf) as db_session:
fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")
fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
got_response = greenwave.query_decision(fake_build, prod_version="xxxx-8")
assert got_response == resp_content
assert json.loads(mock_requests.post.call_args_list[0][1]["data"]) == {
@@ -157,7 +154,7 @@ class TestGreenwaveQuery():
@pytest.mark.parametrize("policies_satisfied", (True, False))
@patch("module_build_service.utils.greenwave.requests")
def test_greenwave_check_gating(self, mock_requests, policies_satisfied):
def test_greenwave_check_gating(self, mock_requests, policies_satisfied, db_session):
resp_status = 200
policies_content = {
"policies": [
@@ -179,8 +176,7 @@ class TestGreenwaveQuery():
mock_requests.get.return_value = responses[0]
mock_requests.post.side_effect = responses[1:]
with make_session(conf) as db_session:
fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
result = greenwave.check_gating(fake_build)
fake_build = make_module(db_session, "pkg:0.1:1:c1", requires_list={"platform": "el8"})
result = greenwave.check_gating(fake_build)
assert result == policies_satisfied

View File

@@ -20,7 +20,6 @@
from mock import patch, Mock
from module_build_service import conf
from module_build_service.models import make_session
from module_build_service.utils import ursine
from tests import make_module, clean_database
@@ -130,28 +129,28 @@ class TestGetModulemdsFromUrsineContent:
clean_database()
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession):
session = ClientSession.return_value
def test_return_empty_if_no_ursine_build_tag_is_found(self, ClientSession, db_session):
koji_session = ClientSession.return_value
# No module koji_tag in ursine content yet. This will result in an empty
# list of ursine modulemds being returned.
session.getFullInheritance.return_value = [{"name": "tag-1.0-build"}]
session.getExternalRepoList.return_value = [{
koji_session.getFullInheritance.return_value = [{"name": "tag-1.0-build"}]
koji_session.getExternalRepoList.return_value = [{
"external_repo_name": "tag-1.0-external-repo",
"url": "http://example.com/repos/tag-4-build/latest/$arch/",
}]
modulemds = ursine.get_modulemds_from_ursine_content("tag")
modulemds = ursine.get_modulemds_from_ursine_content(db_session, "tag")
assert [] == modulemds
@patch.object(conf, "koji_tag_prefixes", new=["module"])
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_get_modulemds(self, ClientSession):
session = ClientSession.return_value
def test_get_modulemds(self, ClientSession, db_session):
koji_session = ClientSession.return_value
# Ensure to get the build tag for further query of ursine content.
# For this test, the build tag is tag-4-build
session.getExternalRepoList.return_value = [{
koji_session.getExternalRepoList.return_value = [{
"external_repo_name": "tag-1.0-external-repo",
"url": "http://example.com/repos/tag-4-build/latest/$arch/",
}]
@@ -169,7 +168,7 @@ class TestGetModulemdsFromUrsineContent:
]
raise ValueError("{} is not handled by test.".format(tag))
session.getFullInheritance.side_effect = mock_getFullInheritance
koji_session.getFullInheritance.side_effect = mock_getFullInheritance
# Defaults to DB resolver, so create fake module builds and store them
# into database to ensure they can be queried.
@@ -181,25 +180,24 @@ class TestGetModulemdsFromUrsineContent:
# From the behavior of following code, the reason of the error is
# mixing use of db.session and make_session, the latter one is called
# from function ``get_modulemds_from_ursine_content``.
with make_session(conf) as db_session:
mmd_name1s2020c = make_module(
db_session,
"name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})
mmd_name2s2021c = make_module(
db_session,
"name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})
mmd_name1s2020c = make_module(
db_session,
"name1:s:2020:c", xmd={"mbs": {"koji_tag": "module-name1-s-2020-c"}})
mmd_name2s2021c = make_module(
db_session,
"name2:s:2021:c", xmd={"mbs": {"koji_tag": "module-name2-s-2021-c"}})
koji_tag = "tag" # It's ok to use arbitrary tag name.
with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):
modulemds = ursine.get_modulemds_from_ursine_content(koji_tag)
koji_tag = "tag" # It's ok to use arbitrary tag name.
with patch.object(conf, "koji_external_repo_url_prefix", new="http://example.com/"):
modulemds = ursine.get_modulemds_from_ursine_content(db_session, koji_tag)
test_nsvcs = [item.get_nsvc() for item in modulemds]
test_nsvcs.sort()
test_nsvcs = [item.get_nsvc() for item in modulemds]
test_nsvcs.sort()
expected_nsvcs = [mmd_name1s2020c.mmd().get_nsvc(), mmd_name2s2021c.mmd().get_nsvc()]
expected_nsvcs.sort()
expected_nsvcs = [mmd_name1s2020c.mmd().get_nsvc(), mmd_name2s2021c.mmd().get_nsvc()]
expected_nsvcs.sort()
session.getExternalRepoList.assert_called_once_with(koji_tag)
koji_session.getExternalRepoList.assert_called_once_with(koji_tag)
assert expected_nsvcs == test_nsvcs
@@ -216,7 +214,7 @@ class TestRecordStreamCollisionModules:
original_xmd = fake_mmd.get_xmd()
with patch.object(ursine, "log") as log:
ursine.handle_stream_collision_modules(fake_mmd)
ursine.handle_stream_collision_modules(db_session, fake_mmd)
assert 2 == log.info.call_count
find_stream_collision_modules.assert_not_called()
@@ -241,7 +239,7 @@ class TestRecordStreamCollisionModules:
get_modulemds_from_ursine_content.return_value = []
with patch.object(ursine, "log") as log:
ursine.handle_stream_collision_modules(fake_mmd)
ursine.handle_stream_collision_modules(db_session, fake_mmd)
assert 2 == log.info.call_count
# Ensure stream_collision_modules is set.
@@ -272,7 +270,7 @@ class TestRecordStreamCollisionModules:
}
fake_mmd = make_module(db_session, "name1:s:2020:c", xmd=xmd, store_to_db=False)
def mock_get_ursine_modulemds(koji_tag):
def mock_get_ursine_modulemds(db_session, koji_tag):
if koji_tag == "module-rhel-8.0-build":
return [
# This is the one
@@ -325,7 +323,7 @@ class TestRecordStreamCollisionModules:
koji_session = ClientSession.return_value
koji_session.listTaggedRPMS.side_effect = mock_listTaggedRPMS
ursine.handle_stream_collision_modules(fake_mmd)
ursine.handle_stream_collision_modules(db_session, fake_mmd)
xmd = fake_mmd.get_xmd()
buildrequires = xmd["mbs"]["buildrequires"]
@@ -346,9 +344,11 @@ class TestFindStreamCollisionModules:
"""Test ursine.find_stream_collision_modules"""
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_no_modulemds_found_from_ursine_content(self, get_modulemds_from_ursine_content):
def test_no_modulemds_found_from_ursine_content(
self, get_modulemds_from_ursine_content, db_session
):
get_modulemds_from_ursine_content.return_value = []
assert not ursine.find_stream_collision_modules({}, "koji_tag")
assert not ursine.find_stream_collision_modules(db_session, {}, "koji_tag")
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_no_collisions_found(self, get_modulemds_from_ursine_content, db_session):
@@ -358,7 +358,8 @@ class TestFindStreamCollisionModules:
make_module(db_session, "modules:2:1:c2", store_to_db=False),
make_module(db_session, "modulet:3:1:c3", store_to_db=False),
]
assert [] == ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")
assert [] == ursine.find_stream_collision_modules(
db_session, xmd_mbs_buildrequires, "koji_tag")
@patch("module_build_service.utils.ursine.get_modulemds_from_ursine_content")
def test_collision_modules_are_found(self, get_modulemds_from_ursine_content, db_session):
@@ -370,5 +371,6 @@ class TestFindStreamCollisionModules:
]
get_modulemds_from_ursine_content.return_value = fake_modules
modules = ursine.find_stream_collision_modules(xmd_mbs_buildrequires, "koji_tag")
modules = ursine.find_stream_collision_modules(
db_session, xmd_mbs_buildrequires, "koji_tag")
assert [fake_modules[1].get_nsvc()] == modules

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,7 @@ import pytest
import module_build_service.utils
from module_build_service import Modulemd, models
from module_build_service.errors import StreamAmbigous
from tests import db, clean_database, make_module, init_data, read_staged_data
from tests import clean_database, make_module, init_data, read_staged_data
class TestUtilsModuleStreamExpansion:
@@ -34,19 +34,17 @@ class TestUtilsModuleStreamExpansion:
def teardown_method(self, test_method):
clean_database()
def _get_mmds_required_by_module_recursively(self, module_build):
def _get_mmds_required_by_module_recursively(self, module_build, db_session):
"""
Convenience wrapper around get_mmds_required_by_module_recursively
returning the list with nsvc strings of modules returned by this the wrapped
method.
"""
mmd = module_build.mmd()
module_build_service.utils.expand_mse_streams(db.session, mmd)
modules = module_build_service.utils.get_mmds_required_by_module_recursively(mmd)
nsvcs = [
m.get_nsvc()
for m in modules
]
module_build_service.utils.expand_mse_streams(db_session, mmd)
modules = module_build_service.utils.get_mmds_required_by_module_recursively(
db_session, mmd)
nsvcs = [m.get_nsvc() for m in modules]
return nsvcs
def _generate_default_modules(self, db_session):
@@ -164,10 +162,10 @@ class TestUtilsModuleStreamExpansion:
if stream_ambigous:
with pytest.raises(StreamAmbigous):
module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd(), raise_if_stream_ambigous=True)
db_session, module_build.mmd(), raise_if_stream_ambigous=True)
else:
module_build_service.utils.generate_expanded_mmds(
db.session, module_build.mmd(), raise_if_stream_ambigous=True)
db_session, module_build.mmd(), raise_if_stream_ambigous=True)
# Check that if stream is ambigous and we define the stream, it does not raise
# an exception.
@@ -177,13 +175,13 @@ class TestUtilsModuleStreamExpansion:
name, stream = ns.split(":")
default_streams[name] = stream
module_build_service.utils.generate_expanded_mmds(
db.session,
db_session,
module_build.mmd(),
raise_if_stream_ambigous=True,
default_streams=default_streams,
)
mmds = module_build_service.utils.generate_expanded_mmds(db.session, module_build.mmd())
mmds = module_build_service.utils.generate_expanded_mmds(db_session, module_build.mmd())
buildrequires_per_mmd_xmd = set()
buildrequires_per_mmd_buildrequires = set()
@@ -322,7 +320,7 @@ class TestUtilsModuleStreamExpansion:
def test_get_required_modules_simple(self, requires, build_requires, expected, db_session):
module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)
self._generate_default_modules(db_session)
nsvcs = self._get_mmds_required_by_module_recursively(module_build)
nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)
assert set(nsvcs) == set(expected)
def _generate_default_modules_recursion(self, db_session):
@@ -367,7 +365,7 @@ class TestUtilsModuleStreamExpansion:
def test_get_required_modules_recursion(self, requires, build_requires, expected, db_session):
module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)
self._generate_default_modules_recursion(db_session)
nsvcs = self._get_mmds_required_by_module_recursively(module_build)
nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)
assert set(nsvcs) == set(expected)
def _generate_default_modules_modules_multiple_stream_versions(self, db_session):
@@ -399,10 +397,10 @@ class TestUtilsModuleStreamExpansion:
):
module_build = make_module(db_session, "app:1:0:c1", requires, build_requires)
self._generate_default_modules_modules_multiple_stream_versions(db_session)
nsvcs = self._get_mmds_required_by_module_recursively(module_build)
nsvcs = self._get_mmds_required_by_module_recursively(module_build, db_session)
assert set(nsvcs) == set(expected)
def test__get_base_module_mmds(self):
def test__get_base_module_mmds(self, db_session):
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=True)
mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2.yaml"))
@@ -415,7 +413,7 @@ class TestUtilsModuleStreamExpansion:
mmd.remove_dependencies(deps)
mmd.add_dependencies(new_deps)
mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)
mmds = module_build_service.utils.mse._get_base_module_mmds(db_session, mmd)
expected = set(["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"])
# Verify no duplicates were returned before doing set operations
assert len(mmds["ready"]) == len(expected)
@@ -440,7 +438,7 @@ class TestUtilsModuleStreamExpansion:
make_module(db_session, "platform:lp29.1.1:12:c11", {}, {}, virtual_streams=virtual_streams)
mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)
mmds = module_build_service.utils.mse._get_base_module_mmds(db_session, mmd)
if virtual_streams == ["f29"]:
expected = set(
["platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0", "platform:lp29.1.1"])
@@ -458,15 +456,16 @@ class TestUtilsModuleStreamExpansion:
"module_build_service.config.Config.allow_only_compatible_base_modules",
new_callable=PropertyMock, return_value=False
)
def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg):
def test__get_base_module_mmds_virtual_streams_only_major_versions(self, cfg, db_session):
"""Ensure the correct results are returned without duplicates."""
init_data(data_size=1, multiple_stream_versions=["foo28", "foo29", "foo30"])
# Mark platform:foo28 as garbage to test that it is still considered as compatible.
platform = models.ModuleBuild.query.filter_by(name="platform", stream="foo28").first()
platform = db_session.query(models.ModuleBuild).filter_by(
name="platform", stream="foo28").first()
platform.state = "garbage"
db.session.add(platform)
db.session.commit()
db_session.add(platform)
db_session.commit()
mmd = module_build_service.utils.load_mmd(read_staged_data("testmodule_v2"))
deps = mmd.get_dependencies()[0]
@@ -477,7 +476,7 @@ class TestUtilsModuleStreamExpansion:
mmd.remove_dependencies(deps)
mmd.add_dependencies(new_deps)
mmds = module_build_service.utils.mse._get_base_module_mmds(mmd)
mmds = module_build_service.utils.mse._get_base_module_mmds(db_session, mmd)
expected = {}
expected["ready"] = set(["platform:foo29", "platform:foo30"])
expected["garbage"] = set(["platform:foo28"])

View File

@@ -36,7 +36,7 @@ import pytest
import re
import sqlalchemy
from tests import app, init_data, clean_database, reuse_component_init_data, staged_data_filename
from tests import app, init_data, clean_database, staged_data_filename
from tests import read_staged_data
from tests.test_scm import base_dir as scm_base_dir
from module_build_service.errors import UnprocessableEntity
@@ -65,6 +65,7 @@ class FakeSCM(object):
commit=None,
checkout_raise=False,
get_latest_raise=False,
get_latest_error=None,
branch="master",
):
"""
@@ -96,8 +97,8 @@ class FakeSCM(object):
self.mocked_scm.return_value.name = self.name
self.mocked_scm.return_value.commit = self.commit
if get_latest_raise:
self.mocked_scm.return_value.get_latest.side_effect = UnprocessableEntity(
"Failed to get_latest commit")
self.mocked_scm.return_value.get_latest.side_effect = \
get_latest_error or UnprocessableEntity("Failed to get_latest commit")
else:
self.mocked_scm.return_value.get_latest = self.get_latest
self.mocked_scm.return_value.repository_root = "https://src.stg.fedoraproject.org/modules/"
@@ -245,8 +246,8 @@ class TestViews:
assert data["version"] == "2"
assert data["virtual_streams"] == []
@pytest.mark.usefixtures("reuse_component_init_data")
def test_query_build_with_br_verbose_mode(self):
reuse_component_init_data()
rv = self.client.get("/module-build-service/1/module-builds/2?verbose=true")
data = json.loads(rv.data)
assert data["base_module_buildrequires"] == [{
@@ -464,6 +465,7 @@ class TestViews:
for key, part in zip(nsvc_keys, nsvc_parts):
assert item[key] == part
@pytest.mark.usefixtures("reuse_component_init_data")
@patch("module_build_service.builder.KojiModuleBuilder.KojiClientSession")
def test_query_builds_with_binary_rpm(self, ClientSession):
"""
@@ -471,7 +473,6 @@ class TestViews:
which contain the rpm.
"""
# update database with builds which contain koji tags.
reuse_component_init_data()
mock_rpm_md = {"build_id": 1065871}
mock_tags = [
{"name": "module-testmodule-master-20170219191323-c40c156c"},
@@ -827,8 +828,8 @@ class TestViews:
}
assert error == expected
@pytest.mark.usefixtures("reuse_component_init_data")
def test_query_base_module_br_filters(self):
reuse_component_init_data()
mmd = load_mmd(read_staged_data("platform"))
mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
import_mmd(db.session, mmd)