diff --git a/module_build_service/builder/KojiModuleBuilder.py b/module_build_service/builder/KojiModuleBuilder.py index 48c8f3ed..7385619c 100644 --- a/module_build_service/builder/KojiModuleBuilder.py +++ b/module_build_service/builder/KojiModuleBuilder.py @@ -27,7 +27,7 @@ from module_build_service.builder.utils import execute_cmd from module_build_service.db_session import db_session from module_build_service.errors import ProgrammingError -from module_build_service.builder.base import GenericBuilder +from module_build_service.builder import GenericBuilder from module_build_service.builder.KojiContentGenerator import KojiContentGenerator from module_build_service.scheduler import events from module_build_service.utils import get_reusable_components, get_reusable_module, set_locale @@ -741,18 +741,18 @@ class KojiModuleBuilder(GenericBuilder): component_build.state_reason = "Found existing build" nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr) # Trigger a completed build message - further_work.append( - events.KojiBuildChange( - "recover_orphaned_artifact: fake message", - build["build_id"], - build["task_id"], - koji.BUILD_STATES["COMPLETE"], - component_build.package, - nvr_dict["version"], - nvr_dict["release"], - component_build.module_build.id, - ) - ) + further_work.append({ + "msg_id": "recover_orphaned_artifact: fake message", + "event": events.KOJI_BUILD_CHANGE, + "build_id": build["build_id"], + "task_id": build["task_id"], + "build_new_state": koji.BUILD_STATES["COMPLETE"], + "build_name": component_build.package, + "build_version": nvr_dict["version"], + "build_release": nvr_dict["release"], + "module_build_id": component_build.module_build.id, + "state_reason": None + }) component_tagged_in = [] if build_tagged: @@ -772,14 +772,13 @@ class KojiModuleBuilder(GenericBuilder): 'The build being skipped isn\'t tagged in the "{0}" tag. 
Will send a message to ' "the tag handler".format(tag) ) - further_work.append( - events.KojiTagChange( - "recover_orphaned_artifact: fake message", - tag, - component_build.package, - component_build.nvr, - ) - ) + further_work.append({ + "msg_id": "recover_orphaned_artifact: fake message", + "event": events.KOJI_TAG_CHANGE, + "tag_name": tag, + "build_name": component_build.package, + "build_nvr": component_build.nvr, + }) return further_work def build(self, artifact_name, source): diff --git a/module_build_service/builder/MockModuleBuilder.py b/module_build_service/builder/MockModuleBuilder.py index cba96844..ceed2827 100644 --- a/module_build_service/builder/MockModuleBuilder.py +++ b/module_build_service/builder/MockModuleBuilder.py @@ -16,7 +16,7 @@ import module_build_service.utils import module_build_service.scheduler import module_build_service.scheduler.consumer -from module_build_service.builder.base import GenericBuilder +from module_build_service.builder import GenericBuilder from module_build_service.builder.utils import ( create_local_repo_from_koji_tag, execute_cmd, @@ -26,6 +26,7 @@ from module_build_service.builder.utils import ( from module_build_service.utils.general import mmd_to_str from module_build_service.db_session import db_session from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder +from module_build_service.scheduler import events from module_build_service import models @@ -400,16 +401,18 @@ class MockModuleBuilder(GenericBuilder): # build_id=1 and task_id=1 are OK here, because we are building just # one RPM at the time. 
- msg = module_build_service.messaging.KojiBuildChange( - msg_id="a faked internal message", - build_id=build_id, - task_id=build_id, - build_name=nvr["name"], - build_new_state=state, - build_release=nvr["release"], - build_version=nvr["version"], - ) - module_build_service.scheduler.consumer.work_queue_put(msg) + module_build_service.scheduler.consumer.work_queue_put({ + "msg_id": "a faked internal message", + "event": events.KOJI_BUILD_CHANGE, + "build_id": build_id, + "task_id": build_id, + "build_name": nvr["name"], + "build_new_state": state, + "build_release": nvr["release"], + "build_version": nvr["version"], + "module_build_id": None, + "state_reason": None + }) def _save_log(self, resultsdir, log_name, artifact_name): old_log = os.path.join(resultsdir, log_name) diff --git a/module_build_service/errors.py b/module_build_service/errors.py index bc9a6ef3..68f44dc1 100644 --- a/module_build_service/errors.py +++ b/module_build_service/errors.py @@ -44,3 +44,7 @@ def json_error(status, error, message): response = jsonify({"status": status, "error": error, "message": message}) response.status_code = status return response + + +class IgnoreMessage(Exception): + """Raise if message received from message bus should be ignored""" diff --git a/module_build_service/models.py b/module_build_service/models.py index 3414ca1e..7ec35b6d 100644 --- a/module_build_service/models.py +++ b/module_build_service/models.py @@ -747,41 +747,15 @@ class ModuleBuild(MBSBase): return db_session.query(ModuleBuild).filter_by(state=BUILD_STATES[state]).all() @classmethod - def from_repo_done_event(cls, db_session, event): - """ Find the ModuleBuilds in our database that should be in-flight... - ... for a given koji tag. - - There should be at most one. 
- """ - if event.repo_tag.endswith("-build"): - tag = event.repo_tag[:-6] - else: - tag = event.repo_tag - query = ( - db_session.query(cls) - .filter(cls.koji_tag == tag) - .filter(cls.state == BUILD_STATES["build"]) + def get_by_tag(cls, db_session, tag_name): + tag = tag_name[:-6] if tag_name.endswith("-build") else tag_name + query = db_session.query(cls).filter( + cls.koji_tag == tag, + cls.state == BUILD_STATES["build"] ) - count = query.count() if count > 1: raise RuntimeError("%r module builds in flight for %r" % (count, tag)) - - return query.first() - - @classmethod - def from_tag_change_event(cls, db_session, event): - tag = event.tag[:-6] if event.tag.endswith("-build") else event.tag - query = ( - db_session.query(cls) - .filter(cls.koji_tag == tag) - .filter(cls.state == BUILD_STATES["build"]) - ) - - count = query.count() - if count > 1: - raise RuntimeError("%r module builds in flight for %r" % (count, tag)) - return query.first() def short_json(self, show_stream_version=False, show_scratch=True): @@ -1128,18 +1102,12 @@ class ComponentBuild(MBSBase): weight = db.Column(db.Float, default=0) @classmethod - def from_component_event(cls, db_session, event): - if isinstance(event, events.KojiBuildChange): - if event.module_build_id: - return ( - db_session.query(cls) - .filter_by(task_id=event.task_id, module_id=event.module_build_id) - .one() - ) - else: - return db_session.query(cls).filter(cls.task_id == event.task_id).first() + def from_component_event(cls, db_session, task_id, module_id=None): + _filter = db_session.query(cls).filter + if module_id is None: + return _filter(cls.task_id == task_id).first() else: - raise ValueError("%r is not a koji message." 
% event["topic"]) + return _filter(cls.task_id == task_id, cls.module_id == module_id).one() @classmethod def from_component_name(cls, db_session, component_name, module_id): diff --git a/module_build_service/scheduler/consumer.py b/module_build_service/scheduler/consumer.py index 84f8de83..5ff0c7cb 100644 --- a/module_build_service/scheduler/consumer.py +++ b/module_build_service/scheduler/consumer.py @@ -5,7 +5,6 @@ This class reads and processes messages from the message bus it is configured to use. """ -import inspect import itertools try: @@ -18,7 +17,6 @@ except ImportError: import koji import fedmsg.consumers import moksha.hub -import six import sqlalchemy.exc import module_build_service.messaging @@ -31,10 +29,10 @@ import module_build_service.monitor as monitor from module_build_service import models, log, conf from module_build_service.db_session import db_session +from module_build_service.errors import IgnoreMessage from module_build_service.messaging import default_messaging_backend -from module_build_service.scheduler.handlers import greenwave -from module_build_service.utils import module_build_state_from_msg from module_build_service.scheduler import events +from module_build_service.scheduler.handlers import greenwave class MBSConsumer(fedmsg.consumers.FedmsgConsumer): @@ -85,34 +83,32 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): msg = module_build_service.messaging._initial_messages.pop(0) self.incoming.put(msg) + from module_build_service.scheduler import handlers + # These are our main lookup tables for figuring out what to run in # response to what messaging events. 
- self.NO_OP = NO_OP = lambda msg: True + self.NO_OP = NO_OP = lambda *args, **kwargs: True self.on_build_change = { koji.BUILD_STATES["BUILDING"]: NO_OP, - koji.BUILD_STATES[ - "COMPLETE" - ]: module_build_service.scheduler.handlers.components.complete, - koji.BUILD_STATES["FAILED"]: module_build_service.scheduler.handlers.components.failed, - koji.BUILD_STATES[ - "CANCELED" - ]: module_build_service.scheduler.handlers.components.canceled, + koji.BUILD_STATES["COMPLETE"]: handlers.components.build_task_finalize, + koji.BUILD_STATES["FAILED"]: handlers.components.build_task_finalize, + koji.BUILD_STATES["CANCELED"]: handlers.components.build_task_finalize, koji.BUILD_STATES["DELETED"]: NO_OP, } self.on_module_change = { - models.BUILD_STATES["init"]: module_build_service.scheduler.handlers.modules.init, - models.BUILD_STATES["wait"]: module_build_service.scheduler.handlers.modules.wait, + models.BUILD_STATES["init"]: handlers.modules.init, + models.BUILD_STATES["wait"]: handlers.modules.wait, models.BUILD_STATES["build"]: NO_OP, - models.BUILD_STATES["failed"]: module_build_service.scheduler.handlers.modules.failed, - models.BUILD_STATES["done"]: module_build_service.scheduler.handlers.modules.done, + models.BUILD_STATES["failed"]: handlers.modules.failed, + models.BUILD_STATES["done"]: handlers.modules.done, # XXX: DIRECT TRANSITION TO READY models.BUILD_STATES["ready"]: NO_OP, models.BUILD_STATES["garbage"]: NO_OP, } # Only one kind of repo change event, though... 
- self.on_repo_change = module_build_service.scheduler.handlers.repos.done - self.on_tag_change = module_build_service.scheduler.handlers.tags.tagged - self.on_decision_update = module_build_service.scheduler.handlers.greenwave.decision_update + self.on_repo_change = handlers.repos.done + self.on_tag_change = handlers.tags.tagged + self.on_decision_update = handlers.greenwave.decision_update self.sanity_check() def shutdown(self): @@ -125,8 +121,8 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): def validate(self, message): if conf.messaging == "fedmsg": # If this is a faked internal message, don't bother. - if isinstance(message, events.BaseMessage): - log.info("Skipping crypto validation for %r" % message) + if "event" in message: + log.info("Skipping crypto validation for %r", message) return # Otherwise, if it is a real message from the network, pass it # through crypto validation. @@ -140,14 +136,18 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): # messages, then just use them as-is. If they are not already # instances of our message abstraction base class, then first transform # them before proceeding. - if isinstance(message, events.BaseMessage): - msg = message + if "event" in message: + event_info = message else: - msg = self.get_abstracted_msg(message) + try: + event_info = self.get_abstracted_event_info(message) + except IgnoreMessage as e: + log.warning(str(e)) + return # Primary work is done here. 
try: - self.process_message(msg) + self.process_message(event_info) monitor.messaging_rx_processed_ok_counter.inc() except sqlalchemy.exc.OperationalError as error: monitor.messaging_rx_failed_counter.inc() @@ -165,12 +165,13 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): if self.stop_condition and self.stop_condition(message): self.shutdown() - def get_abstracted_msg(self, message): + @staticmethod + def get_abstracted_event_info(message): parser = default_messaging_backend.get("parser") if parser: try: return parser.parse(message) - except events.IgnoreMessage: + except IgnoreMessage: pass else: raise ValueError("{0} backend does not define a message parser".format(conf.messaging)) @@ -185,77 +186,80 @@ class MBSConsumer(fedmsg.consumers.FedmsgConsumer): if koji.BUILD_STATES[state] not in self.on_build_change: raise KeyError("Koji build states %r not handled." % state) - all_fns = list(self.on_build_change.items()) + list(self.on_module_change.items()) - for key, callback in all_fns: - expected = ["msg"] - if six.PY2: - argspec = inspect.getargspec(callback)[0] - else: - argspec = inspect.getfullargspec(callback)[0] - if argspec != expected: - raise ValueError( - "Callback %r, state %r has argspec %r!=%r" % (callback, key, argspec, expected)) - - def _map_message(self, db_session, msg): + def _map_message(self, db_session, event_info): """Map message to its corresponding event handler and module build""" - if isinstance(msg, events.KojiBuildChange): - handler = self.on_build_change[msg.build_new_state] - build = models.ComponentBuild.from_component_event(db_session, msg) + event = event_info["event"] + + if event == events.KOJI_BUILD_CHANGE: + handler = self.on_build_change[event_info["build_new_state"]] + build = models.ComponentBuild.from_component_event( + db_session, event_info["task_id"], event_info["module_build_id"]) if build: build = build.module_build return handler, build - if isinstance(msg, events.KojiRepoChange): + if event == 
events.KOJI_REPO_CHANGE: return ( self.on_repo_change, - models.ModuleBuild.from_repo_done_event(db_session, msg) + models.ModuleBuild.get_by_tag(db_session, event_info["repo_tag"]) ) - if isinstance(msg, events.KojiTagChange): + if event == events.KOJI_TAG_CHANGE: return ( self.on_tag_change, - models.ModuleBuild.from_tag_change_event(db_session, msg) + models.ModuleBuild.get_by_tag(db_session, event_info["tag_name"]) ) - if isinstance(msg, events.MBSModule): + if event == events.MBS_MODULE_STATE_CHANGE: + state = event_info["module_build_state"] + valid_module_build_states = list(models.BUILD_STATES.values()) + if state not in valid_module_build_states: + raise ValueError("state={}({}) is not in {}.".format( + state, type(state), valid_module_build_states + )) return ( - self.on_module_change[module_build_state_from_msg(msg)], - models.ModuleBuild.from_module_event(db_session, msg) + self.on_module_change[state], + models.ModuleBuild.get_by_id( + db_session, event_info["module_build_id"]) ) - if isinstance(msg, events.GreenwaveDecisionUpdate): + if event == events.GREENWAVE_DECISION_UPDATE: return ( self.on_decision_update, - greenwave.get_corresponding_module_build(msg.subject_identifier) + greenwave.get_corresponding_module_build(event_info["subject_identifier"]) ) return None, None - def process_message(self, msg): + def process_message(self, event_info): # Choose a handler for this message - handler, build = self._map_message(db_session, msg) + handler, build = self._map_message(db_session, event_info) if handler is None: - log.debug("No event handler associated with msg %s", msg.msg_id) + log.debug("No event handler associated with msg %s", event_info["msg_id"]) return - idx = "%s: %s, %s" % (handler.__name__, type(msg).__name__, msg.msg_id) + idx = "%s: %s, %s" % ( + handler.__name__, event_info["event"], event_info["msg_id"]) if handler is self.NO_OP: log.debug("Handler is NO_OP: %s", idx) return if not build: - log.debug("No module associated with msg 
%s", msg.msg_id) + log.debug("No module associated with msg %s", event_info["msg_id"]) return MBSConsumer.current_module_build_id = build.id log.info("Calling %s", idx) + kwargs = event_info.copy() + kwargs.pop("event") + try: - further_work = handler(msg) or [] + further_work = handler(**kwargs) or [] except Exception as e: log.exception("Could not process message handler.") db_session.rollback() @@ -306,6 +310,9 @@ def work_queue_put(msg): def fake_repo_done_message(tag_name): - msg = events.KojiRepoChange( - msg_id="a faked internal message", repo_tag=tag_name + "-build") - work_queue_put(msg) + event_info = { + "msg_id": "a faked internal message", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": tag_name + "-build" + } + work_queue_put(event_info) diff --git a/module_build_service/scheduler/events.py b/module_build_service/scheduler/events.py index 70501f67..2a1ee401 100644 --- a/module_build_service/scheduler/events.py +++ b/module_build_service/scheduler/events.py @@ -1,151 +1,19 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT -try: - from inspect import signature -except ImportError: - from funcsigs import signature +""" +This module defines constant for events emitted by external services that work +with MBS together to complete a module build. +The event name is defined in general as much as possible, especially for the +events from Koji. Because some instance based on Koji, like Brew, might send +messages to different topics on different message bus. For example, when a +build is complete, Koji sends a message to topic buildsys.build.state.change, +however Brew sends to topic brew.build.complete, etc. +""" -class IgnoreMessage(Exception): - pass - - -class BaseMessage(object): - def __init__(self, msg_id): - """ - A base class to abstract messages from different backends - :param msg_id: the id of the msg (e.g. 
2016-SomeGUID) - """ - self.msg_id = msg_id - - # Moksha calls `consumer.validate` on messages that it receives, and - # even though we have validation turned off in the config there's still - # a step that tries to access `msg['body']`, `msg['topic']` and - # `msg.get('topic')`. - # These are here just so that the `validate` method won't raise an - # exception when we push our fake messages through. - # Note that, our fake message pushing has worked for a while... but the - # *latest* version of fedmsg has some code that exercises the bug. I - # didn't hit this until I went to test in jenkins. - self.body = {} - self.topic = None - - def __repr__(self): - init_sig = signature(self.__init__) - - args_strs = ( - "{}={!r}".format(name, getattr(self, name)) - if param.default != param.empty - else repr(getattr(self, name)) - for name, param in init_sig.parameters.items() - ) - - return "{}({})".format(type(self).__name__, ", ".join(args_strs)) - - def __getitem__(self, key): - """ Used to trick moksha into thinking we are a dict. """ - return getattr(self, key) - - def __setitem__(self, key, value): - """ Used to trick moksha into thinking we are a dict. """ - return setattr(self, key, value) - - def get(self, key, value=None): - """ Used to trick moksha into thinking we are a dict. """ - return getattr(self, key, value) - - def __json__(self): - return dict(msg_id=self.msg_id, topic=self.topic, body=self.body) - - -class KojiBuildChange(BaseMessage): - """ A class that inherits from BaseMessage to provide a message - object for a build's info (in fedmsg this replaces the msg dictionary) - :param msg_id: the id of the msg (e.g. 2016-SomeGUID) - :param build_id: the id of the build (e.g. 264382) - :param build_new_state: the new build state, this is currently a Koji - integer - :param build_name: the name of what is being built - (e.g. golang-googlecode-tools) - :param build_version: the version of the build (e.g. 
6.06.06) - :param build_release: the release of the build (e.g. 4.fc25) - :param module_build_id: the optional id of the module_build in the database - :param state_reason: the optional reason as to why the state changed - """ - - def __init__( - self, - msg_id, - build_id, - task_id, - build_new_state, - build_name, - build_version, - build_release, - module_build_id=None, - state_reason=None, - ): - if task_id is None: - raise IgnoreMessage("KojiBuildChange with a null task_id is invalid.") - super(KojiBuildChange, self).__init__(msg_id) - self.build_id = build_id - self.task_id = task_id - self.build_new_state = build_new_state - self.build_name = build_name - self.build_version = build_version - self.build_release = build_release - self.module_build_id = module_build_id - self.state_reason = state_reason - - -class KojiTagChange(BaseMessage): - """ - A class that inherits from BaseMessage to provide a message - object for a buildsys.tag info (in fedmsg this replaces the msg dictionary) - :param tag: the name of tag (e.g. module-123456789-build) - :param artifact: the name of tagged artifact (e.g. module-build-macros) - :param nvr: the nvr of the tagged artifact - """ - - def __init__(self, msg_id, tag, artifact, nvr): - super(KojiTagChange, self).__init__(msg_id) - self.tag = tag - self.artifact = artifact - self.nvr = nvr - - -class KojiRepoChange(BaseMessage): - """ A class that inherits from BaseMessage to provide a message - object for a repo's info (in fedmsg this replaces the msg dictionary) - :param msg_id: the id of the msg (e.g. 2016-SomeGUID) - :param repo_tag: the repo's tag (e.g. SHADOWBUILD-f25-build) - """ - - def __init__(self, msg_id, repo_tag): - super(KojiRepoChange, self).__init__(msg_id) - self.repo_tag = repo_tag - - -class MBSModule(BaseMessage): - """ A class that inherits from BaseMessage to provide a message - object for a module event generated by module_build_service - :param msg_id: the id of the msg (e.g. 
2016-SomeGUID) - :param module_build_id: the id of the module build - :param module_build_state: the state of the module build - """ - - def __init__(self, msg_id, module_build_id, module_build_state): - super(MBSModule, self).__init__(msg_id) - self.module_build_id = module_build_id - self.module_build_state = module_build_state - - -class GreenwaveDecisionUpdate(BaseMessage): - """A class representing message send to topic greenwave.decision.update""" - - def __init__(self, msg_id, decision_context, policies_satisfied, subject_identifier): - super(GreenwaveDecisionUpdate, self).__init__(msg_id) - self.decision_context = decision_context - self.policies_satisfied = policies_satisfied - self.subject_identifier = subject_identifier +KOJI_BUILD_CHANGE = "koji_build_change" +KOJI_TAG_CHANGE = "koji_tag_change" +KOJI_REPO_CHANGE = "koji_repo_change" +MBS_MODULE_STATE_CHANGE = "mbs_module_state_change" +GREENWAVE_DECISION_UPDATE = "greenwave_decision_update" diff --git a/module_build_service/scheduler/handlers/components.py b/module_build_service/scheduler/handlers/components.py index a70e6498..1b14af2d 100644 --- a/module_build_service/scheduler/handlers/components.py +++ b/module_build_service/scheduler/handlers/components.py @@ -4,42 +4,67 @@ import logging import koji -import module_build_service.builder from module_build_service import conf, models, log +from module_build_service.builder import GenericBuilder from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder -from module_build_service.scheduler import events from module_build_service.utils.general import mmd_to_str from module_build_service.db_session import db_session +from module_build_service.scheduler import events +from module_build_service.utils.batches import continue_batch_build logging.basicConfig(level=logging.DEBUG) -def _finalize(msg, state): - """ Called whenever a koji build completes or fails. 
""" +def build_task_finalize( + msg_id, build_id, task_id, build_new_state, + build_name, build_version, build_release, + module_build_id=None, state_reason=None +): + """Called when corresponding Koji build task of a component build finishes + + When a task finishes, the task could be in state COMPLETE, FAILED or CANCELED. + + :param str msg_id: the original id of the message being handled which is + received from the message bus. + :param int build_id: the Koji build id. + :param int task_id: the Koji build task id. + :param int build_new_state: the state of the build. Refer to + ``koji.BUILD_STATES`` for details. For this handler, values could be + the corresponding integer value of COMPLETE, FAILED or CANCELED. + :param str build_name: the build name. + :param str build_version: the build version. + :param str build_release: the build release. + :param int module_build_id: optionally set when this event handler is + scheduled from internal rather than just handling the received message. + When set, the value should be the id of module build having the + component build just built by the finished task. + :param str state_reason: optional. When set a reason explicitly, the + corresponding component build will have this reason as the + ``state_reason``. Otherwise, a custom reason will be set for a failed + build. + """ # First, find our ModuleBuild associated with this component, if any. - component_build = models.ComponentBuild.from_component_event(db_session, msg) - try: - nvr = "{}-{}-{}".format(msg.build_name, msg.build_version, msg.build_release) - except KeyError: - nvr = None + component_build = models.ComponentBuild.from_component_event( + db_session, task_id, module_id=module_build_id) + nvr = "{}-{}-{}".format(build_name, build_version, build_release) if not component_build: - log.debug("We have no record of %s" % nvr) + log.debug("We have no record of %s", nvr) return - log.info("Saw relevant component build of %r from %r." 
% (nvr, msg.msg_id)) + log.info("Saw relevant component build of %r from %r.", nvr, msg_id) - if msg.state_reason: - state_reason = msg.state_reason - elif state != koji.BUILD_STATES["COMPLETE"]: - state_reason = "Failed to build artifact %s in Koji" % (msg.build_name) + if state_reason: + state_reason = state_reason + elif build_new_state != koji.BUILD_STATES["COMPLETE"]: + state_reason = "Failed to build artifact {} in Koji".format(build_name) else: state_reason = "" # Mark the state in the db. - component_build.state = state + component_build.state = build_new_state component_build.nvr = nvr component_build.state_reason = state_reason db_session.commit() @@ -47,7 +72,8 @@ def _finalize(msg, state): parent = component_build.module_build # If the macro build failed, then the module is doomed. - if component_build.package == "module-build-macros" and state != koji.BUILD_STATES["COMPLETE"]: + if (component_build.package == "module-build-macros" + and build_new_state != koji.BUILD_STATES["COMPLETE"]): parent.transition( db_session, conf, @@ -61,7 +87,7 @@ def _finalize(msg, state): if ( component_build.buildonly and conf.system in ["koji", "test"] - and state == koji.BUILD_STATES["COMPLETE"] + and build_new_state == koji.BUILD_STATES["COMPLETE"] ): koji_session = KojiModuleBuilder.get_session(conf) rpms = koji_session.listBuildRPMs(component_build.nvr) @@ -85,9 +111,7 @@ def _finalize(msg, state): failed_components_in_batch = [c for c in parent_current_batch if c.is_unsuccessful] built_components_in_batch = [c for c in parent_current_batch if c.is_completed] - builder = module_build_service.builder.GenericBuilder.create_from_module( - db_session, parent, conf - ) + builder = GenericBuilder.create_from_module(db_session, parent, conf) if failed_components_in_batch: log.info( @@ -110,10 +134,11 @@ def _finalize(msg, state): # The repository won't be regenerated in this case and therefore we generate fake repo # change message here. log.info("Batch done. 
No component to tag") - further_work += [ - events.KojiRepoChange( - "components::_finalize: fake msg", builder.module_build_tag["name"]) - ] + further_work += [{ + "msg_id": "components::_finalize: fake msg", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": builder.module_build_tag["name"], + }] else: built_component_nvrs_in_batch = [c.nvr for c in built_components_in_batch] # tag && add to srpm-build group if neccessary @@ -148,21 +173,7 @@ def _finalize(msg, state): # done in repos.py:done(...), but because we have just finished one # build, try to call continue_batch_build again so in case we hit the # threshold previously, we will submit another build from this batch. - builder = module_build_service.builder.GenericBuilder.create_from_module( - db_session, parent, conf) - further_work += module_build_service.utils.continue_batch_build( - conf, parent, builder) + builder = GenericBuilder.create_from_module(db_session, parent, conf) + further_work += continue_batch_build(conf, parent, builder) return further_work - - -def complete(msg): - return _finalize(msg, state=koji.BUILD_STATES["COMPLETE"]) - - -def failed(msg): - return _finalize(msg, state=koji.BUILD_STATES["FAILED"]) - - -def canceled(msg): - return _finalize(msg, state=koji.BUILD_STATES["CANCELED"]) diff --git a/module_build_service/scheduler/handlers/greenwave.py b/module_build_service/scheduler/handlers/greenwave.py index cabe2aad..3ed5be9f 100644 --- a/module_build_service/scheduler/handlers/greenwave.py +++ b/module_build_service/scheduler/handlers/greenwave.py @@ -31,36 +31,38 @@ def get_corresponding_module_build(nvr): return ModuleBuild.get_by_id(db_session, module_build_id) -def decision_update(msg): +def decision_update(msg_id, decision_context, subject_identifier, policies_satisfied): """Move module build to ready or failed according to Greenwave result - :param config: the config object returned from function :func:`init_config`, - which is loaded from configuration file. 
- :type config: :class:`Config` - :param msg: the message object representing a message received from topic - ``greenwave.decision.update``. - :type msg: :class:`GreenwaveDecisionUpdate` + :param str msg_id: the original id of the message being handled which is + received from the message bus. + :param str decision_context: the context of the greewave decision. Refer to + the messaging document for detailed information. + :param str subject_identifier: usually a build NVR. Refer to + https://docs.pagure.org/greenwave/messaging.html for detailed information. + :param bool policies_satisfied: whether the build satisfies Greenwave rules. + Refer to the messaging document for detailed information. """ if not conf.greenwave_decision_context: log.debug( "Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT " "configured", - msg.msg_id, + msg_id, ) return - if msg.decision_context != conf.greenwave_decision_context: + if decision_context != conf.greenwave_decision_context: log.debug( "Skip Greenwave message %s as MBS only handles messages with the " 'decision context "%s"', - msg.msg_id, + msg_id, conf.greenwave_decision_context, ) return - module_build_nvr = msg.subject_identifier + module_build_nvr = subject_identifier - if not msg.policies_satisfied: + if not policies_satisfied: log.debug( "Skip to handle module build %s because it has not satisfied Greenwave policies.", module_build_nvr, @@ -87,8 +89,7 @@ def decision_update(msg): log.warning( "Module build %s is not in done state but Greenwave tells " "it passes tests in decision context %s", - module_build_nvr, - msg.decision_context, + module_build_nvr, decision_context, ) db_session.commit() diff --git a/module_build_service/scheduler/handlers/modules.py b/module_build_service/scheduler/handlers/modules.py index 887c77af..4cabfbe4 100644 --- a/module_build_service/scheduler/handlers/modules.py +++ b/module_build_service/scheduler/handlers/modules.py @@ -3,7 +3,6 @@ """ Handlers for module 
change events on the message bus. """ from module_build_service import conf, models, log, build_logs -import module_build_service.builder import module_build_service.resolver import module_build_service.utils from module_build_service.utils import ( @@ -15,12 +14,13 @@ from module_build_service.utils import ( record_module_build_arches ) from module_build_service.db_session import db_session +from module_build_service.builder import GenericBuilder from module_build_service.errors import UnprocessableEntity, Forbidden, ValidationError -from module_build_service.scheduler import events from module_build_service.utils.greenwave import greenwave from module_build_service.scheduler.default_modules import ( add_default_modules, handle_collisions_with_base_module_rpms) from module_build_service.utils.submit import format_mmd +from module_build_service.scheduler import events from module_build_service.utils.ursine import handle_stream_collision_modules from requests.exceptions import ConnectionError @@ -40,27 +40,29 @@ def get_artifact_from_srpm(srpm_path): return os.path.basename(srpm_path).replace(".src.rpm", "") -def failed(msg): - """ - Called whenever a module enters the 'failed' state. +def failed(msg_id, module_build_id, module_build_state): + """Called whenever a module enters the 'failed' state. We cancel all the remaining component builds of a module and stop the building. + + :param str msg_id: the original id of the message being handled, which is + received from the message bus. + :param int module_build_id: the module build id. + :param int module_build_state: the module build state. 
""" + build = models.ModuleBuild.get_by_id(db_session, module_build_id) - build = models.ModuleBuild.from_module_event(db_session, msg) - - if build.state != msg.module_build_state: + if build.state != module_build_state: log.warning( "Note that retrieved module state %r doesn't match message module state %r", - build.state, msg.module_build_state, + build.state, module_build_state, ) # This is ok.. it's a race condition we can ignore. pass if build.koji_tag: - builder = module_build_service.builder.GenericBuilder.create_from_module( - db_session, build, conf) + builder = GenericBuilder.create_from_module(db_session, build, conf) if build.new_repo_task_id: builder.cancel_build(build.new_repo_task_id) @@ -94,22 +96,27 @@ def failed(msg): db_session.commit() build_logs.stop(build) - module_build_service.builder.GenericBuilder.clear_cache(build) + GenericBuilder.clear_cache(build) -def done(msg): +def done(msg_id, module_build_id, module_build_state): """Called whenever a module enters the 'done' state. We currently don't do anything useful, so moving to ready. Except for scratch module builds, which remain in the done state. Otherwise the done -> ready state should happen when all dependent modules were re-built, at least that's the current plan. + + :param str msg_id: the original id of the message being handled, which is + received from the message bus. + :param int module_build_id: the module build id. + :param int module_build_state: the module build state. """ - build = models.ModuleBuild.from_module_event(db_session, msg) - if build.state != msg.module_build_state: + build = models.ModuleBuild.get_by_id(db_session, module_build_id) + if build.state != module_build_state: log.warning( "Note that retrieved module state %r doesn't match message module state %r", - build.state, msg.module_build_state, + build.state, module_build_state, ) # This is ok.. it's a race condition we can ignore. 
pass @@ -126,15 +133,21 @@ def done(msg): db_session.commit() build_logs.stop(build) - module_build_service.builder.GenericBuilder.clear_cache(build) + GenericBuilder.clear_cache(build) -def init(msg): - """ Called whenever a module enters the 'init' state.""" +def init(msg_id, module_build_id, module_build_state): + """Called whenever a module enters the 'init' state. + + :param str msg_id: the original id of the message being handled, which is + received from message bus. + :param int module_build_id: the module build id. + :param int module_build_state: the module build state. + """ # Sleep for a few seconds to make sure the module in the database is committed # TODO: Remove this once messaging is implemented in SQLAlchemy hooks for i in range(3): - build = models.ModuleBuild.from_module_event(db_session, msg) + build = models.ModuleBuild.get_by_id(db_session, module_build_id) if build: break time.sleep(1) @@ -297,7 +310,7 @@ def get_content_generator_build_koji_tag(module_deps): return conf.koji_cg_default_build_tag -def wait(msg): +def wait(msg_id, module_build_id, module_build_state): """ Called whenever a module enters the 'wait' state. We transition to this state shortly after a modulebuild is first requested. @@ -305,6 +318,11 @@ def wait(msg): All we do here is request preparation of the buildroot. The kicking off of individual component builds is handled elsewhere, in module_build_service.schedulers.handlers.repos. + + :param str msg_id: the original id of the message being handled which is + received from the message bus. + :param int module_build_id: the module build id. + :param int module_build_state: the module build state. 
""" # Wait for the db on the frontend to catch up to the message, otherwise the @@ -312,7 +330,7 @@ def wait(msg): # See https://pagure.io/fm-orchestrator/issue/386 @module_build_service.utils.retry(interval=10, timeout=120, wait_on=RuntimeError) def _get_build_containing_xmd_for_mbs(): - build = models.ModuleBuild.from_module_event(db_session, msg) + build = models.ModuleBuild.get_by_id(db_session, module_build_id) if "mbs" in build.mmd().get_xmd(): return build db_session.expire(build) @@ -323,10 +341,10 @@ def wait(msg): log.info("Found build=%r from message" % build) log.debug("%r", build.modulemd) - if build.state != msg.module_build_state: + if build.state != module_build_state: log.warning( "Note that retrieved module state %r doesn't match message module state %r", - build.state, msg.module_build_state, + build.state, module_build_state, ) # This is ok.. it's a race condition we can ignore. pass @@ -365,8 +383,7 @@ def wait(msg): "It is disabled to tag module build during importing into Koji by Content Generator.") log.debug("Skip to assign Content Generator build koji tag to module build.") - builder = module_build_service.builder.GenericBuilder.create_from_module( - db_session, build, conf) + builder = GenericBuilder.create_from_module(db_session, build, conf) log.debug( "Adding dependencies %s into buildroot for module %s:%s:%s", @@ -381,10 +398,11 @@ def wait(msg): db_session.commit() # Return a KojiRepoChange message so that the build can be transitioned to done # in the repos handler - return [ - events.KojiRepoChange( - "handlers.modules.wait: fake msg", builder.module_build_tag["name"]) - ] + return [{ + "msg_id": "handlers.modules.wait: fake msg", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": builder.module_build_tag["name"], + }] # If all components in module build will be reused, we don't have to build # module-build-macros, because there won't be any build done. 
@@ -457,8 +475,9 @@ def wait(msg): build.new_repo_task_id = task_id db_session.commit() else: - further_work.append( - events.KojiRepoChange( - "fake msg", builder.module_build_tag["name"]) - ) + further_work.append({ + "msg_id": "fake msg", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": builder.module_build_tag["name"], + }) return further_work diff --git a/module_build_service/scheduler/handlers/repos.py b/module_build_service/scheduler/handlers/repos.py index 96d927fb..c880874b 100644 --- a/module_build_service/scheduler/handlers/repos.py +++ b/module_build_service/scheduler/handlers/repos.py @@ -2,26 +2,30 @@ # SPDX-License-Identifier: MIT """ Handlers for repo change events on the message bus. """ -import module_build_service.builder import logging from datetime import datetime from module_build_service import conf, models, log +from module_build_service.builder import GenericBuilder from module_build_service.utils import start_next_batch_build from module_build_service.db_session import db_session logging.basicConfig(level=logging.DEBUG) -def done(msg): - """ Called whenever koji rebuilds a repo, any repo. """ +def done(msg_id, repo_tag): + """Called whenever koji rebuilds a repo, any repo. + + :param str msg_id: the original id of the message being handled which is + received from the message bus. + :param str repo_tag: the tag name from which the repo is generated. + """ # First, find our ModuleBuild associated with this repo, if any. 
- tag = msg.repo_tag - if conf.system in ("koji", "test") and not tag.endswith("-build"): - log.debug("Tag %r does not end with '-build' suffix, ignoring" % tag) + if conf.system in ("koji", "test") and not repo_tag.endswith("-build"): + log.debug("Tag %r does not end with '-build' suffix, ignoring", repo_tag) return - tag = tag[:-6] if tag.endswith("-build") else tag - module_build = models.ModuleBuild.from_repo_done_event(db_session, msg) + tag = repo_tag[:-6] if repo_tag.endswith("-build") else repo_tag + module_build = models.ModuleBuild.get_by_tag(db_session, repo_tag) if not module_build: log.debug("No module build found associated with koji tag %r" % tag) return @@ -75,10 +79,8 @@ def done(msg): log.warning("Odd! All components in batch failed for %r." % module_build) return - groups = module_build_service.builder.GenericBuilder.default_buildroot_groups( - db_session, module_build) - - builder = module_build_service.builder.GenericBuilder.create( + groups = GenericBuilder.default_buildroot_groups(db_session, module_build) + builder = GenericBuilder.create( db_session, module_build.owner, module_build, diff --git a/module_build_service/scheduler/handlers/tags.py b/module_build_service/scheduler/handlers/tags.py index 81f6b138..5e3c363c 100644 --- a/module_build_service/scheduler/handlers/tags.py +++ b/module_build_service/scheduler/handlers/tags.py @@ -2,38 +2,45 @@ # SPDX-License-Identifier: MIT """ Handlers for repo change events on the message bus. """ -import module_build_service.builder import logging import koji from module_build_service import conf, models, log from module_build_service.db_session import db_session +from module_build_service.builder import GenericBuilder from module_build_service.scheduler import events logging.basicConfig(level=logging.DEBUG) -def tagged(msg): - """ Called whenever koji tags a build to tag. """ +def tagged(msg_id, tag_name, build_name, build_nvr): + """Called whenever koji tags a build to tag. 
+ + :param str msg_id: the original id of the message being handled which is + received from the message bus. + :param str tag_name: the tag name applied. + :param str build_name: name of the tagged build. + :param str build_nvr: nvr of the tagged build. + """ if conf.system not in ("koji", "test"): return [] # Find our ModuleBuild associated with this tagged artifact. - tag = msg.tag - module_build = models.ModuleBuild.from_tag_change_event(db_session, msg) + module_build = models.ModuleBuild.get_by_tag(db_session, tag_name) if not module_build: - log.debug("No module build found associated with koji tag %r" % tag) + log.debug("No module build found associated with koji tag %r", tag_name) return # Find tagged component. - component = models.ComponentBuild.from_component_nvr(db_session, msg.nvr, module_build.id) + component = models.ComponentBuild.from_component_nvr( + db_session, build_nvr, module_build.id) if not component: - log.error("No component %s in module %r", msg.nvr, module_build) + log.error("No component %s in module %r", build_nvr, module_build) return - log.info("Saw relevant component tag of %r from %r." % (component.nvr, msg.msg_id)) + log.info("Saw relevant component tag of %r from %r.", component.nvr, msg_id) # Mark the component as tagged - if tag.endswith("-build"): + if tag_name.endswith("-build"): component.tagged = True else: component.tagged_in_final = True @@ -42,7 +49,7 @@ def tagged(msg): if any(c.is_unbuilt for c in module_build.current_batch()): log.info( "Not regenerating repo for tag %s, there are still building components in a batch", - tag, + tag_name, ) return [] @@ -50,7 +57,7 @@ def tagged(msg): # If all components are tagged, start newRepo task. 
if not any(c.is_completed and not c.is_tagged for c in module_build.up_to_current_batch()): - builder = module_build_service.builder.GenericBuilder.create_from_module( + builder = GenericBuilder.create_from_module( db_session, module_build, conf) if any(c.is_unbuilt for c in module_build.component_builds): @@ -70,10 +77,11 @@ def tagged(msg): # would be useless to wait for a repository we will not use anyway. log.info( "All components in module tagged and built, skipping the last repo regeneration") - further_work += [ - events.KojiRepoChange( - "components::_finalize: fake msg", builder.module_build_tag["name"]) - ] + further_work += [{ + "msg_id": "components::_finalize: fake msg", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": builder.module_build_tag["name"], + }] db_session.commit() return further_work diff --git a/module_build_service/scheduler/parser.py b/module_build_service/scheduler/parser.py index 3ed95d24..987ec6d0 100644 --- a/module_build_service/scheduler/parser.py +++ b/module_build_service/scheduler/parser.py @@ -4,6 +4,7 @@ import re from module_build_service import log +from module_build_service.errors import IgnoreMessage from module_build_service.scheduler import events @@ -70,49 +71,60 @@ class FedmsgMessageParser(MessageParser): return if object == "build" and subobject == "state" and event == "change": - build_id = msg_inner_msg.get("build_id") task_id = msg_inner_msg.get("task_id") - build_new_state = msg_inner_msg.get("new") - build_name = msg_inner_msg.get("name") - build_version = msg_inner_msg.get("version") - build_release = msg_inner_msg.get("release") - - return events.KojiBuildChange( - msg_id, - build_id, - task_id, - build_new_state, - build_name, - build_version, - build_release, - ) + if task_id is None: + raise IgnoreMessage( + "Ignore message {}, which has a null task_id.".format(msg_id)) + return { + "msg_id": msg_id, + "event": events.KOJI_BUILD_CHANGE, + "build_id": msg_inner_msg.get("build_id"), + "task_id": task_id, + 
"build_new_state": msg_inner_msg.get("new"), + "build_name": msg_inner_msg.get("name"), + "build_version": msg_inner_msg.get("version"), + "build_release": msg_inner_msg.get("release"), + "module_build_id": None, + "state_reason": None, + } if object == "repo" and subobject is None and event == "done": - repo_tag = msg_inner_msg.get("tag") - return events.KojiRepoChange(msg_id, repo_tag) + return { + "msg_id": msg_id, + "event": events.KOJI_REPO_CHANGE, + "repo_tag": msg_inner_msg.get("tag") + } if event == "tag": - tag = msg_inner_msg.get("tag") name = msg_inner_msg.get("name") version = msg_inner_msg.get("version") release = msg_inner_msg.get("release") nvr = None if name and version and release: nvr = "-".join((name, version, release)) - return events.KojiTagChange(msg_id, tag, name, nvr) + return { + "msg_id": msg_id, + "event": events.KOJI_TAG_CHANGE, + "tag_name": msg_inner_msg.get("tag"), + "build_name": msg_inner_msg.get("name"), + "build_nvr": nvr, + } if (category == "mbs" and object == "module" and subobject == "state" and event == "change"): - return events.MBSModule( - msg_id, - msg_inner_msg.get("id"), - msg_inner_msg.get("state")) + return { + "msg_id": msg_id, + "event": events.MBS_MODULE_STATE_CHANGE, + "module_build_id": msg_inner_msg.get("id"), + "module_build_state": msg_inner_msg.get("state"), + } if (category == "greenwave" and object == "decision" and subobject is None and event == "update"): - return events.GreenwaveDecisionUpdate( - msg_id=msg_id, - decision_context=msg_inner_msg.get("decision_context"), - policies_satisfied=msg_inner_msg.get("policies_satisfied"), - subject_identifier=msg_inner_msg.get("subject_identifier"), - ) + return { + "msg_id": msg_id, + "event": events.GREENWAVE_DECISION_UPDATE, + "decision_context": msg_inner_msg.get("decision_context"), + "policies_satisfied": msg_inner_msg.get("policies_satisfied"), + "subject_identifier": msg_inner_msg.get("subject_identifier"), + } diff --git 
a/module_build_service/scheduler/producer.py b/module_build_service/scheduler/producer.py index 0f8d190d..ae0a29d0 100644 --- a/module_build_service/scheduler/producer.py +++ b/module_build_service/scheduler/producer.py @@ -16,9 +16,9 @@ import module_build_service.scheduler.consumer from module_build_service import conf, models, log from module_build_service.builder import GenericBuilder from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder -from module_build_service.scheduler import events from module_build_service.utils.greenwave import greenwave from module_build_service.db_session import db_session +from module_build_service.scheduler import events class MBSProducer(PollingProducer): @@ -110,15 +110,18 @@ class MBSProducer(PollingProducer): log.info(" task {0!r} is in state {1!r}".format(task_id, task_info["state"])) if task_info["state"] in state_mapping: # Fake a fedmsg message on our internal queue - msg = events.KojiBuildChange( - msg_id="producer::fail_lost_builds fake msg", - build_id=component_build.task_id, - task_id=component_build.task_id, - build_name=component_build.package, - build_new_state=state_mapping[task_info["state"]], - build_release=build_release, - build_version=build_version, - ) + msg = { + "msg_id": "producer::fail_lost_builds fake msg", + "event": events.KOJI_BUILD_CHANGE, + "build_id": component_build.task_id, + "task_id": component_build.task_id, + "build_new_state": state_mapping[task_info["state"]], + "build_name": component_build.package, + "build_release": build_release, + "build_version": build_version, + "module_build_id": None, + "state_reason": None + } module_build_service.scheduler.consumer.work_queue_put(msg) elif conf.system == "mock": @@ -215,9 +218,13 @@ class MBSProducer(PollingProducer): # Fake a message to kickstart the build anew in the consumer state = module_build_service.models.BUILD_STATES[state_name] - msg = events.MBSModule( - "nudge_module_builds_fake_message", build.id, state) - 
log.info(" Scheduling faked event %r" % msg) + msg = { + "msg_id": "nudge_module_builds_fake_message", + "event": events.MBS_MODULE_STATE_CHANGE, + "module_build_id": build.id, + "module_build_state": state, + } + log.info(" Scheduling faked event %r", msg) module_build_service.scheduler.consumer.work_queue_put(msg) db_session.commit() @@ -437,19 +444,28 @@ class MBSProducer(PollingProducer): # If it is tagged in final tag, but MBS does not think so, # schedule fake message. if not c.tagged_in_final and module_build.koji_tag in tags: - msg = events.KojiTagChange( - "sync_koji_build_tags_fake_message", module_build.koji_tag, c.package, c.nvr - ) - log.info(" Scheduling faked event %r" % msg) + msg = { + "msg_id": "sync_koji_build_tags_fake_message", + "event": events.KOJI_TAG_CHANGE, + "tag_name": module_build.koji_tag, + "build_name": c.package, + "build_nvr": c.nvr, + } + log.info(" Scheduling faked event %r", msg) module_build_service.scheduler.consumer.work_queue_put(msg) # If it is tagged in the build tag, but MBS does not think so, # schedule fake message. 
build_tag = module_build.koji_tag + "-build" if not c.tagged and build_tag in tags: - msg = events.KojiTagChange( - "sync_koji_build_tags_fake_message", build_tag, c.package, c.nvr) - log.info(" Scheduling faked event %r" % msg) + msg = { + "msg_id": "sync_koji_build_tags_fake_message", + "event": events.KOJI_TAG_CHANGE, + "tag_name": build_tag, + "build_name": c.package, + "build_nvr": c.nvr, + } + log.info(" Scheduling faked event %r", msg) module_build_service.scheduler.consumer.work_queue_put(msg) def poll_greenwave(self, config): diff --git a/module_build_service/utils/batches.py b/module_build_service/utils/batches.py index 2e95e2f7..6e343aae 100644 --- a/module_build_service/utils/batches.py +++ b/module_build_service/utils/batches.py @@ -5,7 +5,7 @@ import concurrent.futures from module_build_service import conf, log, models from module_build_service.db_session import db_session -from module_build_service.scheduler.events import KojiRepoChange +from module_build_service.scheduler import events from .reuse import get_reusable_components, reuse_component @@ -274,9 +274,11 @@ def start_next_batch_build(config, module, builder, components=None): # If all the components were reused in the batch then make a KojiRepoChange # message and return if components_reused and not unbuilt_components_after_reuse: - further_work.append( - KojiRepoChange("start_build_batch: fake msg", builder.module_build_tag["name"]) - ) + further_work.append({ + "msg_id": "start_build_batch: fake msg", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": builder.module_build_tag["name"], + }) return further_work return further_work + continue_batch_build( diff --git a/module_build_service/utils/general.py b/module_build_service/utils/general.py index 24c41902..78e02155 100644 --- a/module_build_service/utils/general.py +++ b/module_build_service/utils/general.py @@ -646,7 +646,8 @@ def get_build_arches(mmd, config): :return list of architectures """ # Imported here to allow import of utils in 
GenericBuilder. - import module_build_service.builder + from module_build_service.builder import GenericBuilder + nsvc = mmd.get_nsvc() # At first, handle BASE_MODULE_ARCHES - this overrides any other option. @@ -698,8 +699,7 @@ def get_build_arches(mmd, config): ) if not module_obj: continue - arches = module_build_service.builder.GenericBuilder.get_module_build_arches( - module_obj) + arches = GenericBuilder.get_module_build_arches(module_obj) if arches: log.info("Setting build arches of %s to %r based on the buildrequired " "module %r." % (nsvc, arches, module_obj)) diff --git a/module_build_service/utils/reuse.py b/module_build_service/utils/reuse.py index 92aed78d..c1cf472a 100644 --- a/module_build_service/utils/reuse.py +++ b/module_build_service/utils/reuse.py @@ -5,7 +5,7 @@ import kobo.rpmlib from module_build_service import log, models, conf from module_build_service.db_session import db_session from module_build_service.resolver import GenericResolver -from module_build_service.scheduler.events import KojiBuildChange +from module_build_service.scheduler import events from module_build_service.utils.mse import get_base_module_mmds @@ -34,29 +34,29 @@ def reuse_component(component, previous_component_build, change_state_now=False) component.state = previous_component_build.state else: # Use BUILDING state here, because we want the state to change to - # COMPLETE by the fake KojiBuildChange message we are generating - # few lines below. If we would set it to the right state right - # here, we would miss the code path handling the KojiBuildChange - # which works only when switching from BUILDING to COMPLETE. + # COMPLETE by scheduling an internal buildsys.build.state.change message + # we are generating few lines below. + # If we would set it to the right state right here, we would miss the + # code path handling that event which works only when switching from + # BUILDING to COMPLETE. 
component.state = koji.BUILD_STATES["BUILDING"] component.state_reason = "Reused component from previous module build" component.nvr = previous_component_build.nvr nvr_dict = kobo.rpmlib.parse_nvr(component.nvr) # Add this message to further_work so that the reused # component will be tagged properly - return [ - KojiBuildChange( - msg_id="reuse_component: fake msg", - build_id=None, - task_id=component.task_id, - build_new_state=previous_component_build.state, - build_name=nvr_dict["name"], - build_version=nvr_dict["version"], - build_release=nvr_dict["release"], - module_build_id=component.module_id, - state_reason=component.state_reason, - ) - ] + return [{ + "msg_id": "reuse_component: fake msg", + "event": events.KOJI_BUILD_CHANGE, + "build_id": None, + "task_id": component.task_id, + "build_new_state": previous_component_build.state, + "build_name": nvr_dict["name"], + "build_version": nvr_dict["version"], + "build_release": nvr_dict["release"], + "module_build_id": component.module_id, + "state_reason": component.state_reason, + }] def get_reusable_module(module): diff --git a/module_build_service/utils/submit.py b/module_build_service/utils/submit.py index 392c780c..7c279a09 100644 --- a/module_build_service/utils/submit.py +++ b/module_build_service/utils/submit.py @@ -394,7 +394,7 @@ def record_component_builds( mmd, module, initial_batch=1, previous_buildorder=None, main_mmd=None ): # Imported here to allow import of utils in GenericBuilder. - import module_build_service.builder + from module_build_service.builder import GenericBuilder # When main_mmd is set, merge the metadata from this mmd to main_mmd, # otherwise our current mmd is main_mmd. 
@@ -436,7 +436,7 @@ def record_component_builds( # Get map of packages that have SRPM overrides srpm_overrides = get_module_srpm_overrides(module) - rpm_weights = module_build_service.builder.GenericBuilder.get_build_weights( + rpm_weights = GenericBuilder.get_build_weights( [c.get_name() for c in rpm_components] ) all_components.sort(key=lambda x: x.get_buildorder()) diff --git a/tests/test_build/test_build.py b/tests/test_build/test_build.py index 5dd8b26b..3659490d 100644 --- a/tests/test_build/test_build.py +++ b/tests/test_build/test_build.py @@ -17,6 +17,7 @@ import module_build_service.utils from module_build_service.errors import Forbidden from module_build_service import models, conf, build_logs from module_build_service.db_session import db_session +from module_build_service.scheduler import events from module_build_service.scheduler.local import make_simple_stop_condition from mock import patch, PropertyMock, Mock, MagicMock @@ -27,9 +28,8 @@ import pytest import json import itertools -from module_build_service.builder.base import GenericBuilder +from module_build_service.builder import GenericBuilder from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder -from module_build_service.scheduler import events from tests import ( app, clean_database, read_staged_data, staged_data_filename ) @@ -232,32 +232,43 @@ class FakeModuleBuilder(GenericBuilder): return {"name": self.tag_name + "-build"} def _send_repo_done(self): - msg = events.KojiRepoChange( - msg_id="a faked internal message", repo_tag=self.tag_name + "-build") - module_build_service.scheduler.consumer.work_queue_put(msg) + event_info = { + "msg_id": "a faked internal message", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": self.tag_name + "-build", + } + module_build_service.scheduler.consumer.work_queue_put(event_info) def _send_tag(self, artifact, nvr, dest_tag=True): if dest_tag: tag = self.tag_name else: tag = self.tag_name + "-build" - msg = events.KojiTagChange( - 
msg_id="a faked internal message", tag=tag, artifact=artifact, nvr=nvr) - module_build_service.scheduler.consumer.work_queue_put(msg) + event_info = { + "msg_id": "a faked internal message", + "event": events.KOJI_TAG_CHANGE, + "tag_name": tag, + "build_name": artifact, + "build_nvr": nvr + } + module_build_service.scheduler.consumer.work_queue_put(event_info) def _send_build_change(self, state, name, build_id): # build_id=1 and task_id=1 are OK here, because we are building just # one RPM at the time. - msg = events.KojiBuildChange( - msg_id="a faked internal message", - build_id=build_id, - task_id=build_id, - build_name=name, - build_new_state=state, - build_release="1", - build_version="1", - ) - module_build_service.scheduler.consumer.work_queue_put(msg) + event_info = { + "msg_id": "a faked internal message", + "event": events.KOJI_BUILD_CHANGE, + "build_id": build_id, + "task_id": build_id, + "build_name": name, + "build_new_state": state, + "build_release": "1", + "build_version": "1", + "module_build_id": None, + "state_reason": None + } + module_build_service.scheduler.consumer.work_queue_put(event_info) def build(self, artifact_name, source): print("Starting building artifact %s: %s" % (artifact_name, source)) @@ -298,27 +309,26 @@ class FakeModuleBuilder(GenericBuilder): component_build.state_reason = "Found existing build" nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr) # Send a message stating the build is complete - msgs.append( - events.KojiBuildChange( - "recover_orphaned_artifact: fake message", - randint(1, 9999999), - component_build.task_id, - koji.BUILD_STATES["COMPLETE"], - component_build.package, - nvr_dict["version"], - nvr_dict["release"], - component_build.module_build.id, - ) - ) + msgs.append({ + "msg_id": "recover_orphaned_artifact: fake message", + "event": events.KOJI_BUILD_CHANGE, + "build_id": randint(1, 9999999), + "task_id": component_build.task_id, + "build_new_state": koji.BUILD_STATES["COMPLETE"], + "build_name": 
component_build.package, + "build_version": nvr_dict["version"], + "build_release": nvr_dict["release"], + "module_build_id": component_build.module_build.id, + "state_reason": None + }) # Send a message stating that the build was tagged in the build tag - msgs.append( - events.KojiTagChange( - "recover_orphaned_artifact: fake message", - component_build.module_build.koji_tag + "-build", - component_build.package, - component_build.nvr, - ) - ) + msgs.append({ + "msg_id": "recover_orphaned_artifact: fake message", + "event": events.KOJI_TAG_CHANGE, + "tag_name": component_build.module_build.koji_tag + "-build", + "build_name": component_build.package, + "build_nvr": component_build.nvr, + }) return msgs def finalize(self, succeeded=None): @@ -1092,7 +1102,14 @@ class TestBuild(BaseTestBuild): from module_build_service.db_session import db_session # Create a dedicated database session for scheduler to avoid hang - self.run_scheduler(msgs=[events.MBSModule("local module build", 3, 1)]) + self.run_scheduler( + msgs=[{ + "msg_id": "local module build", + "event": events.MBS_MODULE_STATE_CHANGE, + "module_build_id": 3, + "module_build_state": 1 + }] + ) reused_component_ids = { "module-build-macros": None, @@ -1171,7 +1188,14 @@ class TestBuild(BaseTestBuild): FakeModuleBuilder.on_buildroot_add_artifacts_cb = on_buildroot_add_artifacts_cb - self.run_scheduler(msgs=[events.MBSModule("local module build", 3, 1)]) + self.run_scheduler( + msgs=[{ + "msg_id": "local module build", + "event": events.MBS_MODULE_STATE_CHANGE, + "module_build_id": 3, + "module_build_state": 1 + }] + ) # All components should be built and module itself should be in "done" # or "ready" state. 
@@ -1741,11 +1765,11 @@ class TestBuild(BaseTestBuild): # Simulate a random repo regen message that MBS didn't expect cleanup_moksha() module = models.ModuleBuild.get_by_id(db_session, module_build_id) - msgs = [ - events.KojiRepoChange( - msg_id="a faked internal message", repo_tag=module.koji_tag + "-build" - ) - ] + events_info = [{ + "msg_id": "a faked internal message", + "event": events.KOJI_REPO_CHANGE, + "repo_tag": module.koji_tag + "-build" + }] db_session.expire_all() # Stop after processing the seeded message @@ -1753,7 +1777,7 @@ class TestBuild(BaseTestBuild): db_session.remove() return True - self.run_scheduler(msgs, stop_condition=stop) + self.run_scheduler(events_info, stop_condition=stop) # Make sure the module build didn't fail so that the poller can resume it later module = models.ModuleBuild.get_by_id(db_session, module_build_id) diff --git a/tests/test_builder/test_koji.py b/tests/test_builder/test_koji.py index 605a0bc8..a7aebafc 100644 --- a/tests/test_builder/test_koji.py +++ b/tests/test_builder/test_koji.py @@ -13,11 +13,11 @@ from collections import OrderedDict import module_build_service.messaging import module_build_service.scheduler.handlers.repos import module_build_service.models -import module_build_service.builder from module_build_service import Modulemd from module_build_service.db_session import db_session -from module_build_service.utils.general import mmd_to_str +from module_build_service.builder import GenericBuilder from module_build_service.scheduler import events +from module_build_service.utils.general import mmd_to_str import pytest from mock import patch, MagicMock @@ -110,7 +110,7 @@ class TestKojiBuilder: """ Test that when a repo msg hits us and we have no match, that we do nothing gracefully. 
""" - repo = module_build_service.builder.GenericBuilder.tag_to_repo( + repo = GenericBuilder.tag_to_repo( "koji", self.config, "module-base-runtime-0.25-9", "x86_64" ) assert repo == ( @@ -151,20 +151,24 @@ class TestKojiBuilder: db_session.commit() assert len(actual) == 3 - assert type(actual[0]) == events.KojiBuildChange - assert actual[0].build_id == 91 - assert actual[0].task_id == 12345 - assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"] - assert actual[0].build_name == "rubygem-rails" - assert actual[0].build_version == "1.0" - assert actual[0].build_release == "1.module+e0095747" - assert actual[0].module_build_id == 4 - assert type(actual[1]) == events.KojiTagChange - assert actual[1].tag == "module-foo-build" - assert actual[1].artifact == "rubygem-rails" - assert type(actual[2]) == events.KojiTagChange - assert actual[2].tag == "module-foo" - assert actual[2].artifact == "rubygem-rails" + + assert actual[0]["event"] == events.KOJI_BUILD_CHANGE + assert actual[0]["build_id"] == 91 + assert actual[0]["task_id"] == 12345 + assert actual[0]["build_new_state"] == koji.BUILD_STATES["COMPLETE"] + assert actual[0]["build_name"] == "rubygem-rails" + assert actual[0]["build_version"] == "1.0" + assert actual[0]["build_release"] == "1.module+e0095747" + assert actual[0]["module_build_id"] == 4 + + assert actual[1]["event"] == events.KOJI_TAG_CHANGE + assert actual[1]["tag_name"] == "module-foo-build" + assert actual[1]["build_name"] == "rubygem-rails" + + assert actual[2]["event"] == events.KOJI_TAG_CHANGE + assert actual[2]["tag_name"] == "module-foo" + assert actual[2]["build_name"] == "rubygem-rails" + assert component_build.state == koji.BUILD_STATES["COMPLETE"] assert component_build.task_id == 12345 assert component_build.state_reason == "Found existing build" @@ -206,14 +210,14 @@ class TestKojiBuilder: db_session.commit() assert len(actual) == 1 - assert type(actual[0]) == events.KojiBuildChange - assert actual[0].build_id == 91 - assert 
actual[0].task_id == 12345 - assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"] - assert actual[0].build_name == "rubygem-rails" - assert actual[0].build_version == "1.0" - assert actual[0].build_release == "1.{0}".format(dist_tag) - assert actual[0].module_build_id == 4 + assert actual[0]["event"] == events.KOJI_BUILD_CHANGE + assert actual[0]["build_id"] == 91 + assert actual[0]["task_id"] == 12345 + assert actual[0]["build_new_state"] == koji.BUILD_STATES["COMPLETE"] + assert actual[0]["build_name"] == "rubygem-rails" + assert actual[0]["build_version"] == "1.0" + assert actual[0]["build_release"] == "1.{0}".format(dist_tag) + assert actual[0]["module_build_id"] == 4 assert component_build.state == koji.BUILD_STATES["COMPLETE"] assert component_build.task_id == 12345 assert component_build.state_reason == "Found existing build" @@ -260,14 +264,14 @@ class TestKojiBuilder: db_session.commit() assert len(actual) == 1 - assert type(actual[0]) == events.KojiBuildChange - assert actual[0].build_id == 91 - assert actual[0].task_id == 12345 - assert actual[0].build_new_state == koji.BUILD_STATES["COMPLETE"] - assert actual[0].build_name == "module-build-macros" - assert actual[0].build_version == "1.0" - assert actual[0].build_release == "1.{0}".format(dist_tag) - assert actual[0].module_build_id == 4 + assert actual[0]["event"] == events.KOJI_BUILD_CHANGE + assert actual[0]["build_id"] == 91 + assert actual[0]["task_id"] == 12345 + assert actual[0]["build_new_state"] == koji.BUILD_STATES["COMPLETE"] + assert actual[0]["build_name"] == "module-build-macros" + assert actual[0]["build_version"] == "1.0" + assert actual[0]["build_release"] == "1.{0}".format(dist_tag) + assert actual[0]["module_build_id"] == 4 assert component_build.state == koji.BUILD_STATES["COMPLETE"] assert component_build.task_id == 12345 assert component_build.state_reason == "Found existing build" diff --git a/tests/test_messaging.py b/tests/test_messaging.py index 3a6f9244..d0159d13 
100644 --- a/tests/test_messaging.py +++ b/tests/test_messaging.py @@ -27,10 +27,10 @@ class TestFedmsgMessaging: } parser = FedmsgMessageParser(messaging.known_fedmsg_services) - msg = parser.parse(buildsys_state_change_msg) + event_info = parser.parse(buildsys_state_change_msg) - assert msg.build_id == 614503 - assert msg.build_new_state == 1 + assert event_info["build_id"] == 614503 + assert event_info["build_new_state"] == 1 def test_buildsys_tag(self): # https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134 @@ -52,10 +52,10 @@ class TestFedmsgMessaging: } parser = FedmsgMessageParser(messaging.known_fedmsg_services) - msg = parser.parse(buildsys_tag_msg) + event_info = parser.parse(buildsys_tag_msg) - assert msg.tag == "module-debugging-tools-master-20170405115403-build" - assert msg.artifact == "module-build-macros" + assert event_info["tag_name"] == "module-debugging-tools-master-20170405115403-build" + assert event_info["build_name"] == "module-build-macros" def test_buildsys_repo_done(self): # https://fedora-fedmsg.readthedocs.io/en/latest/topics.html#id134 @@ -72,6 +72,6 @@ class TestFedmsgMessaging: } parser = FedmsgMessageParser(messaging.known_fedmsg_services) - msg = parser.parse(buildsys_tag_msg) + event_info = parser.parse(buildsys_tag_msg) - assert msg.repo_tag == "module-f0f7e44f3c6cccab-build" + assert event_info["repo_tag"] == "module-f0f7e44f3c6cccab-build" diff --git a/tests/test_scheduler/test_consumer.py b/tests/test_scheduler/test_consumer.py index 3e608a7f..5a92c884 100644 --- a/tests/test_scheduler/test_consumer.py +++ b/tests/test_scheduler/test_consumer.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- # SPDX-License-Identifier: MIT from mock import patch, MagicMock +from module_build_service.scheduler import events from module_build_service.scheduler.consumer import MBSConsumer -from module_build_service.scheduler.events import KojiTagChange, KojiRepoChange class TestConsumer: @@ -35,11 +35,11 @@ class TestConsumer: "release": 
"1.el7", }, } - msg_obj = consumer.get_abstracted_msg(msg) - assert isinstance(msg_obj, KojiTagChange) - assert msg_obj.msg_id == msg["msg_id"] - assert msg_obj.tag == msg["msg"]["tag"] - assert msg_obj.artifact == msg["msg"]["name"] + event_info = consumer.get_abstracted_event_info(msg) + assert event_info["event"] == events.KOJI_TAG_CHANGE + assert event_info["msg_id"] == msg["msg_id"] + assert event_info["tag_name"] == msg["msg"]["tag"] + assert event_info["build_name"] == msg["msg"]["name"] @patch("module_build_service.scheduler.consumer.models") @patch.object(MBSConsumer, "process_message") @@ -73,7 +73,7 @@ class TestConsumer: } consumer.consume(msg) assert process_message.call_count == 1 - msg_obj = process_message.call_args[0][0] - assert isinstance(msg_obj, KojiRepoChange) - assert msg_obj.msg_id == msg["body"]["msg_id"] - assert msg_obj.repo_tag == msg["body"]["msg"]["tag"] + event_info = process_message.call_args[0][0] + assert event_info["event"] == events.KOJI_REPO_CHANGE + assert event_info["msg_id"] == msg["body"]["msg_id"] + assert event_info["repo_tag"] == msg["body"]["msg"]["tag"] diff --git a/tests/test_scheduler/test_greenwave.py b/tests/test_scheduler/test_greenwave.py index 887e1682..997e1adb 100644 --- a/tests/test_scheduler/test_greenwave.py +++ b/tests/test_scheduler/test_greenwave.py @@ -74,8 +74,12 @@ class TestDecisionUpdateHandler: @patch("module_build_service.scheduler.handlers.greenwave.log") def test_decision_context_is_not_match(self, log): - msg = Mock(msg_id="msg-id-1", decision_context="bodhi_update_push_testing") - decision_update(msg) + decision_update( + msg_id="msg-id-1", + decision_context="bodhi_update_push_testing", + policies_satisfied=True, + subject_identifier="xxx", + ) log.debug.assert_called_once_with( 'Skip Greenwave message %s as MBS only handles messages with the decision context "%s"', "msg-id-1", @@ -84,16 +88,15 @@ class TestDecisionUpdateHandler: @patch("module_build_service.scheduler.handlers.greenwave.log") 
def test_not_satisfy_policies(self, log): - msg = Mock( + subject_identifier = "pkg-0.1-1.c1" + decision_update( msg_id="msg-id-1", decision_context="test_dec_context", policies_satisfied=False, - subject_identifier="pkg-0.1-1.c1", - ) - decision_update(msg) + subject_identifier=subject_identifier) log.debug.assert_called_once_with( "Skip to handle module build %s because it has not satisfied Greenwave policies.", - msg.subject_identifier, + subject_identifier, ) @patch("module_build_service.messaging.publish") diff --git a/tests/test_scheduler/test_module_init.py b/tests/test_scheduler/test_module_init.py index 3b807791..18295ee7 100644 --- a/tests/test_scheduler/test_module_init.py +++ b/tests/test_scheduler/test_module_init.py @@ -12,7 +12,7 @@ from module_build_service import build_logs from module_build_service.db_session import db_session from module_build_service.models import ModuleBuild from module_build_service.utils.general import mmd_to_str, load_mmd -from module_build_service.scheduler.events import MBSModule +# from module_build_service.scheduler.events import MBSModule class TestModuleInit: @@ -71,8 +71,7 @@ class TestModuleInit: platform_build.modulemd = mmd_to_str(mmd) db_session.commit() - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="init") - self.fn(msg=msg) + self.fn(msg_id="msg-id-1", module_build_id=2, module_build_state="init") build = ModuleBuild.get_by_id(db_session, 2) # Make sure the module entered the wait state @@ -112,8 +111,7 @@ class TestModuleInit: get_latest_error=RuntimeError("Failed in mocked_scm_get_latest") ) - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="init") - self.fn(msg=msg) + self.fn(msg_id="msg-id-1", module_build_id=2, module_build_state="init") build = ModuleBuild.get_by_id(db_session, 2) # Make sure the module entered the failed state @@ -138,8 +136,7 @@ class TestModuleInit: scmurl = "git://pkgs.domain.local/modules/includedmodule?#da95886" ModuleBuild.create( 
db_session, conf, "includemodule", "1", 3, mmd_to_str(mmd), scmurl, "mprahl") - msg = MBSModule(msg_id=None, module_build_id=3, module_build_state="init") - self.fn(msg=msg) + self.fn(msg_id="msg-id-1", module_build_id=3, module_build_state="init") build = ModuleBuild.get_by_id(db_session, 3) assert build.state == 1 assert build.name == "includemodule" @@ -173,11 +170,11 @@ class TestModuleInit: "7035bd33614972ac66559ac1fdd019ff6027ad22", get_latest_raise=True, ) - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="init") + build = ModuleBuild.get_by_id(db_session, 2) mocked_from_module_event.return_value = build - self.fn(msg=msg) + self.fn(msg_id="msg-id-1", module_build_id=2, module_build_state="init") # Query the database again to make sure the build object is updated db_session.refresh(build) diff --git a/tests/test_scheduler/test_module_wait.py b/tests/test_scheduler/test_module_wait.py index f7ae61b2..c9cbaf09 100644 --- a/tests/test_scheduler/test_module_wait.py +++ b/tests/test_scheduler/test_module_wait.py @@ -12,7 +12,6 @@ import module_build_service.resolver from module_build_service import build_logs, Modulemd from module_build_service.db_session import db_session from module_build_service.models import ComponentBuild, ModuleBuild -from module_build_service.scheduler.events import MBSModule base_dir = os.path.dirname(os.path.dirname(__file__)) @@ -23,7 +22,6 @@ class TestModuleWait: self.config = conf self.session = mock.Mock() - self.fn = module_build_service.scheduler.handlers.modules.wait def teardown_method(self, test_method): try: @@ -41,12 +39,11 @@ class TestModuleWait: create_builder.return_value = builder module_build_id = db_session.query(ModuleBuild).first().id - msg = MBSModule( - msg_id=None, - module_build_id=module_build_id, - module_build_state="some state") with patch("module_build_service.resolver.GenericResolver.create"): - self.fn(msg=msg) + module_build_service.scheduler.handlers.modules.wait( + msg_id="msg-id-1", 
+ module_build_id=module_build_id, + module_build_state="some state") @patch( "module_build_service.builder.GenericBuilder.default_buildroot_groups", @@ -81,9 +78,10 @@ class TestModuleWait: resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" generic_resolver.create.return_value = resolver - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="some state") - module_build_service.scheduler.handlers.modules.wait(msg=msg) + module_build_service.scheduler.handlers.modules.wait( + msg_id="msg-id-1", + module_build_id=2, module_build_state="some state") koji_session.newRepo.assert_called_once_with("module-123-build") @@ -126,9 +124,11 @@ class TestModuleWait: resolver.get_module_tag.return_value = "module-testmodule-master-20170109091357" generic_resolver.create.return_value = resolver - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="some state") - module_build_service.scheduler.handlers.modules.wait(msg=msg) + module_build_service.scheduler.handlers.modules.wait( + msg_id="msg-id-1", + module_build_id=2, + module_build_state="some state") assert koji_session.newRepo.called @@ -170,9 +170,11 @@ class TestModuleWait: } generic_resolver.create.return_value = resolver - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="some state") - module_build_service.scheduler.handlers.modules.wait(msg=msg) + module_build_service.scheduler.handlers.modules.wait( + msg_id="msg-id-1", + module_build_id=2, + module_build_state="some state") module_build = ModuleBuild.get_by_id(db_session, 2) assert module_build.cg_build_koji_tag == "modular-updates-candidate" @@ -239,7 +241,9 @@ class TestModuleWait: new=koji_cg_tag_build, ): generic_resolver.create.return_value = resolver - msg = MBSModule(msg_id=None, module_build_id=2, module_build_state="some state") - module_build_service.scheduler.handlers.modules.wait(msg=msg) + module_build_service.scheduler.handlers.modules.wait( + msg_id="msg-id-1", + 
module_build_id=2, + module_build_state="some state") module_build = ModuleBuild.get_by_id(db_session, 2) assert module_build.cg_build_koji_tag == expected_cg_koji_build_tag diff --git a/tests/test_scheduler/test_poller.py b/tests/test_scheduler/test_poller.py index 9a83390f..b3380d4f 100644 --- a/tests/test_scheduler/test_poller.py +++ b/tests/test_scheduler/test_poller.py @@ -8,8 +8,8 @@ from tests import clean_database, make_module_in_db import mock import koji from module_build_service.db_session import db_session +from module_build_service.scheduler import events from module_build_service.scheduler.producer import MBSProducer -from module_build_service.scheduler.events import KojiTagChange import six.moves.queue as queue from datetime import datetime, timedelta @@ -671,10 +671,10 @@ class TestPoller: for i in range(consumer.incoming.qsize()): msg = consumer.incoming.get() - assert isinstance(msg, KojiTagChange) - assert msg.artifact == c.package - assert msg.nvr == c.nvr - assert msg.tag in expected_msg_tags + assert events.KOJI_TAG_CHANGE == msg["event"] + assert c.package == msg["build_name"] + assert c.nvr == msg["build_nvr"] + assert msg["tag_name"] in expected_msg_tags @pytest.mark.parametrize("greenwave_result", [True, False]) @patch("module_build_service.utils.greenwave.Greenwave.check_gating") diff --git a/tests/test_scheduler/test_repo_done.py b/tests/test_scheduler/test_repo_done.py index 84ae1b3a..f701cbb1 100644 --- a/tests/test_scheduler/test_repo_done.py +++ b/tests/test_scheduler/test_repo_done.py @@ -7,22 +7,21 @@ import module_build_service.scheduler.handlers.repos import module_build_service.models from module_build_service.db_session import db_session from module_build_service.models import ComponentBuild -from module_build_service.scheduler.events import KojiRepoChange from tests import scheduler_init_data class TestRepoDone: - @mock.patch("module_build_service.models.ModuleBuild.from_repo_done_event") - def test_no_match(self, 
from_repo_done_event): + @mock.patch("module_build_service.models.ModuleBuild.get_by_tag") + def test_no_match(self, get_by_tag): """ Test that when a repo msg hits us and we have no match, that we do nothing gracefully. """ scheduler_init_data() - from_repo_done_event.return_value = None - msg = KojiRepoChange( - "no matches for this...", "2016-some-nonexistent-build") - module_build_service.scheduler.handlers.repos.done(msg=msg) + get_by_tag.return_value = None + module_build_service.scheduler.handlers.repos.done( + msg_id="no matches for this...", + repo_tag="2016-some-nonexistent-build") @mock.patch( "module_build_service.builder.KojiModuleBuilder." @@ -57,9 +56,9 @@ class TestRepoDone: get_session.return_value = mock.Mock(), "development" build_fn.return_value = 1234, 1, "", None - msg = KojiRepoChange( - "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") - module_build_service.scheduler.handlers.repos.done(msg=msg) + module_build_service.scheduler.handlers.repos.done( + msg_id="some_msg_id", + repo_tag="module-testmodule-master-20170109091357-7c29193d-build") build_fn.assert_called_once_with( artifact_name="tangerine", source=( @@ -117,9 +116,9 @@ class TestRepoDone: finalizer.side_effect = mocked_finalizer - msg = KojiRepoChange( - "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") - module_build_service.scheduler.handlers.repos.done(msg=msg) + module_build_service.scheduler.handlers.repos.done( + msg_id="some_msg_id", + repo_tag="module-testmodule-master-20170109091357-7c29193d-build") finalizer.assert_called_once() @@ -157,9 +156,10 @@ class TestRepoDone: config.return_value = mock.Mock(), "development" build_fn.return_value = None, 4, "Failed to submit artifact tangerine to Koji", None - msg = KojiRepoChange( - "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") - module_build_service.scheduler.handlers.repos.done(msg=msg) + module_build_service.scheduler.handlers.repos.done( + 
msg_id="some_msg_id", + repo_tag="module-testmodule-master-20170109091357-7c29193d-build") + build_fn.assert_called_once_with( artifact_name="tangerine", source=( @@ -183,10 +183,9 @@ class TestRepoDone: component_build.tagged = False db_session.commit() - msg = KojiRepoChange( - "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") - - module_build_service.scheduler.handlers.repos.done(msg=msg) + module_build_service.scheduler.handlers.repos.done( + msg_id="some_msg_id", + repo_tag="module-testmodule-master-20170109091357-7c29193d-build") mock_log_info.assert_called_with( "Ignoring repo regen, because not all components are tagged." @@ -223,9 +222,9 @@ class TestRepoDone: config.return_value = mock.Mock(), "development" build_fn.return_value = None, 4, "Failed to submit artifact x to Koji", None - msg = KojiRepoChange( - "some_msg_id", "module-testmodule-master-20170109091357-7c29193d-build") - module_build_service.scheduler.handlers.repos.done(msg=msg) + module_build_service.scheduler.handlers.repos.done( + msg_id="some_msg_id", + repo_tag="module-testmodule-master-20170109091357-7c29193d-build") module_build = module_build_service.models.ModuleBuild.get_by_id(db_session, 2) assert module_build.state == module_build_service.models.BUILD_STATES["failed"] diff --git a/tests/test_scheduler/test_tag_tagged.py b/tests/test_scheduler/test_tag_tagged.py index bd8942df..208ffa3b 100644 --- a/tests/test_scheduler/test_tag_tagged.py +++ b/tests/test_scheduler/test_tag_tagged.py @@ -10,7 +10,6 @@ import module_build_service.scheduler.handlers.repos import module_build_service.scheduler.handlers.tags import module_build_service.models -from module_build_service.scheduler.events import KojiTagChange from module_build_service.db_session import db_session import koji @@ -19,27 +18,28 @@ import koji @pytest.mark.usefixtures("reuse_component_init_data") class TestTagTagged: - @mock.patch("module_build_service.models.ModuleBuild.from_tag_change_event") - def 
test_no_matching_module(self, from_tag_change_event): + @mock.patch("module_build_service.models.ModuleBuild.get_by_tag") + def test_no_matching_module(self, get_by_tag): """ Test that when a tag msg hits us and we have no match, that we do nothing gracefully. """ - from_tag_change_event.return_value = None - msg = KojiTagChange( - "no matches for this...", "2016-some-nonexistent-build", "artifact", "artifact-1.2-1") - module_build_service.scheduler.handlers.tags.tagged(msg=msg) + get_by_tag.return_value = None + module_build_service.scheduler.handlers.tags.tagged( + msg_id="no matches for this...", + tag_name="2016-some-nonexistent-build", + build_name="artifact", + build_nvr="artifact-1.2-1") def test_no_matching_artifact(self): """ Test that when a tag msg hits us and we have no match, that we do nothing gracefully. """ - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "artifact", - "artifact-1.2-1", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="artifact", + build_nvr="artifact-1.2-1", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) @patch( "module_build_service.builder.GenericBuilder.default_buildroot_groups", @@ -85,47 +85,43 @@ class TestTagTagged: db_session.commit() # Tag the first component to the buildroot. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the first component to the final tag. 
- msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should not be called, because there are still components # to tag. assert not koji_session.newRepo.called # Tag the second component to the buildroot. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should not be called, because the component has not been # tagged to final tag so far. assert not koji_session.newRepo.called # Tag the first component to the final tag. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should be called now - all components have been tagged. koji_session.newRepo.assert_called_once_with( @@ -174,21 +170,19 @@ class TestTagTagged: db_session.commit() # Tag the perl-List-Compare component to the buildroot. 
- msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the perl-List-Compare component to final tag. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should not be called, because perl-List-Compare has not been # built yet. @@ -242,21 +236,19 @@ class TestTagTagged: db_session.commit() # Tag the perl-List-Compare component to the buildroot. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the perl-List-Compare component to final tag. 
- msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should be called now - all successfully built # components have been tagged. @@ -315,63 +307,57 @@ class TestTagTagged: db_session.commit() # Tag the first component to the buildroot. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the first component to the final tag. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should not be called, because there are still components # to tag. assert not koji_session.newRepo.called # Tag the second component to the buildroot. 
- msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the second component to final tag. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should not be called, because there are still components # to tag. assert not koji_session.newRepo.called # Tag the component from first batch to final tag. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c", - "module-build-macros", - "module-build-macros-0.1-1.module+0+b0a1d1f7", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c", + build_name="module-build-macros", + build_nvr="module-build-macros-0.1-1.module+0+b0a1d1f7", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the component from first batch to the buildroot. 
- msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "module-build-macros", - "module-build-macros-0.1-1.module+0+b0a1d1f7", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="module-build-macros", + build_nvr="module-build-macros-0.1-1.module+0+b0a1d1f7", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # newRepo should be called now - all components have been tagged. koji_session.newRepo.assert_called_once_with( @@ -436,30 +422,27 @@ class TestTagTagged: db_session.commit() # Tag the perl-Tangerine component to the buildroot. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-Tangerine", - "perl-Tangerine-0.23-1.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-Tangerine", + build_nvr="perl-Tangerine-0.23-1.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) assert not koji_session.newRepo.called # Tag the perl-List-Compare component to the buildroot. - msg = KojiTagChange( - "id", - "module-testmodule-master-20170219191323-c40c156c-build", - "perl-List-Compare", - "perl-List-Compare-0.53-5.module+0+d027b723", + module_build_service.scheduler.handlers.tags.tagged( + msg_id="id", + tag_name="module-testmodule-master-20170219191323-c40c156c-build", + build_name="perl-List-Compare", + build_nvr="perl-List-Compare-0.53-5.module+0+d027b723", ) - module_build_service.scheduler.handlers.tags.tagged(msg=msg) # Tag the perl-List-Compare component to final tag. 
-        msg = KojiTagChange(
-            "id",
-            "module-testmodule-master-20170219191323-c40c156c",
-            "perl-List-Compare",
-            "perl-List-Compare-0.53-5.module+0+d027b723",
+        module_build_service.scheduler.handlers.tags.tagged(
+            msg_id="id",
+            tag_name="module-testmodule-master-20170219191323-c40c156c",
+            build_name="perl-List-Compare",
+            build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",
         )
-        module_build_service.scheduler.handlers.tags.tagged(msg=msg)
 
         # newRepo should be called now - all successfully built
         # components have been tagged.
@@ -532,37 +515,33 @@ class TestTagTagged:
         db_session.commit()
 
         # Tag the first component to the buildroot.
-        msg = KojiTagChange(
-            "id",
-            "module-testmodule-master-20170219191323-c40c156c-build",
-            "perl-Tangerine",
-            "perl-Tangerine-0.23-1.module+0+d027b723",
+        module_build_service.scheduler.handlers.tags.tagged(
+            msg_id="id",
+            tag_name="module-testmodule-master-20170219191323-c40c156c-build",
+            build_name="perl-Tangerine",
+            build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",
         )
-        module_build_service.scheduler.handlers.tags.tagged(msg=msg)
 
         # Tag the first component to the final tag.
-        msg = KojiTagChange(
-            "id",
-            "module-testmodule-master-20170219191323-c40c156c",
-            "perl-Tangerine",
-            "perl-Tangerine-0.23-1.module+0+d027b723",
+        module_build_service.scheduler.handlers.tags.tagged(
+            msg_id="id",
+            tag_name="module-testmodule-master-20170219191323-c40c156c",
+            build_name="perl-Tangerine",
+            build_nvr="perl-Tangerine-0.23-1.module+0+d027b723",
         )
-        module_build_service.scheduler.handlers.tags.tagged(msg=msg)
 
         # Tag the second component to the buildroot.
-        msg = KojiTagChange(
-            "id",
-            "module-testmodule-master-20170219191323-c40c156c-build",
-            "perl-List-Compare",
-            "perl-List-Compare-0.53-5.module+0+d027b723",
+        module_build_service.scheduler.handlers.tags.tagged(
+            msg_id="id",
+            tag_name="module-testmodule-master-20170219191323-c40c156c-build",
+            build_name="perl-List-Compare",
+            build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",
         )
-        module_build_service.scheduler.handlers.tags.tagged(msg=msg)
 
         # Tag the second component to the final tag.
-        msg = KojiTagChange(
-            "id",
-            "module-testmodule-master-20170219191323-c40c156c",
-            "perl-List-Compare",
-            "perl-List-Compare-0.53-5.module+0+d027b723",
+        module_build_service.scheduler.handlers.tags.tagged(
+            msg_id="id",
+            tag_name="module-testmodule-master-20170219191323-c40c156c",
+            build_name="perl-List-Compare",
+            build_nvr="perl-List-Compare-0.53-5.module+0+d027b723",
         )
-        module_build_service.scheduler.handlers.tags.tagged(msg=msg)
 
         # All components are tagged, newRepo should be called if there are no active tasks.
         if expect_new_repo:
diff --git a/tests/test_utils/test_utils.py b/tests/test_utils/test_utils.py
index 7206d727..e72e2701 100644
--- a/tests/test_utils/test_utils.py
+++ b/tests/test_utils/test_utils.py
@@ -17,7 +17,6 @@ from module_build_service.errors import ProgrammingError, ValidationError, Unpro
 from module_build_service.utils.reuse import get_reusable_module, get_reusable_component
 from module_build_service.utils.general import load_mmd
 from module_build_service.utils.submit import format_mmd
-from module_build_service.scheduler.events import KojiBuildChange, KojiRepoChange
 from tests import (
     clean_database,
     init_data,
@@ -30,8 +29,9 @@ import koji
 import pytest
 import module_build_service.scheduler.handlers.components
 from module_build_service.db_session import db_session
-from module_build_service.builder.base import GenericBuilder
+from module_build_service.builder import GenericBuilder
 from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
+from module_build_service.scheduler import events
 from module_build_service import Modulemd
 from tests import app
@@ -1224,25 +1224,38 @@ class TestBatches:
         # Batch number should increase.
         assert module_build.batch == 2
 
-        # KojiBuildChange messages in further_work should have build_new_state
-        # set to COMPLETE, but the current component build state should be set
-        # to BUILDING, so KojiBuildChange message handler handles the change
-        # properly.
-        for msg in further_work:
-            if type(msg) == KojiBuildChange:
-                assert msg.build_new_state == koji.BUILD_STATES["COMPLETE"]
-                component_build = models.ComponentBuild.from_component_event(db_session, msg)
+        # buildsys.build.state.change messages in further_work should have
+        # build_new_state set to COMPLETE, but the current component build
+        # state should be set to BUILDING, so KojiBuildChange message handler
+        # handles the change properly.
+        for event_info in further_work:
+            if event_info["event"] == events.KOJI_BUILD_CHANGE:
+                assert event_info["build_new_state"] == koji.BUILD_STATES["COMPLETE"]
+                component_build = models.ComponentBuild.from_component_event(
+                    db_session,
+                    task_id=event_info["task_id"],
+                    module_id=event_info["module_build_id"])
                 assert component_build.state == koji.BUILD_STATES["BUILDING"]
 
         # When we handle these KojiBuildChange messages, MBS should tag all
         # the components just once.
-        for msg in further_work:
-            if type(msg) == KojiBuildChange:
-                module_build_service.scheduler.handlers.components.complete(msg)
+        for event_info in further_work:
+            if event_info["event"] == events.KOJI_BUILD_CHANGE:
+                module_build_service.scheduler.handlers.components.build_task_finalize(
+                    msg_id=event_info["msg_id"],
+                    build_id=event_info["build_id"],
+                    task_id=event_info["task_id"],
+                    build_new_state=event_info["build_new_state"],
+                    build_name=event_info["build_name"],
+                    build_version=event_info["build_version"],
+                    build_release=event_info["build_release"],
+                    module_build_id=event_info["module_build_id"],
+                    state_reason=event_info["state_reason"]
+                )
 
         # Since we have reused all the components in the batch, there should
         # be fake KojiRepoChange message.
-        assert type(further_work[-1]) == KojiRepoChange
+        assert further_work[-1]["event"] == events.KOJI_REPO_CHANGE
 
         # Check that packages have been tagged just once.
         assert len(DummyModuleBuilder.TAGGED_COMPONENTS) == 2
@@ -1282,8 +1295,12 @@ class TestBatches:
         # set to COMPLETE, but the current component build state in the DB should be set
         # to BUILDING, so KojiBuildChange message handler handles the change
         # properly.
-        assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]
-        component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])
+        event_info = further_work[0]
+        assert event_info["build_new_state"] == koji.BUILD_STATES["COMPLETE"]
+        component_build = models.ComponentBuild.from_component_event(
+            db_session,
+            task_id=event_info["task_id"],
+            module_id=event_info["module_build_id"])
         assert component_build.state == koji.BUILD_STATES["BUILDING"]
         assert component_build.package == "perl-Tangerine"
         assert component_build.reused_component_id is not None
@@ -1366,12 +1383,17 @@ class TestBatches:
         # Make sure we only have one message returned for the one reused component
         assert len(further_work) == 1
 
-        # The KojiBuildChange message in further_work should have build_new_state
-        # set to COMPLETE, but the current component build state in the DB should be set
-        # to BUILDING, so KojiBuildChange message handler handles the change
-        # properly.
-        assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]
-        component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])
+        # The buildsys.build.state.change message in further_work should have
+        # build_new_state set to COMPLETE, but the current component build state
+        # in the DB should be set to BUILDING, so the build state change handler
+        # handles the change properly.
+        event_info = further_work[0]
+        assert event_info["build_new_state"] == koji.BUILD_STATES["COMPLETE"]
+        component_build = models.ComponentBuild.from_component_event(
+            db_session,
+            task_id=event_info["task_id"],
+            module_id=event_info["module_build_id"],
+        )
         assert component_build.state == koji.BUILD_STATES["BUILDING"]
         assert component_build.package == "perl-Tangerine"
         assert component_build.reused_component_id is not None
@@ -1394,8 +1416,13 @@ class TestBatches:
         assert module_build.batch == 3
         # Verify that tangerine was reused even though perl-Tangerine was rebuilt in the previous
         # batch
-        assert further_work[0].build_new_state == koji.BUILD_STATES["COMPLETE"]
-        component_build = models.ComponentBuild.from_component_event(db_session, further_work[0])
+        event_info = further_work[0]
+        assert event_info["build_new_state"] == koji.BUILD_STATES["COMPLETE"]
+        component_build = models.ComponentBuild.from_component_event(
+            db_session,
+            task_id=event_info["task_id"],
+            module_id=event_info["module_build_id"]
+        )
         assert component_build.state == koji.BUILD_STATES["BUILDING"]
         assert component_build.package == "tangerine"
         assert component_build.reused_component_id is not None