Reformat the code across the codebase using "black" and manual tweaks

The main benefit of this commit is that the use of double quotes
is now consistent.
This commit is contained in:
mprahl
2019-04-25 17:58:44 -04:00
parent 559f0dd922
commit 66c3f82160
78 changed files with 9050 additions and 7438 deletions

View File

@@ -52,6 +52,7 @@ logging.basicConfig(level=logging.DEBUG)
def get_session(config, login=True):
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
return KojiModuleBuilder.get_session(config, login=login)
@@ -69,7 +70,7 @@ def strip_suffixes(s, suffixes):
"""
for suffix in suffixes:
if s.endswith(suffix):
s = s[:-len(suffix)]
s = s[: -len(suffix)]
break
return s
@@ -79,8 +80,9 @@ def koji_retrying_multicall_map(*args, **kwargs):
Wrapper around KojiModuleBuilder.koji_retrying_multicall_map, because
we cannot import that method normally because of import loop.
"""
from module_build_service.builder.KojiModuleBuilder import \
koji_retrying_multicall_map as multicall
from module_build_service.builder.KojiModuleBuilder import (
koji_retrying_multicall_map as multicall,)
return multicall(*args, **kwargs)
@@ -109,7 +111,7 @@ class KojiContentGenerator(object):
return "<KojiContentGenerator module: %s>" % (self.module_name)
@staticmethod
def parse_rpm_output(output, tags, separator=';'):
def parse_rpm_output(output, tags, separator=";"):
"""
Copied from:
https://github.com/projectatomic/atomic-reactor/blob/master/atomic_reactor/plugins/exit_koji_promote.py
@@ -130,42 +132,42 @@ class KojiContentGenerator(object):
except ValueError:
return None
if value == '(none)':
if value == "(none)":
return None
return value
components = []
sigmarker = 'Key ID '
sigmarker = "Key ID "
for rpm in output:
fields = rpm.rstrip('\n').split(separator)
fields = rpm.rstrip("\n").split(separator)
if len(fields) < len(tags):
continue
signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
signature = field("SIGPGP:pgpsig") or field("SIGGPG:pgpsig")
if signature:
parts = signature.split(sigmarker, 1)
if len(parts) > 1:
signature = parts[1]
component_rpm = {
u'type': u'rpm',
u'name': field('NAME'),
u'version': field('VERSION'),
u'release': field('RELEASE'),
u'arch': field('ARCH'),
u'sigmd5': field('SIGMD5'),
u'signature': signature,
u"type": u"rpm",
u"name": field("NAME"),
u"version": field("VERSION"),
u"release": field("RELEASE"),
u"arch": field("ARCH"),
u"sigmd5": field("SIGMD5"),
u"signature": signature,
}
# Special handling for epoch as it must be an integer or None
epoch = field('EPOCH')
epoch = field("EPOCH")
if epoch is not None:
epoch = int(epoch)
component_rpm[u'epoch'] = epoch
component_rpm[u"epoch"] = epoch
if component_rpm['name'] != 'gpg-pubkey':
if component_rpm["name"] != "gpg-pubkey":
components.append(component_rpm)
return components
@@ -177,28 +179,25 @@ class KojiContentGenerator(object):
Build a list of installed RPMs in the format required for the
metadata.
""" # noqa
""" # noqa
tags = [
'NAME',
'VERSION',
'RELEASE',
'ARCH',
'EPOCH',
'SIGMD5',
'SIGPGP:pgpsig',
'SIGGPG:pgpsig',
"NAME",
"VERSION",
"RELEASE",
"ARCH",
"EPOCH",
"SIGMD5",
"SIGPGP:pgpsig",
"SIGGPG:pgpsig",
]
sep = ';'
sep = ";"
fmt = sep.join(["%%{%s}" % tag for tag in tags])
cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
with open('/dev/null', 'r+') as devnull:
p = subprocess.Popen(cmd,
shell=True,
stdin=devnull,
stdout=subprocess.PIPE,
stderr=devnull)
with open("/dev/null", "r+") as devnull:
p = subprocess.Popen(
cmd, shell=True, stdin=devnull, stdout=subprocess.PIPE, stderr=devnull)
(stdout, stderr) = p.communicate()
status = p.wait()
@@ -216,16 +215,12 @@ class KojiContentGenerator(object):
# TODO: In libmodulemd v1.5, there'll be a property we can check instead
# of using RPM
try:
libmodulemd_version = subprocess.check_output(
['rpm', '--queryformat', '%{VERSION}', '-q', 'libmodulemd'],
universal_newlines=True).strip()
cmd = ["rpm", "--queryformat", "%{VERSION}", "-q", "libmodulemd"]
libmodulemd_version = subprocess.check_output(cmd, universal_newlines=True).strip()
except subprocess.CalledProcessError:
libmodulemd_version = 'unknown'
libmodulemd_version = "unknown"
return [{
'name': 'libmodulemd',
'version': libmodulemd_version
}]
return [{"name": "libmodulemd", "version": libmodulemd_version}]
def _koji_rpms_in_tag(self, tag):
""" Return the list of koji rpms in a tag. """
@@ -257,17 +252,20 @@ class KojiContentGenerator(object):
# Prepare the arguments for Koji multicall.
# We will call session.getRPMHeaders(...) for each SRC RPM to get exclusivearch,
# excludearch and license headers.
multicall_kwargs = [{"rpmID": rpm_id,
"headers": ["exclusivearch", "excludearch", "license"]}
for rpm_id in src_rpms.keys()]
multicall_kwargs = [
{"rpmID": rpm_id, "headers": ["exclusivearch", "excludearch", "license"]}
for rpm_id in src_rpms.keys()
]
# For each binary RPM, we only care about the "license" header.
multicall_kwargs += [{"rpmID": rpm_id, "headers": ["license"]}
for rpm_id in binary_rpms.keys()]
multicall_kwargs += [
{"rpmID": rpm_id, "headers": ["license"]} for rpm_id in binary_rpms.keys()
]
rpms_headers = koji_retrying_multicall_map(
session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs)
session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs
)
# Temporary dict with build_id as a key to find builds easily.
builds = {build['build_id']: build for build in builds}
builds = {build["build_id"]: build for build in builds}
# Create a mapping of build IDs to SRPM NEVRAs so that the for loop below can directly
# access these values when adding the `srpm_nevra` key to the returned RPMs
@@ -280,8 +278,7 @@ class KojiContentGenerator(object):
# also other useful data from the Build associated with the RPM.
for rpm, headers in zip(chain(src_rpms.values(), binary_rpms.values()), rpms_headers):
if not headers:
raise RuntimeError(
"No RPM headers received from Koji for RPM %s" % rpm["name"])
raise RuntimeError("No RPM headers received from Koji for RPM %s" % rpm["name"])
if "license" not in headers:
raise RuntimeError(
"No RPM 'license' header received from Koji for RPM %s" % rpm["name"])
@@ -291,44 +288,42 @@ class KojiContentGenerator(object):
build["excludearch"] = headers["excludearch"]
rpm["license"] = headers["license"]
rpm['srpm_name'] = build['name']
rpm['srpm_nevra'] = build_id_to_srpm_nevra[rpm["build_id"]]
rpm['exclusivearch'] = build['exclusivearch']
rpm['excludearch'] = build['excludearch']
rpm["srpm_name"] = build["name"]
rpm["srpm_nevra"] = build_id_to_srpm_nevra[rpm["build_id"]]
rpm["exclusivearch"] = build["exclusivearch"]
rpm["excludearch"] = build["excludearch"]
return rpms
def _get_build(self):
ret = {}
ret[u'name'] = self.module.name
ret[u"name"] = self.module.name
if self.devel:
ret['name'] += "-devel"
ret[u'version'] = self.module.stream.replace("-", "_")
ret["name"] += "-devel"
ret[u"version"] = self.module.stream.replace("-", "_")
# Append the context to the version to make NVRs of modules unique in the event of
# module stream expansion
ret[u'release'] = '{0}.{1}'.format(self.module.version, self.module.context)
ret[u'source'] = self.module.scmurl
ret[u'start_time'] = calendar.timegm(
self.module.time_submitted.utctimetuple())
ret[u'end_time'] = calendar.timegm(
self.module.time_completed.utctimetuple())
ret[u'extra'] = {
ret[u"release"] = "{0}.{1}".format(self.module.version, self.module.context)
ret[u"source"] = self.module.scmurl
ret[u"start_time"] = calendar.timegm(self.module.time_submitted.utctimetuple())
ret[u"end_time"] = calendar.timegm(self.module.time_completed.utctimetuple())
ret[u"extra"] = {
u"typeinfo": {
u"module": {
u"module_build_service_id": self.module.id,
u"content_koji_tag": self.module.koji_tag,
u"modulemd_str": self.module.modulemd,
u"name": ret['name'],
u"name": ret["name"],
u"stream": self.module.stream,
u"version": self.module.version,
u"context": self.module.context
u"context": self.module.context,
}
}
}
session = get_session(self.config, login=False)
# Only add the CG build owner if the user exists in Koji
if session.getUser(self.owner):
ret[u'owner'] = self.owner
ret[u"owner"] = self.owner
return ret
def _get_buildroot(self):
@@ -338,18 +333,15 @@ class KojiContentGenerator(object):
u"id": 1,
u"host": {
u"arch": text_type(platform.machine()),
u'os': u"%s %s" % (distro[0], distro[1])
u"os": u"%s %s" % (distro[0], distro[1]),
},
u"content_generator": {
u"name": u"module-build-service",
u"version": text_type(version)
},
u"container": {
u"arch": text_type(platform.machine()),
u"type": u"none"
u"version": text_type(version),
},
u"container": {u"arch": text_type(platform.machine()), u"type": u"none"},
u"components": self.__get_rpms(),
u"tools": self.__get_tools()
u"tools": self.__get_tools(),
}
return ret
@@ -368,7 +360,7 @@ class KojiContentGenerator(object):
u"arch": rpm["arch"],
u"epoch": rpm["epoch"],
u"sigmd5": rpm["payloadhash"],
u"type": u"rpm"
u"type": u"rpm",
}
def _get_arch_mmd_output(self, output_path, arch):
@@ -385,15 +377,11 @@ class KojiContentGenerator(object):
:return: Dictionary with record in "output" list.
"""
ret = {
'buildroot_id': 1,
'arch': arch,
'type': 'file',
'extra': {
'typeinfo': {
'module': {}
}
},
'checksum_type': 'md5',
"buildroot_id": 1,
"arch": arch,
"type": "file",
"extra": {"typeinfo": {"module": {}}},
"checksum_type": "md5",
}
# Noarch architecture represents "generic" modulemd.txt.
@@ -406,13 +394,13 @@ class KojiContentGenerator(object):
# parse it to get the Modulemd instance.
mmd_path = os.path.join(output_path, mmd_filename)
try:
with open(mmd_path, 'rb') as mmd_f:
with open(mmd_path, "rb") as mmd_f:
raw_data = mmd_f.read()
data = to_text_type(raw_data)
mmd = load_mmd(data)
ret['filename'] = mmd_filename
ret['filesize'] = len(raw_data)
ret['checksum'] = hashlib.md5(raw_data).hexdigest()
ret["filename"] = mmd_filename
ret["filesize"] = len(raw_data)
ret["checksum"] = hashlib.md5(raw_data).hexdigest()
except IOError:
if arch == "src":
# This might happen in case the Module is submitted directly
@@ -428,8 +416,7 @@ class KojiContentGenerator(object):
if arch in ["noarch", "src"]:
# For generic noarch/src modulemd, include all the RPMs.
for rpm in self.rpms:
components.append(
self._koji_rpm_to_component_record(rpm))
components.append(self._koji_rpm_to_component_record(rpm))
else:
# Check the RPM artifacts built for this architecture in modulemd file,
# find the matching RPM in the `rpms_dict` coming from Koji and use it
@@ -438,11 +425,10 @@ class KojiContentGenerator(object):
# RPM sigmd5 signature is not stored in MMD.
for rpm in mmd.get_rpm_artifacts().get():
if rpm not in self.rpms_dict:
raise RuntimeError("RPM %s found in the final modulemd but not "
"in Koji tag." % rpm)
raise RuntimeError(
"RPM %s found in the final modulemd but not in Koji tag." % rpm)
tag_rpm = self.rpms_dict[rpm]
components.append(
self._koji_rpm_to_component_record(tag_rpm))
components.append(self._koji_rpm_to_component_record(tag_rpm))
ret["components"] = components
return ret
@@ -455,18 +441,18 @@ class KojiContentGenerator(object):
try:
log_path = os.path.join(output_path, "build.log")
with open(log_path, 'rb') as build_log:
with open(log_path, "rb") as build_log:
checksum = hashlib.md5(build_log.read()).hexdigest()
stat = os.stat(log_path)
ret.append(
{
u'buildroot_id': 1,
u'arch': u'noarch',
u'type': u'log',
u'filename': u'build.log',
u'filesize': stat.st_size,
u'checksum_type': u'md5',
u'checksum': checksum
u"buildroot_id": 1,
u"arch": u"noarch",
u"type": u"log",
u"filename": u"build.log",
u"filesize": stat.st_size,
u"checksum_type": u"md5",
u"checksum": checksum,
}
)
except IOError:
@@ -480,7 +466,7 @@ class KojiContentGenerator(object):
u"metadata_version": 0,
u"buildroots": [self._get_buildroot()],
u"build": self._get_build(),
u"output": self._get_output(output_path)
u"output": self._get_output(output_path),
}
return ret
@@ -567,12 +553,10 @@ class KojiContentGenerator(object):
# For example:
# "x86_64" -> ['athlon', 'i386', 'i586', 'i486', 'i686']
# "i686" -> []
multilib_arches = set(compatible_arches) - set(
pungi.arch.get_compatible_arches(arch))
multilib_arches = set(compatible_arches) - set(pungi.arch.get_compatible_arches(arch))
# List of architectures that should be in ExclusiveArch tag or missing
# from ExcludeArch tag. Multilib should not be enabled here.
exclusive_arches = pungi.arch.get_valid_arches(
arch, multilib=False, add_noarch=False)
exclusive_arches = pungi.arch.get_valid_arches(arch, multilib=False, add_noarch=False)
# Modulemd.SimpleSet into which we will add the RPMs.
rpm_artifacts = Modulemd.SimpleSet()
@@ -605,8 +589,7 @@ class KojiContentGenerator(object):
# - the architecture of an RPM is not multilib architecture for `arch`.
# - the architecture of an RPM is not the final mmd architecture.
# - the architecture of an RPM is not "noarch" or "src".
if (rpm["arch"] not in multilib_arches and
rpm["arch"] not in [arch, "noarch", "src"]):
if rpm["arch"] not in multilib_arches and rpm["arch"] not in [arch, "noarch", "src"]:
continue
# Skip the RPM if it is excluded on this arch or exclusive
@@ -728,8 +711,7 @@ class KojiContentGenerator(object):
commit = xmd.get("mbs", {}).get("commit")
scmurl = xmd.get("mbs", {}).get("scmurl")
if not commit or not scmurl:
log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.",
self.module)
log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.", self.module)
return
td = None
@@ -747,9 +729,7 @@ class KojiContentGenerator(object):
if td is not None:
shutil.rmtree(td)
except Exception as e:
log.warning(
"Failed to remove temporary directory {!r}: {}".format(
td, str(e)))
log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))
def _prepare_file_directory(self):
""" Creates a temporary directory that will contain all the files
@@ -787,10 +767,10 @@ class KojiContentGenerator(object):
Uploads output files to Koji hub.
"""
to_upload = []
for info in metadata['output']:
if info.get('metadata_only', False):
for info in metadata["output"]:
if info.get("metadata_only", False):
continue
localpath = os.path.join(file_dir, info['filename'])
localpath = os.path.join(file_dir, info["filename"])
if not os.path.exists(localpath):
err = "Cannot upload %s to Koji. No such file." % localpath
log.error(err)
@@ -799,7 +779,7 @@ class KojiContentGenerator(object):
to_upload.append([localpath, info])
# Create unique server directory.
serverdir = 'mbs/%r.%d' % (time.time(), self.module.id)
serverdir = "mbs/%r.%d" % (time.time(), self.module.id)
for localpath, info in to_upload:
log.info("Uploading %s to Koji" % localpath)
@@ -816,8 +796,8 @@ class KojiContentGenerator(object):
tag_name = self.module.cg_build_koji_tag
if not tag_name:
log.info("%r: Not tagging Content Generator build, no "
"cg_build_koji_tag set", self.module)
log.info(
"%r: Not tagging Content Generator build, no cg_build_koji_tag set", self.module)
return
tag_names_to_try = [tag_name, self.config.koji_cg_default_build_tag]
@@ -827,20 +807,19 @@ class KojiContentGenerator(object):
if tag_info:
break
log.info("%r: Tag %s not found in Koji, trying next one.",
self.module, tag)
log.info("%r: Tag %s not found in Koji, trying next one.", self.module, tag)
if not tag_info:
log.warning(
"%r:, Not tagging Content Generator build, no available tag"
" found, tried %r", self.module, tag_names_to_try)
"%r:, Not tagging Content Generator build, no available tag found, tried %r",
self.module, tag_names_to_try,
)
return
build = self._get_build()
nvr = "%s-%s-%s" % (build["name"], build["version"], build["release"])
log.info("Content generator build %s will be tagged as %s in "
"Koji", nvr, tag)
log.info("Content generator build %s will be tagged as %s in Koji", nvr, tag)
session.tagBuild(tag_info["id"], nvr)
def _load_koji_tag(self, koji_session):
@@ -879,7 +858,7 @@ class KojiContentGenerator(object):
except koji.GenericError as e:
if "Build already exists" not in str(e):
raise
log.warning('Failed to import content generator')
log.warning("Failed to import content generator")
build_info = None
if conf.koji_cg_tag_build:
self._tag_cg_build()

View File

@@ -76,8 +76,10 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
if list_of_args is None and list_of_kwargs is None:
raise ProgrammingError("One of list_of_args or list_of_kwargs must be set.")
if (type(list_of_args) not in [type(None), list] or
type(list_of_kwargs) not in [type(None), list]):
if (
type(list_of_args) not in [type(None), list]
or type(list_of_kwargs) not in [type(None), list]
):
raise ProgrammingError("list_of_args and list_of_kwargs must be list or None.")
if list_of_kwargs is None:
@@ -99,16 +101,19 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
try:
responses = koji_session.multiCall(strict=True)
except Exception:
log.exception("Exception raised for multicall of method %r with args %r, %r:",
koji_session_fnc, args, kwargs)
log.exception(
"Exception raised for multicall of method %r with args %r, %r:",
koji_session_fnc, args, kwargs,
)
return None
if not responses:
log.error("Koji did not return response for multicall of %r", koji_session_fnc)
return None
if type(responses) != list:
log.error("Fault element was returned for multicall of method %r: %r",
koji_session_fnc, responses)
log.error(
"Fault element was returned for multicall of method %r: %r", koji_session_fnc, responses
)
return None
results = []
@@ -122,13 +127,17 @@ def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_o
for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
if type(response) == list:
if not response:
log.error("Empty list returned for multicall of method %r with args %r, %r",
koji_session_fnc, args, kwargs)
log.error(
"Empty list returned for multicall of method %r with args %r, %r",
koji_session_fnc, args, kwargs
)
return None
results.append(response[0])
else:
log.error("Unexpected data returned for multicall of method %r with args %r, %r: %r",
koji_session_fnc, args, kwargs, response)
log.error(
"Unexpected data returned for multicall of method %r with args %r, %r: %r",
koji_session_fnc, args, kwargs, response
)
return None
return results
@@ -150,9 +159,9 @@ class KojiModuleBuilder(GenericBuilder):
backend = "koji"
_build_lock = threading.Lock()
region = dogpile.cache.make_region().configure('dogpile.cache.memory')
region = dogpile.cache.make_region().configure("dogpile.cache.memory")
@module_build_service.utils.validate_koji_tag('tag_name')
@module_build_service.utils.validate_koji_tag("tag_name")
def __init__(self, owner, module, config, tag_name, components):
"""
:param owner: a string representing who kicked off the builds
@@ -186,12 +195,11 @@ class KojiModuleBuilder(GenericBuilder):
self.components = components
def __repr__(self):
return "<KojiModuleBuilder module: %s, tag: %s>" % (
self.module_str, self.tag_name)
return "<KojiModuleBuilder module: %s, tag: %s>" % (self.module_str, self.tag_name)
@region.cache_on_arguments()
def getPerms(self):
return dict([(p['name'], p['id']) for p in self.koji_session.getAllPerms()])
return dict([(p["name"], p["id"]) for p in self.koji_session.getAllPerms()])
@module_build_service.utils.retry(wait_on=(IOError, koji.GenericError))
def buildroot_ready(self, artifacts=None):
@@ -201,24 +209,22 @@ class KojiModuleBuilder(GenericBuilder):
"""
assert self.module_target, "Invalid build target"
tag_id = self.module_target['build_tag']
tag_id = self.module_target["build_tag"]
repo = self.koji_session.getRepo(tag_id)
builds = [self.koji_session.getBuild(a, strict=True) for a in artifacts or []]
log.info("%r checking buildroot readiness for "
"repo: %r, tag_id: %r, artifacts: %r, builds: %r" % (
self, repo, tag_id, artifacts, builds))
log.info(
"%r checking buildroot readiness for repo: %r, tag_id: %r, artifacts: %r, builds: %r"
% (self, repo, tag_id, artifacts, builds)
)
if not repo:
log.info("Repo is not generated yet, buildroot is not ready yet.")
return False
ready = bool(koji.util.checkForBuilds(
self.koji_session,
tag_id,
builds,
repo['create_event'],
latest=True,
))
ready = bool(
koji.util.checkForBuilds(
self.koji_session, tag_id, builds, repo["create_event"], latest=True)
)
if ready:
log.info("%r buildroot is ready" % self)
else:
@@ -239,19 +245,22 @@ class KojiModuleBuilder(GenericBuilder):
# Get all the RPMs and builds of the reusable module in Koji
rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)
# Convert the list to a dict where each key is the build_id
builds = {build['build_id']: build for build in builds}
builds = {build["build_id"]: build for build in builds}
# Create a mapping of package (SRPM) to the RPMs in NVR format
package_to_rpms = {}
for rpm in rpms:
package = builds[rpm['build_id']]['name']
package = builds[rpm["build_id"]]["name"]
if package not in package_to_rpms:
package_to_rpms[package] = []
package_to_rpms[package].append(kobo.rpmlib.make_nvr(rpm))
components_in_module = [c.package for c in module_build.component_builds]
reusable_components = get_reusable_components(
db_session, module_build, components_in_module,
previous_module_build=reusable_module)
db_session,
module_build,
components_in_module,
previous_module_build=reusable_module,
)
# Loop through all the reusable components to find if any of their RPMs are
# being filtered
for reusable_component in reusable_components:
@@ -261,7 +270,7 @@ class KojiModuleBuilder(GenericBuilder):
# We must get the component name from the NVR and not from
# reusable_component.package because macros such as those used
# by SCLs can change the name of the underlying build
component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)['name']
component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]
if component_name not in package_to_rpms:
continue
@@ -270,13 +279,13 @@ class KojiModuleBuilder(GenericBuilder):
for nvr in package_to_rpms[component_name]:
parsed_nvr = kobo.rpmlib.parse_nvr(nvr)
# Don't compare with the epoch
parsed_nvr['epoch'] = None
parsed_nvr["epoch"] = None
# Loop through all the filtered RPMs to find a match with the reusable
# component's RPMs.
for nvr2 in list(filtered_rpms):
parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)
# Don't compare with the epoch
parsed_nvr2['epoch'] = None
parsed_nvr2["epoch"] = None
# Only remove the filter if we are going to reuse a component with
# the same exact NVR
if parsed_nvr == parsed_nvr2:
@@ -299,10 +308,10 @@ class KojiModuleBuilder(GenericBuilder):
# Taken from Karsten's create-distmacro-pkg.sh
# - however removed any provides to system-release/redhat-release
name = 'module-build-macros'
name = "module-build-macros"
version = "0.1"
release = "1"
today = datetime.date.today().strftime('%a %b %d %Y')
today = datetime.date.today().strftime("%a %b %d %Y")
mmd = module_build.mmd()
# Generate "Conflicts: name = version-release". This is workaround for
@@ -320,19 +329,20 @@ class KojiModuleBuilder(GenericBuilder):
module_build, req_data["filtered_rpms"])
else:
filtered_rpms = req_data["filtered_rpms"]
filter_conflicts.extend(map(
KojiModuleBuilder.format_conflicts_line, filtered_rpms))
filter_conflicts.extend(map(KojiModuleBuilder.format_conflicts_line, filtered_rpms))
if req_name in conf.base_module_names and 'ursine_rpms' in req_data:
if req_name in conf.base_module_names and "ursine_rpms" in req_data:
comments = (
'# Filter out RPMs from stream collision modules found from ursine content'
' for base module {}:'.format(req_name),
'# ' + ', '.join(req_data['stream_collision_modules']),
("# Filter out RPMs from stream collision modules found from ursine content"
" for base module {}:".format(req_name)),
"# " + ", ".join(req_data["stream_collision_modules"]),
)
filter_conflicts.extend(
chain(
comments,
map(KojiModuleBuilder.format_conflicts_line, req_data["ursine_rpms"]),
)
)
filter_conflicts.extend(chain(
comments,
map(KojiModuleBuilder.format_conflicts_line, req_data['ursine_rpms'])
))
spec_content = textwrap.dedent("""
%global dist {disttag}
@@ -433,11 +443,20 @@ class KojiModuleBuilder(GenericBuilder):
log.debug("Building %s.spec" % name)
# We are not interested in the rpmbuild stdout...
null_fd = open(os.devnull, 'w')
execute_cmd(['rpmbuild', '-bs', '%s.spec' % name,
'--define', '_topdir %s' % td,
'--define', '_sourcedir %s' % sources_dir],
cwd=td, stdout=null_fd)
null_fd = open(os.devnull, "w")
execute_cmd(
[
"rpmbuild",
"-bs",
"%s.spec" % name,
"--define",
"_topdir %s" % td,
"--define",
"_sourcedir %s" % sources_dir,
],
cwd=td,
stdout=null_fd,
)
null_fd.close()
sdir = os.path.join(td, "SRPMS")
srpm_paths = glob.glob("%s/*.src.rpm" % sdir)
@@ -458,10 +477,8 @@ class KojiModuleBuilder(GenericBuilder):
:return: the Koji session object.
:rtype: :class:`koji.ClientSession`
"""
koji_config = munch.Munch(koji.read_config(
profile_name=config.koji_profile,
user_config=config.koji_config,
))
koji_config = munch.Munch(
koji.read_config(profile_name=config.koji_profile, user_config=config.koji_config))
# Timeout after 10 minutes. The default is 12 hours.
koji_config["timeout"] = 60 * 10
@@ -494,9 +511,7 @@ class KojiModuleBuilder(GenericBuilder):
koji_session.krb_login(principal=principal, keytab=keytab, ctx=ctx, ccache=ccache)
elif authtype == "ssl":
koji_session.ssl_login(
os.path.expanduser(koji_config.cert),
None,
os.path.expanduser(koji_config.serverca)
os.path.expanduser(koji_config.cert), None, os.path.expanduser(koji_config.serverca)
)
else:
raise ValueError("Unrecognized koji authtype %r" % authtype)
@@ -512,8 +527,7 @@ class KojiModuleBuilder(GenericBuilder):
# Create or update individual tags
# the main tag needs arches so pungi can dump it
self.module_tag = self._koji_create_tag(
self.tag_name, self.arches, perm="admin")
self.module_tag = self._koji_create_tag(self.tag_name, self.arches, perm="admin")
self.module_build_tag = self._koji_create_tag(
self.tag_name + "-build", self.arches, perm="admin")
@@ -530,19 +544,23 @@ class KojiModuleBuilder(GenericBuilder):
@module_build_service.utils.retry(wait_on=SysCallError, interval=5)
def add_groups():
return self._koji_add_groups_to_tag(
dest_tag=self.module_build_tag,
groups=groups,
)
return self._koji_add_groups_to_tag(dest_tag=self.module_build_tag, groups=groups)
add_groups()
# Koji targets can only be 50 characters long, but the generate_koji_tag function
# checks the length with '-build' at the end, but we know we will never append '-build',
# so we can safely have the name check be more characters
target_length = 50 + len('-build')
target_length = 50 + len("-build")
target = module_build_service.utils.generate_koji_tag(
self.module.name, self.module.stream, self.module.version, self.module.context,
target_length, scratch=self.module.scratch, scratch_id=self.module.id)
self.module.name,
self.module.stream,
self.module.version,
self.module.context,
target_length,
scratch=self.module.scratch,
scratch_id=self.module.id,
)
# Add main build target.
self.module_target = self._koji_add_target(target, self.module_build_tag, self.module_tag)
@@ -570,17 +588,19 @@ class KojiModuleBuilder(GenericBuilder):
This method is safe to call multiple times.
"""
log.info("%r adding artifacts %r" % (self, artifacts))
build_tag = self._get_tag(self.module_build_tag)['id']
build_tag = self._get_tag(self.module_build_tag)["id"]
xmd = self.mmd.get_xmd()
if "mbs_options" in xmd.keys() and "blocked_packages" in xmd["mbs_options"].keys():
packages = [kobo.rpmlib.parse_nvr(nvr)["name"] for nvr in artifacts]
packages = [package for package in packages
if package in xmd["mbs_options"]["blocked_packages"]]
packages = [
package for package in packages
if package in xmd["mbs_options"]["blocked_packages"]
]
if packages:
self._koji_unblock_packages(packages)
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag['name'])
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag["name"])
self.koji_session.multicall = True
for nvr in artifacts:
@@ -593,8 +613,8 @@ class KojiModuleBuilder(GenericBuilder):
if not install:
continue
for group in ('srpm-build', 'build'):
name = kobo.rpmlib.parse_nvr(nvr)['name']
for group in ("srpm-build", "build"):
name = kobo.rpmlib.parse_nvr(nvr)["name"]
log.info("%r adding %s to group %s" % (self, name, group))
self.koji_session.groupPackageListAdd(build_tag, group, name)
self.koji_session.multiCall(strict=True)
@@ -606,11 +626,11 @@ class KojiModuleBuilder(GenericBuilder):
:return: None
"""
if dest_tag:
tag = self._get_tag(self.module_tag)['id']
tagged_nvrs = self._get_tagged_nvrs(self.module_tag['name'])
tag = self._get_tag(self.module_tag)["id"]
tagged_nvrs = self._get_tagged_nvrs(self.module_tag["name"])
else:
tag = self._get_tag(self.module_build_tag)['id']
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag['name'])
tag = self._get_tag(self.module_build_tag)["id"]
tagged_nvrs = self._get_tagged_nvrs(self.module_build_tag["name"])
self.koji_session.multicall = True
for nvr in artifacts:
@@ -626,18 +646,18 @@ class KojiModuleBuilder(GenericBuilder):
:param artifacts: a list of NVRs to untag
:return: None
"""
build_tag_name = self.tag_name + '-build'
build_tag_name = self.tag_name + "-build"
dest_tag = self._get_tag(self.tag_name, strict=False)
build_tag = self._get_tag(build_tag_name, strict=False)
# Get the NVRs in the tags to make sure the builds exist and they're tagged before
# untagging them
if dest_tag:
dest_tagged_nvrs = self._get_tagged_nvrs(dest_tag['name'])
dest_tagged_nvrs = self._get_tagged_nvrs(dest_tag["name"])
else:
log.info('The tag "{0}" doesn\'t exist'.format(self.tag_name))
dest_tagged_nvrs = []
if build_tag:
build_tagged_nvrs = self._get_tagged_nvrs(build_tag['name'])
build_tagged_nvrs = self._get_tagged_nvrs(build_tag["name"])
else:
log.info('The tag "{0}" doesn\'t exist'.format(build_tag_name))
build_tagged_nvrs = []
@@ -649,11 +669,11 @@ class KojiModuleBuilder(GenericBuilder):
self.koji_session.multicall = True
for nvr in artifacts:
if nvr in dest_tagged_nvrs:
log.info("%r untagging %r from %r" % (self, nvr, dest_tag['id']))
self.koji_session.untagBuild(dest_tag['id'], nvr)
log.info("%r untagging %r from %r" % (self, nvr, dest_tag["id"]))
self.koji_session.untagBuild(dest_tag["id"], nvr)
if nvr in build_tagged_nvrs:
log.info("%r untagging %r from %r" % (self, nvr, build_tag['id']))
self.koji_session.untagBuild(build_tag['id'], nvr)
log.info("%r untagging %r from %r" % (self, nvr, build_tag["id"]))
self.koji_session.untagBuild(build_tag["id"], nvr)
self.koji_session.multiCall(strict=True)
def wait_task(self, task_id):
@@ -683,12 +703,12 @@ class KojiModuleBuilder(GenericBuilder):
:param component_build: a ComponentBuild object
:return: a list of msgs that MBS needs to process
"""
opts = {'latest': True, 'package': component_build.package, 'inherit': False}
build_tagged = self.koji_session.listTagged(self.module_build_tag['name'], **opts)
opts = {"latest": True, "package": component_build.package, "inherit": False}
build_tagged = self.koji_session.listTagged(self.module_build_tag["name"], **opts)
dest_tagged = None
# Only check the destination tag if the component is not a build_time_only component
if not component_build.build_time_only:
dest_tagged = self.koji_session.listTagged(self.module_tag['name'], **opts)
dest_tagged = self.koji_session.listTagged(self.module_tag["name"], **opts)
for rv in [build_tagged, dest_tagged]:
if rv and len(rv) != 1:
raise ValueError("Expected exactly one item in list. Got %s" % rv)
@@ -716,33 +736,48 @@ class KojiModuleBuilder(GenericBuilder):
return further_work
# Start setting up MBS' database to use the existing build
log.info('Skipping build of "{0}" since it already exists.'.format(build['nvr']))
log.info('Skipping build of "{0}" since it already exists.'.format(build["nvr"]))
# Set it to COMPLETE so it doesn't count towards the concurrent component threshold
component_build.state = koji.BUILD_STATES['COMPLETE']
component_build.nvr = build['nvr']
component_build.task_id = build['task_id']
component_build.state_reason = 'Found existing build'
component_build.state = koji.BUILD_STATES["COMPLETE"]
component_build.nvr = build["nvr"]
component_build.task_id = build["task_id"]
component_build.state_reason = "Found existing build"
nvr_dict = kobo.rpmlib.parse_nvr(component_build.nvr)
# Trigger a completed build message
further_work.append(module_build_service.messaging.KojiBuildChange(
'recover_orphaned_artifact: fake message', build['build_id'],
build['task_id'], koji.BUILD_STATES['COMPLETE'], component_build.package,
nvr_dict['version'], nvr_dict['release'], component_build.module_build.id))
further_work.append(
module_build_service.messaging.KojiBuildChange(
"recover_orphaned_artifact: fake message",
build["build_id"],
build["task_id"],
koji.BUILD_STATES["COMPLETE"],
component_build.package,
nvr_dict["version"],
nvr_dict["release"],
component_build.module_build.id,
)
)
component_tagged_in = []
if build_tagged:
component_tagged_in.append(self.module_build_tag['name'])
component_tagged_in.append(self.module_build_tag["name"])
else:
# Tag it in the build tag if it's not there
self.tag_artifacts([component_build.nvr], dest_tag=False)
if dest_tagged:
component_tagged_in.append(self.module_tag['name'])
component_tagged_in.append(self.module_tag["name"])
for tag in component_tagged_in:
log.info('The build being skipped isn\'t tagged in the "{0}" tag. Will send a '
'message to the tag handler'.format(tag))
further_work.append(module_build_service.messaging.KojiTagChange(
'recover_orphaned_artifact: fake message', tag, component_build.package,
component_build.nvr))
log.info(
'The build being skipped isn\'t tagged in the "{0}" tag. Will send a message to '
"the tag handler".format(tag)
)
further_work.append(
module_build_service.messaging.KojiTagChange(
"recover_orphaned_artifact: fake message",
tag,
component_build.package,
component_build.nvr,
)
)
return further_work
def build(self, artifact_name, source):
@@ -768,21 +803,23 @@ class KojiModuleBuilder(GenericBuilder):
# For some reason repr(time.time()) includes 4 or 5
# more digits of precision than str(time.time())
# Unnamed Engineer: Guido v. R., I am disappoint
return '%s/%r.%s' % (prefix, time.time(),
''.join([random.choice(string.ascii_letters)
for i in range(8)]))
return "%s/%r.%s" % (
prefix,
time.time(),
"".join([random.choice(string.ascii_letters) for i in range(8)]),
)
if not self.__prep:
raise RuntimeError("Buildroot is not prep-ed")
self._koji_whitelist_packages([artifact_name])
if source.startswith('cli-build/'):
if source.startswith("cli-build/"):
# treat source as a custom srpm that has already been uploaded to koji
pass
elif '://' not in source:
elif "://" not in source:
# treat source as an srpm and upload it
serverdir = _unique_path('cli-build')
serverdir = _unique_path("cli-build")
callback = None
self.koji_session.uploadWrapper(source, serverdir, callback=callback)
source = "%s/%s" % (serverdir, os.path.basename(source))
@@ -792,32 +829,30 @@ class KojiModuleBuilder(GenericBuilder):
# The reason is that it is faster to build this RPM in
# already existing shared target, because Koji does not need to do
# repo-regen.
if (artifact_name == "module-build-macros" and
self.config.koji_build_macros_target):
if artifact_name == "module-build-macros" and self.config.koji_build_macros_target:
module_target = self.config.koji_build_macros_target
else:
module_target = self.module_target['name']
module_target = self.module_target["name"]
build_opts = {
"skip_tag": True,
"mbs_artifact_name": artifact_name,
"mbs_module_target": module_target
"mbs_module_target": module_target,
}
# disabled by default, wouldn't work until Koji issue #1158 is done
if conf.allow_arch_override:
build_opts['arch_override'] = \
self.mmd.get_rpm_components()[artifact_name].get_arches().get()
build_opts["arch_override"] = (
self.mmd.get_rpm_components()[artifact_name].get_arches().get())
task_id = self.koji_session.build(source, module_target, build_opts,
priority=self.build_priority)
log.info("submitted build of %s (task_id=%s), via %s" % (
source, task_id, self))
task_id = self.koji_session.build(
source, module_target, build_opts, priority=self.build_priority)
log.info("submitted build of %s (task_id=%s), via %s" % (source, task_id, self))
if task_id:
state = koji.BUILD_STATES['BUILDING']
state = koji.BUILD_STATES["BUILDING"]
reason = "Submitted %s to Koji" % (artifact_name)
else:
state = koji.BUILD_STATES['FAILED']
state = koji.BUILD_STATES["FAILED"]
reason = "Failed to submit artifact %s to Koji" % (artifact_name)
return task_id, state, reason, None
@@ -825,8 +860,10 @@ class KojiModuleBuilder(GenericBuilder):
try:
self.koji_session.cancelTask(task_id)
except Exception as error:
log.error('Failed to cancel task ID {0} in Koji. The error '
'message was: {1}'.format(task_id, str(error)))
log.error(
"Failed to cancel task ID {0} in Koji. The error "
"message was: {1}".format(task_id, str(error))
)
@classmethod
def repo_from_tag(cls, config, tag_name, arch):
@@ -840,52 +877,52 @@ class KojiModuleBuilder(GenericBuilder):
"""
return "%s/%s/latest/%s" % (config.koji_repository_url, tag_name, arch)
@module_build_service.utils.validate_koji_tag('tag', post='')
@module_build_service.utils.validate_koji_tag("tag", post="")
def _get_tag(self, tag, strict=True):
if isinstance(tag, dict):
tag = tag['name']
tag = tag["name"]
taginfo = self.koji_session.getTag(tag)
if not taginfo:
if strict:
raise SystemError("Unknown tag: %s" % tag)
return taginfo
@module_build_service.utils.validate_koji_tag(['tag_name'], post='')
@module_build_service.utils.validate_koji_tag(["tag_name"], post="")
def _koji_add_many_tag_inheritance(self, tag_name, parent_tags):
tag = self._get_tag(tag_name)
# highest priority num is at the end
inheritance_data = sorted(self.koji_session.getInheritanceData(tag['name']) or
[], key=lambda k: k['priority'])
inheritance_data = sorted(
self.koji_session.getInheritanceData(tag["name"]) or [], key=lambda k: k["priority"])
# Set initial priority to last record in inheritance data or 0
priority = 0
if inheritance_data:
priority = inheritance_data[-1]['priority'] + 10
priority = inheritance_data[-1]["priority"] + 10
def record_exists(parent_id, data):
for item in data:
if parent_id == item['parent_id']:
if parent_id == item["parent_id"]:
return True
return False
for parent in parent_tags: # We expect that they're sorted
parent = self._get_tag(parent)
if record_exists(parent['id'], inheritance_data):
if record_exists(parent["id"], inheritance_data):
continue
parent_data = {}
parent_data['parent_id'] = parent['id']
parent_data['priority'] = priority
parent_data['maxdepth'] = None
parent_data['intransitive'] = False
parent_data['noconfig'] = False
parent_data['pkg_filter'] = ''
parent_data["parent_id"] = parent["id"]
parent_data["priority"] = priority
parent_data["maxdepth"] = None
parent_data["intransitive"] = False
parent_data["noconfig"] = False
parent_data["pkg_filter"] = ""
inheritance_data.append(parent_data)
priority += 10
if inheritance_data:
self.koji_session.setInheritanceData(tag['id'], inheritance_data)
self.koji_session.setInheritanceData(tag["id"], inheritance_data)
@module_build_service.utils.validate_koji_tag('dest_tag')
@module_build_service.utils.validate_koji_tag("dest_tag")
def _koji_add_groups_to_tag(self, dest_tag, groups):
"""Add groups to a tag as well as packages listed by group
@@ -899,17 +936,17 @@ class KojiModuleBuilder(GenericBuilder):
log.debug("Adding groups=%s to tag=%s" % (list(groups), dest_tag))
if groups and not isinstance(groups, dict):
raise ValueError("Expected dict {'group' : [str(package1), ...]")
dest_tag = self._get_tag(dest_tag)['name']
existing_groups = dict([(p['name'], p['group_id'])
for p
in self.koji_session.getTagGroups(dest_tag, inherit=False)
])
dest_tag = self._get_tag(dest_tag)["name"]
existing_groups = dict([
(p["name"], p["group_id"])
for p in self.koji_session.getTagGroups(dest_tag, inherit=False)
])
for group, packages in groups.items():
group_id = existing_groups.get(group, None)
if group_id is not None:
log.debug("Group %s already exists for tag %s. Skipping creation."
% (group, dest_tag))
log.debug(
"Group %s already exists for tag %s. Skipping creation." % (group, dest_tag))
continue
self.koji_session.groupListAdd(dest_tag, group)
@@ -919,7 +956,7 @@ class KojiModuleBuilder(GenericBuilder):
for pkg in packages:
self.koji_session.groupPackageListAdd(dest_tag, group, pkg)
@module_build_service.utils.validate_koji_tag('tag_name')
@module_build_service.utils.validate_koji_tag("tag_name")
def _koji_create_tag(self, tag_name, arches=None, perm=None):
"""Create a tag in Koji
@@ -945,16 +982,16 @@ class KojiModuleBuilder(GenericBuilder):
raise ValueError("Expected list or None on input got %s" % type(arches))
current_arches = []
if taginfo['arches']: # None if none
current_arches = taginfo['arches'].split() # string separated by empty spaces
if taginfo["arches"]: # None if none
current_arches = taginfo["arches"].split() # string separated by empty spaces
if set(arches) != set(current_arches):
opts['arches'] = " ".join(arches)
opts["arches"] = " ".join(arches)
if perm:
if taginfo['locked']:
raise SystemError("Tag %s: master lock already set. Can't edit tag"
% taginfo['name'])
if taginfo["locked"]:
raise SystemError(
"Tag %s: master lock already set. Can't edit tag" % taginfo["name"])
perm_ids = self.getPerms()
@@ -962,15 +999,15 @@ class KojiModuleBuilder(GenericBuilder):
raise ValueError("Unknown permissions %s" % perm)
perm_id = perm_ids[perm]
if taginfo['perm'] not in (perm_id, perm): # check either id or the string
opts['perm'] = perm_id
if taginfo["perm"] not in (perm_id, perm): # check either id or the string
opts["perm"] = perm_id
# Create deepcopy of conf dict, because we are going to change it later.
opts['extra'] = copy.deepcopy(conf.koji_tag_extra_opts)
opts["extra"] = copy.deepcopy(conf.koji_tag_extra_opts)
xmd = self.mmd.get_xmd()
if "mbs_options" in xmd.keys() and "repo_include_all" in xmd["mbs_options"].keys():
opts['extra']['repo_include_all'] = xmd["mbs_options"]["repo_include_all"]
opts["extra"]["repo_include_all"] = xmd["mbs_options"]["repo_include_all"]
# edit tag with opts
self.koji_session.editTag2(tag_name, **opts)
@@ -983,18 +1020,20 @@ class KojiModuleBuilder(GenericBuilder):
# This will help with potential resubmitting of failed builds
pkglists = {}
for tag in tags:
pkglists[tag['id']] = dict([(p['package_name'], p['package_id'])
for p in self.koji_session.listPackages(tagID=tag['id'])])
pkglists[tag["id"]] = dict([
(p["package_name"], p["package_id"])
for p in self.koji_session.listPackages(tagID=tag["id"])
])
self.koji_session.multicall = True
for tag in tags:
pkglist = pkglists[tag['id']]
pkglist = pkglists[tag["id"]]
for package in packages:
if pkglist.get(package, None):
log.debug("%s Package %s is already whitelisted." % (self, package))
continue
self.koji_session.packageListAdd(tag['name'], package, self.owner)
self.koji_session.packageListAdd(tag["name"], package, self.owner)
self.koji_session.multiCall(strict=True)
def _koji_block_packages(self, packages):
@@ -1013,7 +1052,7 @@ class KojiModuleBuilder(GenericBuilder):
args = [[self.module_build_tag["name"], package] for package in packages]
koji_multicall_map(self.koji_session, self.koji_session.packageListUnblock, args)
@module_build_service.utils.validate_koji_tag(['build_tag', 'dest_tag'])
@module_build_service.utils.validate_koji_tag(["build_tag", "dest_tag"])
def _koji_add_target(self, name, build_tag, dest_tag):
"""Add build target if it doesn't exist or validate the existing one
@@ -1036,25 +1075,29 @@ class KojiModuleBuilder(GenericBuilder):
target_info = self.koji_session.getBuildTarget(name)
barches = build_tag.get("arches", None)
assert barches, "Build tag %s has no arches defined." % build_tag['name']
assert barches, "Build tag %s has no arches defined." % build_tag["name"]
if not target_info:
target_info = self.koji_session.createBuildTarget(name, build_tag['name'],
dest_tag['name'])
target_info = self.koji_session.createBuildTarget(
name, build_tag["name"], dest_tag["name"])
else: # verify whether build and destination tag matches
if build_tag['name'] != target_info['build_tag_name']:
raise SystemError(("Target references unexpected build_tag_name. "
"Got '%s', expected '%s'. Please contact administrator.")
% (target_info['build_tag_name'], build_tag['name']))
if dest_tag['name'] != target_info['dest_tag_name']:
raise SystemError(("Target references unexpected dest_tag_name. "
"Got '%s', expected '%s'. Please contact administrator.")
% (target_info['dest_tag_name'], dest_tag['name']))
if build_tag["name"] != target_info["build_tag_name"]:
raise SystemError(
"Target references unexpected build_tag_name. "
"Got '%s', expected '%s'. Please contact administrator."
% (target_info["build_tag_name"], build_tag["name"])
)
if dest_tag["name"] != target_info["dest_tag_name"]:
raise SystemError(
"Target references unexpected dest_tag_name. "
"Got '%s', expected '%s'. Please contact administrator."
% (target_info["dest_tag_name"], dest_tag["name"])
)
return self.koji_session.getBuildTarget(name)
def list_tasks_for_components(self, component_builds=None, state='active'):
def list_tasks_for_components(self, component_builds=None, state="active"):
"""
:param component_builds: list of component builds which we want to check
:param state: limit the check only for Koji tasks in the given state
@@ -1064,33 +1107,36 @@ class KojiModuleBuilder(GenericBuilder):
"""
component_builds = component_builds or []
if state == 'active':
states = [koji.TASK_STATES['FREE'],
koji.TASK_STATES['OPEN'],
koji.TASK_STATES['ASSIGNED']]
if state == "active":
states = [
koji.TASK_STATES["FREE"],
koji.TASK_STATES["OPEN"],
koji.TASK_STATES["ASSIGNED"],
]
elif state.upper() in koji.TASK_STATES:
states = [koji.TASK_STATES[state.upper()]]
else:
raise ValueError("State {} is not valid within Koji task states."
.format(state))
raise ValueError("State {} is not valid within Koji task states.".format(state))
tasks = []
for task in self.koji_session.listTasks(opts={'state': states,
'decode': True,
'method': 'build'}):
task_opts = task['request'][-1]
for task in self.koji_session.listTasks(
opts={"state": states, "decode": True, "method": "build"}
):
task_opts = task["request"][-1]
assert isinstance(task_opts, dict), "Task options shall be a dict."
if 'scratch' in task_opts and task_opts['scratch']:
if "scratch" in task_opts and task_opts["scratch"]:
continue
if 'mbs_artifact_name' not in task_opts:
task_opts['mbs_artifact_name'] = None
if 'mbs_module_target' not in task_opts:
task_opts['mbs_module_target'] = None
if "mbs_artifact_name" not in task_opts:
task_opts["mbs_artifact_name"] = None
if "mbs_module_target" not in task_opts:
task_opts["mbs_module_target"] = None
for c in component_builds:
# TODO: https://pagure.io/fm-orchestrator/issue/397
# Subj: Do not mix target/tag when looking for component builds
if (c.package == task_opts['mbs_artifact_name'] and
c.module_build.koji_tag == task_opts['mbs_module_target']):
if (
c.package == task_opts["mbs_artifact_name"]
and c.module_build.koji_tag == task_opts["mbs_module_target"]
):
tasks.append(task)
return tasks
@@ -1143,7 +1189,8 @@ class KojiModuleBuilder(GenericBuilder):
"packageID": component_id,
"userID": mbs_user_id,
"state": koji.BUILD_STATES["COMPLETE"],
"queryOpts": {"order": "-build_id", "limit": 1}})
"queryOpts": {"order": "-build_id", "limit": 1},
})
# Get the latest Koji build created by MBS for every component in single Koji call.
builds_per_component = koji_retrying_multicall_map(
@@ -1209,8 +1256,7 @@ class KojiModuleBuilder(GenericBuilder):
"""
with models.make_session(conf) as db_session:
build = models.ModuleBuild.get_build_from_nsvc(
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(),
mmd.get_context())
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context())
koji_session = KojiModuleBuilder.get_session(conf, login=False)
rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]
nvrs = set(kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)
@@ -1218,9 +1264,11 @@ class KojiModuleBuilder(GenericBuilder):
def finalize(self, succeeded=True):
# Only import to koji CG if the module is "build" and not scratch.
if (not self.module.scratch and
self.config.koji_enable_content_generator and
self.module.state == models.BUILD_STATES['build']):
if (
not self.module.scratch
and self.config.koji_enable_content_generator
and self.module.state == models.BUILD_STATES["build"]
):
cg = KojiContentGenerator(self.module, self.config)
cg.koji_import()
if conf.koji_cg_devel_module:
@@ -1244,8 +1292,10 @@ class KojiModuleBuilder(GenericBuilder):
tags = []
koji_tags = session.listTags(rpm_md["build_id"])
for t in koji_tags:
if (not t["name"].endswith("-build") and
t["name"].startswith(tuple(conf.koji_tag_prefixes))):
if (
not t["name"].endswith("-build")
and t["name"].startswith(tuple(conf.koji_tag_prefixes))
):
tags.append(t["name"])
return tags

View File

@@ -43,7 +43,7 @@ from module_build_service.builder.utils import (
create_local_repo_from_koji_tag,
execute_cmd,
find_srpm,
get_koji_config
get_koji_config,
)
from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder
@@ -68,8 +68,7 @@ class MockModuleBuilder(GenericBuilder):
except IOError:
pass
else:
raise IOError("None of {} mock config files found."
.format(conf.mock_config_file))
raise IOError("None of {} mock config files found.".format(conf.mock_config_file))
# Load yum config file template
for cf in conf.yum_config_file:
@@ -80,10 +79,9 @@ class MockModuleBuilder(GenericBuilder):
except IOError:
pass
else:
raise IOError("None of {} yum config files found."
.format(conf.yum_config_file))
raise IOError("None of {} yum config files found.".format(conf.yum_config_file))
@module_build_service.utils.validate_koji_tag('tag_name')
@module_build_service.utils.validate_koji_tag("tag_name")
def __init__(self, owner, module, config, tag_name, components):
self.module_str = module.name
self.module = module
@@ -101,8 +99,7 @@ class MockModuleBuilder(GenericBuilder):
if arch_detected:
self.arch = arch_detected
else:
log.warning("Couldn't determine machine arch. Falling back "
"to configured arch.")
log.warning("Couldn't determine machine arch. Falling back to configured arch.")
self.arch = conf.arch_fallback
else:
self.arch = conf.arch_fallback
@@ -144,8 +141,8 @@ class MockModuleBuilder(GenericBuilder):
for name in os.listdir(self.configdir):
os.remove(os.path.join(self.configdir, name))
log.info("MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" %
(tag_name, self.tag_dir))
log.info(
"MockModuleBuilder initialized, tag_name=%s, tag_dir=%s" % (tag_name, self.tag_dir))
@property
def module_build_tag(self):
@@ -175,18 +172,21 @@ class MockModuleBuilder(GenericBuilder):
m1_mmd = self.module.mmd()
artifacts = Modulemd.SimpleSet()
rpm_files = [f
for f in os.listdir(self.resultsdir)
if f.endswith(".rpm")]
rpm_files = [f for f in os.listdir(self.resultsdir) if f.endswith(".rpm")]
if rpm_files:
output = subprocess.check_output(['rpm',
'--queryformat',
'%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{ARCH}\n',
'-qp'] + rpm_files,
cwd=self.resultsdir,
universal_newlines=True)
nevras = output.strip().split('\n')
output = subprocess.check_output(
[
"rpm",
"--queryformat",
"%{NAME} %{EPOCHNUM} %{VERSION} %{RELEASE} %{ARCH}\n",
"-qp",
]
+ rpm_files,
cwd=self.resultsdir,
universal_newlines=True,
)
nevras = output.strip().split("\n")
if len(nevras) != len(rpm_files):
raise RuntimeError("rpm -qp returned an unexpected number of lines")
@@ -198,20 +198,20 @@ class MockModuleBuilder(GenericBuilder):
if name in m1_mmd.get_rpm_filter().get():
continue
pkglist_f.write(rpm_file + '\n')
artifacts.add('{}-{}:{}-{}.{}'.format(name, epoch, version, release, arch))
pkglist_f.write(rpm_file + "\n")
artifacts.add("{}-{}:{}-{}.{}".format(name, epoch, version, release, arch))
pkglist_f.close()
m1_mmd.set_rpm_artifacts(artifacts)
# Generate repo.
execute_cmd(['/usr/bin/createrepo_c', '--pkglist', pkglist, path])
execute_cmd(["/usr/bin/createrepo_c", "--pkglist", pkglist, path])
# ...and inject modules.yaml there if asked.
if include_module_yaml:
mmd_path = os.path.join(path, "modules.yaml")
m1_mmd.dump(mmd_path)
execute_cmd(['/usr/bin/modifyrepo_c', '--mdtype=modules', mmd_path, repodata_path])
execute_cmd(["/usr/bin/modifyrepo_c", "--mdtype=modules", mmd_path, repodata_path])
def _add_repo(self, name, baseurl, extra=""):
"""
@@ -247,18 +247,18 @@ class MockModuleBuilder(GenericBuilder):
with MockModuleBuilder._config_lock:
infile = os.path.join(self.configdir, "mock.cfg")
with open(infile, 'r') as f:
with open(infile, "r") as f:
# This looks scary, but it is the way how mock itself loads the
# config file ...
config_opts = {}
code = compile(f.read(), infile, 'exec')
code = compile(f.read(), infile, "exec")
# pylint: disable=exec-used
exec(code)
self.groups = config_opts["chroot_setup_cmd"].split(" ")[1:]
self.yum_conf = config_opts['yum.conf']
self.enabled_modules = config_opts['module_enable']
self.releasever = config_opts['releasever']
self.yum_conf = config_opts["yum.conf"]
self.enabled_modules = config_opts["module_enable"]
self.releasever = config_opts["releasever"]
def _write_mock_config(self):
"""
@@ -267,8 +267,8 @@ class MockModuleBuilder(GenericBuilder):
with MockModuleBuilder._config_lock:
config = str(MockModuleBuilder.mock_config_template)
config = config.replace("$root", "%s-%s" % (self.tag_name,
str(threading.current_thread().name)))
config = config.replace(
"$root", "%s-%s" % (self.tag_name, str(threading.current_thread().name)))
config = config.replace("$arch", self.arch)
config = config.replace("$group", " ".join(self.groups))
config = config.replace("$yum_conf", self.yum_conf)
@@ -278,13 +278,13 @@ class MockModuleBuilder(GenericBuilder):
# We write the most recent config to "mock.cfg", so thread-related
# configs can be later (re-)generated from it using _load_mock_config.
outfile = os.path.join(self.configdir, "mock.cfg")
with open(outfile, 'w') as f:
with open(outfile, "w") as f:
f.write(config)
# Write the config to thread-related configuration file.
outfile = os.path.join(self.configdir, "mock-%s.cfg" %
str(threading.current_thread().name))
with open(outfile, 'w') as f:
outfile = os.path.join(
self.configdir, "mock-%s.cfg" % str(threading.current_thread().name))
with open(outfile, "w") as f:
f.write(config)
def buildroot_connect(self, groups):
@@ -319,6 +319,7 @@ class MockModuleBuilder(GenericBuilder):
self._write_mock_config()
from module_build_service.scheduler.consumer import fake_repo_done_message
fake_repo_done_message(self.tag_name)
def tag_artifacts(self, artifacts):
@@ -361,11 +362,11 @@ class MockModuleBuilder(GenericBuilder):
repo = koji_session.getRepo(repo_name)
if repo:
baseurl = koji.PathInfo(topdir=koji_config.topurl).repo(repo["id"], repo_name)
baseurl = '{0}/{1}/'.format(baseurl, self.arch)
baseurl = "{0}/{1}/".format(baseurl, self.arch)
else:
repo_dir = os.path.join(self.config.cache_dir, "koji_tags", tag)
create_local_repo_from_koji_tag(self.config, tag, repo_dir,
[self.arch, "noarch"])
create_local_repo_from_koji_tag(
self.config, tag, repo_dir, [self.arch, "noarch"])
baseurl = "file://" + repo_dir
# Check to see if there are any external repos tied to the tag
for ext_repo in koji_session.getTagExternalRepos(repo_name):
@@ -382,13 +383,13 @@ class MockModuleBuilder(GenericBuilder):
# build_id=1 and task_id=1 are OK here, because we are building just
# one RPM at the time.
msg = module_build_service.messaging.KojiBuildChange(
msg_id='a faked internal message',
msg_id="a faked internal message",
build_id=build_id,
task_id=build_id,
build_name=nvr["name"],
build_new_state=state,
build_release=nvr["release"],
build_version=nvr["version"]
build_version=nvr["version"],
)
module_build_service.scheduler.consumer.work_queue_put(msg)
@@ -411,7 +412,7 @@ class MockModuleBuilder(GenericBuilder):
os.remove(log_path)
# Remove other files containing useless information
elif logf.endswith('-srpm-stdout.log'):
elif logf.endswith("-srpm-stdout.log"):
with open(log_path) as f:
data = f.read(4096)
if re.match("Downloading [^\n]*\n\n\nWrote: [^\n]", data):
@@ -421,24 +422,27 @@ class MockModuleBuilder(GenericBuilder):
"""
Builds the artifact from the SRPM.
"""
state = koji.BUILD_STATES['BUILDING']
state = koji.BUILD_STATES["BUILDING"]
# Use the mock config associated with this thread.
mock_config = os.path.join(self.configdir,
"mock-%s.cfg" % str(threading.current_thread().name))
mock_config = os.path.join(
self.configdir, "mock-%s.cfg" % str(threading.current_thread().name))
# Open the logs to which we will forward mock stdout/stderr.
mock_stdout_log = open(os.path.join(self.resultsdir,
artifact_name + "-mock-stdout.log"), "w")
mock_stderr_log = open(os.path.join(self.resultsdir,
artifact_name + "-mock-stderr.log"), "w")
mock_stdout_log = open(
os.path.join(self.resultsdir, artifact_name + "-mock-stdout.log"), "w")
mock_stderr_log = open(
os.path.join(self.resultsdir, artifact_name + "-mock-stderr.log"), "w")
srpm = artifact_name
resultsdir = builder.resultsdir
try:
# Initialize mock.
execute_cmd(["mock", "-v", "-r", mock_config, "--init"],
stdout=mock_stdout_log, stderr=mock_stderr_log)
execute_cmd(
["mock", "-v", "-r", mock_config, "--init"],
stdout=mock_stdout_log,
stderr=mock_stderr_log,
)
# Start the build and store results to resultsdir
builder.build(mock_stdout_log, mock_stderr_log)
@@ -448,23 +452,21 @@ class MockModuleBuilder(GenericBuilder):
# are put in the scheduler's work queue and are handled
# by MBS after the build_srpm() method returns and scope gets
# back to scheduler.main.main() method.
state = koji.BUILD_STATES['COMPLETE']
state = koji.BUILD_STATES["COMPLETE"]
self._send_build_change(state, srpm, build_id)
with open(os.path.join(resultsdir, "status.log"), 'w') as f:
with open(os.path.join(resultsdir, "status.log"), "w") as f:
f.write("complete\n")
except Exception as e:
log.error("Error while building artifact %s: %s" % (artifact_name,
str(e)))
log.error("Error while building artifact %s: %s" % (artifact_name, str(e)))
# Emit messages simulating complete build. These messages
# are put in the scheduler's work queue and are handled
# by MBS after the build_srpm() method returns and scope gets
# back to scheduler.main.main() method.
state = koji.BUILD_STATES['FAILED']
self._send_build_change(state, srpm,
build_id)
with open(os.path.join(resultsdir, "status.log"), 'w') as f:
state = koji.BUILD_STATES["FAILED"]
self._send_build_change(state, srpm, build_id)
with open(os.path.join(resultsdir, "status.log"), "w") as f:
f.write("failed\n")
mock_stdout_log.close()
@@ -493,7 +495,7 @@ class MockModuleBuilder(GenericBuilder):
# already in repository ready to be used. This is not a case for Mock
# backend in the time we return here.
reason = "Building %s in Mock" % (artifact_name)
return build_id, koji.BUILD_STATES['BUILDING'], reason, None
return build_id, koji.BUILD_STATES["BUILDING"], reason, None
def build(self, artifact_name, source):
log.info("Starting building artifact %s: %s" % (artifact_name, source))
@@ -502,8 +504,8 @@ class MockModuleBuilder(GenericBuilder):
# generate the thread-specific mock config by writing it to fs again.
self._load_mock_config()
self._write_mock_config()
mock_config = os.path.join(self.configdir, "mock-%s.cfg"
% str(threading.current_thread().name))
mock_config = os.path.join(
self.configdir, "mock-%s.cfg" % str(threading.current_thread().name))
# Get the build-id in thread-safe manner.
build_id = None
@@ -513,15 +515,14 @@ class MockModuleBuilder(GenericBuilder):
# Clear resultsdir associated with this thread or in case it does not
# exist, create it.
resultsdir = os.path.join(self.resultsdir,
str(threading.current_thread().name))
resultsdir = os.path.join(self.resultsdir, str(threading.current_thread().name))
if os.path.exists(resultsdir):
for name in os.listdir(resultsdir):
os.remove(os.path.join(resultsdir, name))
else:
os.makedirs(resultsdir)
if source.endswith('.src.rpm'):
if source.endswith(".src.rpm"):
builder = SRPMBuilder(mock_config, resultsdir, source)
else:
# Otherwise, assume we're building from some scm repo
@@ -536,7 +537,7 @@ class MockModuleBuilder(GenericBuilder):
def cancel_build(self, task_id):
pass
def list_tasks_for_components(self, component_builds=None, state='active'):
def list_tasks_for_components(self, component_builds=None, state="active"):
pass
def repo_from_tag(cls, config, tag_name, arch):
@@ -557,8 +558,7 @@ class MockModuleBuilder(GenericBuilder):
"""
with models.make_session(conf) as db_session:
build = models.ModuleBuild.get_build_from_nsvc(
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(),
mmd.get_context())
db_session, mmd.get_name(), mmd.get_stream(), mmd.get_version(), mmd.get_context())
if build.koji_tag.startswith("repofile://"):
# Modules from local repository have already the RPMs filled in mmd.
return list(mmd.get_rpm_artifacts().get())
@@ -573,9 +573,7 @@ class BaseBuilder(object):
def __init__(self, config, resultsdir):
self.config = config
self.resultsdir = resultsdir
self.cmd = ["mock", "-v", "-r", config,
"--no-clean",
"--resultdir=%s" % resultsdir]
self.cmd = ["mock", "-v", "-r", config, "--no-clean", "--resultdir=%s" % resultsdir]
def build(self, stdout, stderr):
execute_cmd(self.cmd, stdout=stdout, stderr=stderr)
@@ -602,24 +600,20 @@ class SCMBuilder(BaseBuilder):
# See https://bugzilla.redhat.com/show_bug.cgi?id=1459437 for
# more info. Once mock-scm supports this feature, we can remove
# this code.
distgit_get_branch = \
"sh -c {}'; git -C {} checkout {}'".format(pipes.quote(distgit_get),
artifact_name,
branch)
distgit_get_branch = "sh -c {}'; git -C {} checkout {}'".format(
pipes.quote(distgit_get), artifact_name, branch)
f.writelines([
"config_opts['scm'] = True\n",
"config_opts['scm_opts']['method'] = 'distgit'\n",
"config_opts['scm_opts']['package'] = '{}'\n".format(
artifact_name),
"config_opts['scm_opts']['distgit_get'] = {!r}\n".format(
distgit_get_branch),
"config_opts['scm_opts']['package'] = '{}'\n".format(artifact_name),
"config_opts['scm_opts']['distgit_get'] = {!r}\n".format(distgit_get_branch),
])
# Set distgit_src_get only if it's defined.
if distgit_cmds[1]:
f.write("config_opts['scm_opts']['distgit_src_get'] = '{}'\n".format(
distgit_cmds[1]))
f.write(
"config_opts['scm_opts']['distgit_src_get'] = '{}'\n".format(distgit_cmds[1]))
# The local git repositories cloned by `fedpkg clone` typically do not have
# the tarballs with sources committed in a git repo. They normally live in lookaside
@@ -633,7 +627,7 @@ class SCMBuilder(BaseBuilder):
def _make_executable(self, path):
mode = os.stat(path).st_mode
mode |= (mode & 0o444) >> 2 # copy R bits to X
mode |= (mode & 0o444) >> 2 # copy R bits to X
os.chmod(path, mode)
def _get_distgit_commands(self, source):
@@ -658,6 +652,6 @@ class SCMBuilder(BaseBuilder):
# let's return 0.0 so the type is consistent
return self.koji_session.getAverageBuildDuration(component.package) or 0.0
except Exception:
log.debug('The Koji call to getAverageBuildDuration failed. Is Koji properly '
'configured?')
log.debug(
"The Koji call to getAverageBuildDuration failed. Is Koji properly configured?")
return 0.0

View File

@@ -2,9 +2,7 @@ import pkg_resources
from module_build_service.builder.base import GenericBuilder
__all__ = [
GenericBuilder
]
__all__ = [GenericBuilder]
for entrypoint in pkg_resources.iter_entry_points('mbs.builder_backends'):
for entrypoint in pkg_resources.iter_entry_points("mbs.builder_backends"):
GenericBuilder.register_backend_class(entrypoint.load())

View File

@@ -91,9 +91,10 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
# We are skipping the caching based on the first two arguments of
# default_buildroot_groups, because they are "self" and db.session
# instance which are different each call we call that method.
default_buildroot_groups_cache = dogpile.cache.make_region(
function_key_generator=create_dogpile_key_generator_func(2)).configure(
'dogpile.cache.memory')
default_buildroot_groups_cache = (
dogpile.cache.make_region(function_key_generator=create_dogpile_key_generator_func(2))
.configure("dogpile.cache.memory")
)
@classmethod
def register_backend_class(cls, backend_class):
@@ -113,13 +114,14 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
# check if the backend is within allowed backends for the used resolver
resolver = module_build_service.resolver.system_resolver
if not resolver.is_builder_compatible(backend):
raise ValueError("Builder backend '{}' is not compatible with "
"resolver backend '{}'. Check your configuration."
.format(backend, resolver.backend))
raise ValueError(
"Builder backend '{}' is not compatible with resolver backend '{}'. Check your "
"configuration.".format(backend, resolver.backend)
)
if backend in GenericBuilder.backends:
return GenericBuilder.backends[backend](owner=owner, module=module,
config=config, **extra)
return GenericBuilder.backends[backend](
owner=owner, module=module, config=config, **extra)
else:
raise ValueError("Builder backend='%s' not recognized" % backend)
@@ -137,8 +139,13 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
"""
components = [c.package for c in module.component_builds]
builder = GenericBuilder.create(
module.owner, module, config.system, config, tag_name=module.koji_tag,
components=components)
module.owner,
module,
config.system,
config,
tag_name=module.koji_tag,
components=components,
)
if buildroot_connect is True:
groups = GenericBuilder.default_buildroot_groups(session, module)
builder.buildroot_connect(groups)
@@ -156,8 +163,7 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
the tag with particular name and architecture.
"""
if backend in GenericBuilder.backends:
return GenericBuilder.backends[backend].repo_from_tag(
config, tag_name, arch)
return GenericBuilder.backends[backend].repo_from_tag(config, tag_name, arch)
else:
raise ValueError("Builder backend='%s' not recognized" % backend)
@@ -310,23 +316,18 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
# Resolve default buildroot groups using the MBS, but only for
# non-local modules.
groups = resolver.resolve_profiles(
mmd, ('buildroot', 'srpm-buildroot'))
groups = {
'build': groups['buildroot'],
'srpm-build': groups['srpm-buildroot'],
}
groups = resolver.resolve_profiles(mmd, ("buildroot", "srpm-buildroot"))
groups = {"build": groups["buildroot"], "srpm-build": groups["srpm-buildroot"]}
except ValueError:
reason = "Failed to gather buildroot groups from SCM."
log.exception(reason)
module.transition(conf, state="failed", state_reason=reason, failure_type='user')
module.transition(conf, state="failed", state_reason=reason, failure_type="user")
session.commit()
raise
return groups
@abstractmethod
def list_tasks_for_components(self, component_builds=None, state='active'):
def list_tasks_for_components(self, component_builds=None, state="active"):
"""
:param component_builds: list of component builds which we want to check
:param state: limit the check only for tasks in the given state
@@ -416,13 +417,15 @@ class GenericBuilder(six.with_metaclass(ABCMeta)):
continue
if average_time_to_build < 0:
log.warning("Negative average build duration for component %s: %s",
component, str(average_time_to_build))
log.warning(
"Negative average build duration for component %s: %s",
component, str(average_time_to_build),
)
weights[component] = weight
continue
# Increase the task weight by 0.75 for every hour of build duration.
adj = (average_time_to_build / ((60 * 60) / 0.75))
adj = average_time_to_build / ((60 * 60) / 0.75)
# cap the adjustment at +4.5
weight += min(4.5, adj)

View File

@@ -58,10 +58,8 @@ def get_koji_config(mbs_config):
# Placed here to avoid py2/py3 conflicts...
import koji
koji_config = munch.Munch(koji.read_config(
profile_name=mbs_config.koji_profile,
user_config=mbs_config.koji_config,
))
koji_config = munch.Munch(
koji.read_config(profile_name=mbs_config.koji_profile, user_config=mbs_config.koji_config))
# Timeout after 10 minutes. The default is 12 hours.
koji_config["timeout"] = 60 * 10
return koji_config
@@ -93,7 +91,7 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
log.exception("Failed to list rpms in tag %r" % tag)
# Reformat builds so they are dict with build_id as a key.
builds = {build['build_id']: build for build in builds}
builds = {build["build_id"]: build for build in builds}
# Prepare pathinfo we will use to generate the URL.
pathinfo = koji.PathInfo(topdir=session.opts["topurl"])
@@ -104,26 +102,25 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
# Prepare the list of URLs to download
download_args = []
for rpm in rpms:
build_info = builds[rpm['build_id']]
build_info = builds[rpm["build_id"]]
# We do not download debuginfo packages or packages built for archs
# we are not interested in.
if koji.is_debuginfo(rpm['name']) or not rpm['arch'] in archs:
if koji.is_debuginfo(rpm["name"]) or not rpm["arch"] in archs:
continue
fname = pathinfo.rpm(rpm)
relpath = os.path.basename(fname)
local_fn = os.path.join(repo_dir, relpath)
# Download only when the RPM is not downloaded or the size does not match.
if not os.path.exists(local_fn) or os.path.getsize(local_fn) != rpm['size']:
if not os.path.exists(local_fn) or os.path.getsize(local_fn) != rpm["size"]:
if os.path.exists(local_fn):
os.remove(local_fn)
repo_changed = True
url = pathinfo.build(build_info) + '/' + fname
url = pathinfo.build(build_info) + "/" + fname
download_args.append((url, local_fn))
log.info(
"Downloading %d packages from Koji tag %s to %s" % (len(download_args), tag, repo_dir))
log.info("Downloading %d packages from Koji tag %s to %s" % (len(download_args), tag, repo_dir))
# Create the output directory
try:
@@ -162,4 +159,4 @@ def create_local_repo_from_koji_tag(config, tag, repo_dir, archs=None):
shutil.rmtree(repodata_path)
log.info("Creating local repository in %s" % repo_dir)
execute_cmd(['/usr/bin/createrepo_c', repo_dir])
execute_cmd(["/usr/bin/createrepo_c", repo_dir])