C3I role integration with MBS3

Use unified role for C3I workflows - https://pagure.io/c3i-library/blob/master/f/roles/c3i
This commit is contained in:
Michal Kovarik
2020-03-03 12:09:48 +01:00
committed by mkovarik
parent 4195f06ec9
commit c80a361717
41 changed files with 301 additions and 1755 deletions

View File

@@ -1,3 +1,4 @@
.git/
.env/
.pytest_cache/
.tox/

View File

@@ -20,7 +20,7 @@ RUN rpmdev-setuptree && \
rpmbuild --define "_sourcedir $PWD/dist" -ba *.spec && \
mv $HOME/rpmbuild/RPMS /srv
RUN flake8 && \
bandit -r -ll -s B102,B303,B411,B602 module_build_service && \
bandit -r -ll -s B102,B104,B303,B411,B602 module_build_service && \
tox -v -e py3

View File

@@ -1,55 +0,0 @@
OC:=oc
OCFLAGS:=
JOBS_DIR:=jobs
TEMPLATES_DIR:=templates
JOB_PARAM_FILES:=$(wildcard $(JOBS_DIR)/*.env)
JOBS:=$(patsubst $(JOBS_DIR)/%.env,%,$(JOB_PARAM_FILES))
OC_CMD=$(OC) $(OCFLAGS)
help:
@echo TARGETS
@echo -e "\tinstall\t\tInstall or update pipelines to OpenShift"
@echo -e "\tuninstall\tDelete installed pipelines from OpenShift"
@echo
@echo VARIABLES
@echo -e "\tJOBS\t\tSpace seperated list of pipeline jobs to install"
@echo -e "\tJOBS_DIR\tLooking for pipeline job definitions in an alternate directory."
@echo -e "\tTEMPLATES_DIR\tLooking for pipeline job templates in an alternate directory."
@echo -e "\tOC\t\tUse this oc command"
@echo -e "\tOCFLAGS\t\tOptions to append to the oc command arguments"
install:
@$(OC_CMD) project
@for job in $(JOBS); do \
echo "[PIPELINE] Updating pipeline job \"$${job}\"..." ; \
template_file=$$(cat ./$(JOBS_DIR)/$${job}.tmpl); \
jinja2 ./$(TEMPLATES_DIR)/$${template_file} | $(OC_CMD) process --local -f - \
--param-file ./$(JOBS_DIR)/$${job}.env | $(OC_CMD) apply -f -; \
echo "[PIPELINE] Pipeline job \"$${job}\" updated" ; \
done
uninstall:
@$(OC_CMD) project
@for job in $(JOBS); do \
template_file=$$(cat ./$(JOBS_DIR)/$${job}.tmpl); \
template_name=$${template_file%.y?ml}; \
template_name=$${template_name%-template}; \
echo "[PIPELINE] Deleting pipeline job \"$${job}\"..." ; \
$(OC_CMD) delete all -l template="$$template_name" -l app="$$job" ;\
echo "[PIPELINE] Pipeline job \"$${job}\" deleted" ; \
done
create-jenkins-is:
$(OC_CMD) import-image jenkins:2 --confirm --scheduled=true \
--from=registry.access.redhat.com/openshift3/jenkins-2-rhel7:v3.11
install-jenkins: create-jenkins-is
$(OC_CMD) new-app --template=jenkins-persistent \
-p MEMORY_LIMIT=2Gi \
-p VOLUME_CAPACITY=10Gi \
-p NAMESPACE=$(shell $(OC_CMD) project -q) \
-e INSTALL_PLUGINS=script-security:1.46,permissive-script-security:0.3,timestamper:1.9,http_request:1.8.22,ownership:0.12.1,antisamy-markup-formatter:1.5,update-sites-manager:2.0.0 \
-e JENKINS_JAVA_OVERRIDES="-Dpermissive-script-security.enabled=no_security"
update-pagure-api-key:
[ -n "$(KEY)" ] # You must specify KEY=<key value>
$(OC_CMD) delete secret pagure-api-key --ignore-not-found=true
$(OC_CMD) create secret generic pagure-api-key --from-literal=secrettext=$(KEY)
$(OC_CMD) label secret pagure-api-key credential.sync.jenkins.openshift.io=true
.PHONY: help install uninstall create-jenkins-is install-jenkins update-pagure-api-key

View File

@@ -0,0 +1,39 @@
---
c3i_component: mbs
c3i_build_and_test_snippet: snippets/mbs-build-and-test.groovy
c3i_integration_test_snippet: snippets/mbs-integration-test.groovy
c3i_images_for_promotion:
- mbs-frontend
- mbs-backend
c3i_git_repo: https://pagure.io/fm-orchestrator.git
c3i_jenkins_agent_buildconfig_contextdir: openshift/integration/koji/containers/jenkins-slave
c3i_jenkins_agent_buildconfig_dockerfile: Dockerfile
c3i_pagure_api_key_secret: pagure-api-key
c3i_build_custom_parameters:
- name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE
value: c3i
- name: MBS_BACKEND_IMAGESTREAM_NAME
value: mbs-backend
- name: MBS_FRONTEND_IMAGESTREAM_NAME
value: mbs-frontend
- name: MBS_SPEC_FILE
value: https://src.fedoraproject.org/rpms/module-build-service/raw/master/f/module-build-service.spec
- name: EXTRA_REPOS
value: https://copr.fedorainfracloud.org/coprs/mikeb/mbs-messaging-umb/repo/fedora-31/mikeb-mbs-messaging-umb-fedora-31.repo
- name: EXTRA_RPMS
value: mbs-messaging-umb
c3i_quay_namespace: factory2
c3i_integration_test_custom_parameters:
- name: MBS_BACKEND_REPO
value: "{{ c3i_quay_address }}/{{ c3i_quay_namespace }}/mbs-backend"
c3i_lib_branch: master
c3i_lib_url: https://pagure.io/c3i-library.git
c3i_tracked_container_repo: "{{ c3i_quay_address }}/{{ c3i_quay_namespace }}/mbs-frontend"
c3i_definition_dir: openshift/integration/koji/pipelines

View File

@@ -0,0 +1,15 @@
- name: Deployment playbook
hosts: localhost
vars_files:
- c3i-role-vars.yml
tasks:
- git:
repo: "{{ c3i_lib_url }}"
dest: c3i-library
version: "{{ c3i_lib_branch }}"
- file:
src: c3i-library/roles
dest: roles
state: link
- include_role:
name: c3i
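
For reference, this playbook effectively replaces the deleted Makefile-based install flow: it clones c3i-library, symlinks its roles directory, and applies the unified c3i role with the variables from c3i-role-vars.yml. The sketch below shows how a Jenkins job might invoke it; the playbook filename (deploy.yml), the agent label, and the presence of ansible-playbook on the agent are assumptions for illustration only, not part of this commit.

node('ansible') {
    // Check out the MBS repo that carries c3i-role-vars.yml and the deployment playbook
    checkout scm
    // c3i_definition_dir in c3i-role-vars.yml points at this directory (playbook location assumed)
    dir('openshift/integration/koji/pipelines') {
        // The playbook targets localhost, so run it with a local connection
        sh 'ansible-playbook -i localhost, -c local deploy.yml'
    }
}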

View File

@@ -1,9 +0,0 @@
NAME=mbs-backend-greenwave-promote-to-prod
DECISION_CONTEXT_REGEX=c3i_promote_stage_to_prod
SUBJECT_IDENTIFIER_REGEX=^factory2/mbs-backend@sha256:
SOURCE_CONTAINER_REPO=quay.io/factory2/mbs-backend
TARGET_TAG=prod
MESSAGING_TOPIC=Consumer.rh-jenkins-ci-plugin.c3i-mbs-backend-greenwave-promote-to-prod.VirtualTopic.eng.greenwave.decision.update
PROMOTING_DESTINATIONS=quay.io/factory2/mbs-backend
TAG_INTO_IMAGESTREAM=true
DEST_IMAGESTREAM_NAME=mbs-backend

View File

@@ -1 +0,0 @@
mbs-greenwave-trigger.yaml

View File

@@ -1,9 +0,0 @@
NAME=mbs-backend-greenwave-promote-to-stage
DECISION_CONTEXT_REGEX=c3i_promote_dev_to_stage
SUBJECT_IDENTIFIER_REGEX=^factory2/mbs-backend@sha256:
SOURCE_CONTAINER_REPO=quay.io/factory2/mbs-backend
TARGET_TAG=stage
MESSAGING_TOPIC=Consumer.rh-jenkins-ci-plugin.c3i-mbs-backend-greenwave-promote-to-stage.VirtualTopic.eng.greenwave.decision.update
PROMOTING_DESTINATIONS=quay.io/factory2/mbs-backend
TAG_INTO_IMAGESTREAM=true
DEST_IMAGESTREAM_NAME=mbs-backend

View File

@@ -1,9 +0,0 @@
NAME=mbs-frontend-greenwave-promote-to-prod
DECISION_CONTEXT_REGEX=c3i_promote_stage_to_prod
SUBJECT_IDENTIFIER_REGEX=^factory2/mbs-frontend@sha256:
SOURCE_CONTAINER_REPO=quay.io/factory2/mbs-frontend
TARGET_TAG=prod
MESSAGING_TOPIC=Consumer.rh-jenkins-ci-plugin.c3i-mbs-frontend-greenwave-promote-to-prod.VirtualTopic.eng.greenwave.decision.update
PROMOTING_DESTINATIONS=quay.io/factory2/mbs-frontend
TAG_INTO_IMAGESTREAM=true
DEST_IMAGESTREAM_NAME=mbs-frontend

View File

@@ -1,9 +0,0 @@
NAME=mbs-frontend-greenwave-promote-to-stage
DECISION_CONTEXT_REGEX=c3i_promote_dev_to_stage
SUBJECT_IDENTIFIER_REGEX=^factory2/mbs-frontend@sha256:
SOURCE_CONTAINER_REPO=quay.io/factory2/mbs-frontend
TARGET_TAG=stage
MESSAGING_TOPIC=Consumer.rh-jenkins-ci-plugin.c3i-mbs-frontend-greenwave-promote-to-stage.VirtualTopic.eng.greenwave.decision.update
PROMOTING_DESTINATIONS=quay.io/factory2/mbs-frontend
TAG_INTO_IMAGESTREAM=true
DEST_IMAGESTREAM_NAME=mbs-frontend

View File

@@ -1 +0,0 @@
NAME=mbs-polling-for-master

View File

@@ -1 +0,0 @@
mbs-polling-pagure.yaml

View File

@@ -1,2 +0,0 @@
NAME=mbs-polling-for-prs
PAGURE_POLLING_FOR_PR=true

View File

@@ -1 +0,0 @@
mbs-polling-pagure.yaml

View File

@@ -1,4 +0,0 @@
NAME=mbs-postmerge
EXTRA_REPOS=https://copr.fedorainfracloud.org/coprs/mikeb/mbs-messaging-umb/repo/fedora-31/mikeb-mbs-messaging-umb-fedora-31.repo
EXTRA_RPMS=mbs-messaging-umb
MAIL_ADDRESS=pnt-factory2-alerts@redhat.com

View File

@@ -1 +0,0 @@
mbs-build-template.yaml

View File

@@ -1,4 +0,0 @@
NAME=mbs-premerge
EXTRA_REPOS=https://copr.fedorainfracloud.org/coprs/mikeb/mbs-messaging-umb/repo/fedora-31/mikeb-mbs-messaging-umb-fedora-31.repo
EXTRA_RPMS=mbs-messaging-umb
MAIL_ADDRESS=pnt-factory2-alerts@redhat.com

View File

@@ -1 +0,0 @@
mbs-build-template.yaml

View File

@@ -1,4 +0,0 @@
NAME=mbs-trigger-on-latest-tag
MESSAGING_TOPIC=Consumer.rh-jenkins-ci-plugin.c3i-mbs-trigger-on-latest-tag.VirtualTopic.eng.repotracker.container.tag.>
ENVIRONMENT=stage
TRACKED_TAG=latest

View File

@@ -1 +0,0 @@
mbs-repotracker-trigger.yaml

View File

@@ -1,4 +0,0 @@
NAME=mbs-trigger-on-stage-tag
MESSAGING_TOPIC=Consumer.rh-jenkins-ci-plugin.c3i-mbs-trigger-on-stage-tag.VirtualTopic.eng.repotracker.container.tag.>
TRACKED_TAG=stage
ENVIRONMENT=prod

View File

@@ -1 +0,0 @@
mbs-repotracker-trigger.yaml

View File

@@ -0,0 +1,138 @@
stage('Prepare repo and env') {
steps {
script {
// Generate a version-release number for the target Git commit
def version = sh(script: """grep -m 1 -P -o '(?<=version=")[^"]+' setup.py""", returnStdout: true).trim()
def build_suffix = ".jenkins${currentBuild.id}.git${env.GIT_COMMIT.take(7)}"
env.RESULTING_TAG = "${version}${build_suffix}"
def resp = httpRequest params.MBS_SPEC_FILE
def spec_file_name = params.MBS_SPEC_FILE.split("/").last()
writeFile file: spec_file_name, text: resp.content
env.ENVIRONMENT = 'dev'
// Add celery dependency and remove config.py - should be removed after spec is updated to v3
sh """
sed -i \
-e 's/Version:.*/Version: ${version}/' \
-e 's/%{?dist}/${build_suffix}%{?dist}/' \
-e 's|\\(^BuildRequires: python3-dnf\\)|\\1\\nBuildRequires: python3-celery|' \
-e 's|\\(^Requires: python3-dnf\\)|\\1\\nRequires: python3-celery|' \
-e '/%config(noreplace) %{_sysconfdir}\\/module-build-service\\/config.py/d' \
${spec_file_name}
"""
sh 'mkdir repos'
params.EXTRA_REPOS.split().each {
resp = httpRequest it
writeFile file: "repos/${it.split("/").last()}", text: resp.content
}
sh """
sed -i \
-e '/enum34/d' \
-e '/funcsigs/d' \
-e '/futures/d' \
-e '/koji/d' \
requirements.txt
"""
sh """
sed -i \
-e 's/py.test/py.test-3/g' \
-e '/basepython/d' \
-e '/sitepackages/a setenv = PYTHONPATH={toxinidir}' \
tox.ini
"""
}
}
}
stage('Build backend image') {
environment {
BACKEND_BUILDCONFIG_ID = "mbs-backend-build-${currentBuild.id}-${UUID.randomUUID().toString().take(7)}"
}
steps {
script {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
// OpenShift BuildConfig doesn't support specifying a tag name at build time.
// We have to create a new BuildConfig for each image build.
echo 'Creating a BuildConfig for mbs-backend build...'
def created = new Date().format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone('UTC'))
def template = readYaml file: 'openshift/backend/mbs-backend-build-template.yaml'
def processed = openshift.process(template,
'-p', "NAME=${env.BACKEND_BUILDCONFIG_ID}",
'-p', "MBS_GIT_REPO=${params.GIT_REPO}",
// A pull-request branch, like pull/123/head, cannot be built with commit ID
// because refspec cannot be customized in an OpenShift build.
'-p', "MBS_GIT_REF=${env.PR_NO ? env.GIT_REPO_REF : env.GIT_COMMIT}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAME=${params.MBS_BACKEND_IMAGESTREAM_NAME}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",
'-p', "MBS_IMAGE_TAG=${env.RESULTING_TAG}",
'-p', "EXTRA_RPMS=${params.EXTRA_RPMS}",
'-p', "CREATED=${created}"
)
def build = c3i.buildAndWait(script: this, objs: processed, '--from-dir=.')
def ocpBuild = build.object()
env.BACKEND_IMAGE_DIGEST = ocpBuild.status.output.to.imageDigest
def ref = ocpBuild.status.outputDockerImageReference
def repo = ref.tokenize(':')[0..-2].join(':')
env.BACKEND_IMAGE_REPO = repo
env.BACKEND_IMAGE_REF = repo + '@' + env.BACKEND_IMAGE_DIGEST
echo "Built image ${env.BACKEND_IMAGE_REF}, digest: ${env.BACKEND_IMAGE_DIGEST}, tag: ${env.RESULTING_TAG}"
}
}
}
}
post {
failure {
echo "Failed to build mbs-backend image ${env.RESULTING_TAG}."
}
}
}
stage('Build frontend image') {
environment {
FRONTEND_BUILDCONFIG_ID = "mbs-frontend-build-${currentBuild.id}-${UUID.randomUUID().toString().take(7)}"
}
steps {
script {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
// OpenShift BuildConfig doesn't support specifying a tag name at build time.
// We have to create a new BuildConfig for each image build.
echo 'Creating a BuildConfig for mbs-frontend build...'
def created = new Date().format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone('UTC'))
def template = readYaml file: 'openshift/frontend/mbs-frontend-build-template.yaml'
def processed = openshift.process(template,
'-p', "NAME=${env.FRONTEND_BUILDCONFIG_ID}",
'-p', "MBS_GIT_REPO=${params.GIT_REPO}",
// A pull-request branch, like pull/123/head, cannot be built with commit ID
// because refspec cannot be customized in an OpenShift build.
'-p', "MBS_GIT_REF=${env.PR_NO ? env.GIT_REPO_REF : env.GIT_COMMIT}",
'-p', "MBS_FRONTEND_IMAGESTREAM_NAME=${params.MBS_FRONTEND_IMAGESTREAM_NAME}",
'-p', "MBS_FRONTEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",
'-p', "MBS_IMAGE_TAG=${env.RESULTING_TAG}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAME=${params.MBS_BACKEND_IMAGESTREAM_NAME}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",
'-p', "CREATED=${created}"
)
def build = c3i.buildAndWait(script: this, objs: processed, '--from-dir=.')
def ocpBuild = build.object()
env.FRONTEND_IMAGE_DIGEST = ocpBuild.status.output.to.imageDigest
def ref = ocpBuild.status.outputDockerImageReference
def repo = ref.tokenize(':')[0..-2].join(':')
env.FRONTEND_IMAGE_REPO = repo
env.FRONTEND_IMAGE_REF = repo + '@' + env.FRONTEND_IMAGE_DIGEST
env.FRONTEND_IMAGE_TAG = env.RESULTING_TAG
env.RESULTING_IMAGE_REPOS = "${env.BACKEND_IMAGE_REPO},${env.FRONTEND_IMAGE_REPO}"
echo "Built image ${env.FRONTEND_IMAGE_REF}, digest: ${env.FRONTEND_IMAGE_DIGEST}, tag: ${env.FRONTEND_IMAGE_TAG}"
env.REUSE_PROJECT = "true"
}
}
}
}
post {
failure {
echo "Failed to build mbs-frontend image ${env.RESULTING_TAG}."
}
}
}
{% include "mbs-integration-test.groovy" %}

View File

@@ -0,0 +1,107 @@
stage('Get image refs') {
when {
expression { !env.FRONTEND_IMAGE_REF }
}
steps {
script {
env.FRONTEND_IMAGE_REF = env.IMAGE
if (params.CONTAINER_REGISTRY_CREDENTIALS) {
dir ("${env.HOME}/.docker") {
openshift.withCluster() {
def dockerconf = openshift.selector('secret', params.CONTAINER_REGISTRY_CREDENTIALS).object().data['.dockerconfigjson']
writeFile file: 'config.json', text: dockerconf, encoding: "Base64"
}
}
def output = sh(script: "skopeo inspect docker://${params.MBS_BACKEND_REPO}:${params.TRACKED_TAG}", returnStdout: true).trim()
def backendData = readJSON text: output
env.BACKEND_IMAGE_REF = "${params.MBS_BACKEND_REPO}@${backendData.Digest}"
}
}
}
}
stage('Run integration tests') {
stages {
stage('Deploy test environment') {
steps {
script {
if (!env.PIPELINE_ID) {
env.PIPELINE_ID = "c3i-mbs-${UUID.randomUUID().toString().take(8)}"
}
openshift.withCluster() {
openshift.withProject(params.PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE) {
def services = 'umb mbs-frontend mbs-backend krb5 ldap koji-hub'
if (env.REUSE_PROJECT == "true") {
c3i.buildAndWait(script: this, objs: "bc/pipeline-as-a-service",
'-e', "DEFAULT_IMAGE_TAG=${env.ENVIRONMENT}",
'-e', "PIPELINE_ID=${env.PIPELINE_ID}",
'-e', "TRIGGERED_BY=${env.BUILD_URL}",
'-e', "SERVICES_TO_DEPLOY='${services}'",
'-e', "MBS_BACKEND_IMAGE=${env.BACKEND_IMAGE_REF}",
'-e', "MBS_FRONTEND_IMAGE=${env.FRONTEND_IMAGE_REF}",
'-e', "PAAS_DOMAIN=${env.PAAS_DOMAIN}",
'-e', 'C3IAAS_PROJECT=""'
)
}
else {
c3i.buildAndWait(script: this, objs: "bc/pipeline-as-a-service",
'-e', "DEFAULT_IMAGE_TAG=${env.ENVIRONMENT}",
'-e', "PIPELINE_ID=${env.PIPELINE_ID}",
'-e', "TRIGGERED_BY=${env.BUILD_URL}",
'-e', "SERVICES_TO_DEPLOY='${services}'",
'-e', "MBS_BACKEND_IMAGE=${env.BACKEND_IMAGE_REF}",
'-e', "MBS_FRONTEND_IMAGE=${env.FRONTEND_IMAGE_REF}",
'-e', "PAAS_DOMAIN=${env.PAAS_DOMAIN}",
)
}
}
}
}
}
}
stage('Run tests') {
steps {
script {
checkout([$class: 'GitSCM',
branches: [[name: env.GIT_REPO_REF]],
userRemoteConfigs: [[url: params.GIT_REPO, refspec: '+refs/heads/*:refs/remotes/origin/* +refs/pull/*/head:refs/remotes/origin/pull/*/head']],
])
sh "openshift/integration/koji/pipelines/tests/runtests ${env.PIPELINE_ID}"
}
}
post {
success {
echo "All tests successful"
}
failure {
echo "Testcases FAILED"
}
}
}
}
post {
failure {
script {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
echo 'Getting logs from all deployments...'
openshift.selector('pods', ['c3i.redhat.com/pipeline': env.PIPELINE_ID]).logs('--tail 100')
}
}
}
}
always {
script {
pd = controller.getVars()
[[pd.MBS_BACKEND_IMAGE, pd.MBS_BACKEND_IMAGE_DIGEST], [pd.MBS_FRONTEND_IMAGE, pd.MBS_FRONTEND_IMAGE_DIGEST]].each {
c3i.sendResultToMessageBus(
imageRef: it[0],
digest: it[1],
environment: env.ENVIRONMENT,
docs: 'https://pagure.io/fm-orchestrator/blob/master/f/openshift/integration/koji',
scratch: env.IMAGE_IS_SCRATCH
)
}
}
}
}
}
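
One clarifying note on the 'Get image refs' stage above: when the build-and-test snippet has not already exported image refs, the backend image is pinned by digest rather than by tag, so the deployed test environment cannot change under the test run if the tag is re-pushed. With hypothetical values the resulting reference looks like this:

// Hypothetical values, for illustration only
def backendRepo = 'quay.io/factory2/mbs-backend'               // params.MBS_BACKEND_REPO
def inspected = [Digest: 'sha256:35201c572fc8a137862b7a2564']  // from `skopeo inspect docker://<repo>:<tag>`
def backendRef = "${backendRepo}@${inspected.Digest}".toString()
assert backendRef == 'quay.io/factory2/mbs-backend@sha256:35201c572fc8a137862b7a2564'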

View File

@@ -1,297 +0,0 @@
# Template to produce a new MBS build job in OpenShift.
#
# MBS build job is a part of the MBS C3I pipeline, covering the following steps:
#
# - Run Flake8 and Bandit checks
# - Run unit tests
# - Build SRPM
# - Build RPM
# - Invoke Rpmlint
# - Build container
# - Run integration tests against the latest Koji images
# - Push container
#
# Required Jenkins Plugins:
# - Openshift Sync plugin
# - Openshift Client plugin
# - Kubernetes plugin
# - SSH Agent plugin
# - Timestamper plugin
# - HTTP Request plugin
# - Red Hat CI Plugin
#
---
apiVersion: v1
kind: Template
metadata:
name: mbs-build-pipeline
parameters:
- name: NAME
displayName: Short unique identifier for the templated instances
description: This field is used to deploy multiple pipelines to one OpenShift project from this template.
required: true
value: mbs-build
- name: MBS_GIT_REPO
displayName: MBS Git repo URL
description: Default MBS Git repo URL to run dev tests against
required: true
value: https://pagure.io/fm-orchestrator.git
- name: MBS_GIT_REF
displayName: MBS Git repo ref
description: Default MBS Git repo ref to run dev tests against
required: true
value: master
- name: MBS_MAIN_BRANCH
displayName: Name of the main branch.
description: If MBS_MAIN_BRANCH equals MBS_GIT_REF, this is a post-merge build, otherwise it's a pre-merge build.
value: master
required: true
- name: JENKINS_AGENT_CLOUD_NAME
displayName: Name of OpenShift cloud in Jenkins master configuration
required: true
value: openshift
- name: JENKINS_AGENT_IMAGE
displayName: Container image for Jenkins slave pods
required: true
value: quay.io/factory2/mbs-jenkins-slave:latest
- name: JENKINS_AGENT_CA_URLS
displayName: Space-separated list of URLs to CA certificates to install in the agent image
required: false
value: ""
- name: MBS_BACKEND_DEV_IMAGE_DESTINATIONS
displayName: Comma-separated list of container repositories (without tag) to which the built MBS backend dev image will be pushed
description: OpenShift registries must be prefixed with 'atomic:'
required: false
value: "quay.io/factory2/mbs-backend"
- name: MBS_FRONTEND_DEV_IMAGE_DESTINATIONS
displayName: Comma-separated list of container repositories (without tag) to which the built MBS frontend dev image will be pushed
description: OpenShift registries must be prefixed with 'atomic:'
required: false
value: "quay.io/factory2/mbs-frontend"
- name: CONTAINER_REGISTRY_CREDENTIALS
displayName: Secret name of container registries used for pulling and pushing images
value: factory2-pipeline-registry-credentials
required: false
- name: MBS_DEV_IMAGE_TAG
displayName: Tag name of the resulting container image for development environment
value: "latest"
required: true
- name: MBS_BACKEND_IMAGESTREAM_NAME
displayName: Name of ImageStream for MBS backend images
required: true
value: mbs-backend
- name: MBS_BACKEND_IMAGESTREAM_NAMESPACE
displayName: Namespace of ImageStream for MBS backend images
required: false
- name: MBS_FRONTEND_IMAGESTREAM_NAME
displayName: Name of ImageStream for MBS frontend images
required: true
value: mbs-frontend
- name: MBS_FRONTEND_IMAGESTREAM_NAMESPACE
displayName: Namespace of ImageStream for MBS frontend images
required: false
- name: FORCE_PUBLISH_IMAGE
displayName: Whether to push the resulting image regardless of the Git branch
value: "false"
required: true
- name: TAG_INTO_IMAGESTREAM
displayName: Whether to tag the pushed image as dev
value: "true"
required: true
- name: PAGURE_URL
displayName: Pagure URL
value: "https://pagure.io"
- name: PAGURE_REPO_NAME
value: fm-orchestrator
- name: PAGURE_REPO_IS_FORK
value: "false"
- name: PAGURE_API_KEY_SECRET_NAME
displayName: Name of Pagure API key secret for updating Pagure pull-request statuses
value: "pagure-api-key"
- name: MAIL_ADDRESS
displayName: If set, send build failure messages to this mail address.
- name: MBS_SPEC_FILE
displayName: URL to the rpm specfile for the module-build-service
required: true
value: "https://src.fedoraproject.org/rpms/module-build-service/raw/master/f/module-build-service.spec"
- name: EXTRA_REPOS
displayName: Space-separated list of URLs to .repo files to install in the images
required: false
value: ""
- name: EXTRA_RPMS
displayName: Space-separated list of rpm names to install in the images
required: false
value: ""
- name: TESTCASES
displayName: >-
Space-separated list of testcases to run as part of the pipeline. An empty string (the default)
causes all available testcases to run. The value "skip" causes no testcases to be run.
required: false
value: ""
- name: USE_C3IAAS
displayName: >-
Use C3I-as-a-Service to dynamically allocate a temporary OpenShift project for building
and testing.
required: true
value: "true"
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE
displayName: The namespace where the C3I-as-a-Service project request BuildConfig has been defined
required: false
value: c3i
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME
displayName: The name of the C3I-as-a-Service project request BuildConfig
required: false
value: c3iaas-request-project
- name: C3IAAS_LIFETIME
displayName: The lifetime of the OpenShift project allocated by C3I-as-a-Service.
required: true
value: "120"
- name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE
displayName: The namespace where the Pipeline-as-a-Service project request BuildConfig has been defined
required: false
value: c3i
- name: CLEANUP
displayName: Cleanup objects after the pipeline is complete
required: true
value: "true"
- name: ENVIRONMENT
displayName: environment name (dev/stage/prod)
required: true
value: dev
{% include "snippets/c3i-library-parameters.yaml" %}
labels:
template: mbs-build
objects:
- kind: "BuildConfig"
apiVersion: "v1"
metadata:
name: "${NAME}-jenkins-slave"
labels:
app: "${NAME}"
spec:
runPolicy: "Serial"
completionDeadlineSeconds: 1800
strategy:
dockerStrategy:
forcePull: true
dockerfilePath: Dockerfile
buildArgs:
- name: CA_URLS
value: "${JENKINS_AGENT_CA_URLS}"
resources:
requests:
memory: 512Mi
cpu: 300m
limits:
memory: 768Mi
cpu: 500m
source:
contextDir: openshift/integration/koji/containers/jenkins-slave
git:
uri: "${MBS_GIT_REPO}"
ref: "${MBS_GIT_REF}"
output:
to:
kind: DockerImage
name: "${JENKINS_AGENT_IMAGE}"
pushSecret:
name: "${CONTAINER_REGISTRY_CREDENTIALS}"
- kind: ServiceAccount
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave"
labels:
app: "${NAME}"
- kind: RoleBinding
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave_edit"
labels:
app: "${NAME}"
subjects:
- kind: ServiceAccount
name: "${NAME}-jenkins-slave"
roleRef:
name: edit
- kind: "BuildConfig"
apiVersion: "v1"
metadata:
name: "${NAME}"
labels:
app: "${NAME}"
spec:
runPolicy: "Parallel"
completionDeadlineSeconds: 1800
strategy:
type: JenkinsPipeline
jenkinsPipelineStrategy:
env:
- name: MBS_GIT_REPO
value: "${MBS_GIT_REPO}"
- name: MBS_GIT_REF
value: "${MBS_GIT_REF}"
- name: JENKINS_AGENT_CLOUD_NAME
value: "${JENKINS_AGENT_CLOUD_NAME}"
- name: JENKINS_AGENT_IMAGE
value: "${JENKINS_AGENT_IMAGE}"
- name: JENKINS_AGENT_SERVICE_ACCOUNT
value: "${NAME}-jenkins-slave"
- name: MBS_BACKEND_DEV_IMAGE_DESTINATIONS
value: "${MBS_BACKEND_DEV_IMAGE_DESTINATIONS}"
- name: MBS_FRONTEND_DEV_IMAGE_DESTINATIONS
value: "${MBS_FRONTEND_DEV_IMAGE_DESTINATIONS}"
- name: CONTAINER_REGISTRY_CREDENTIALS
value: "${CONTAINER_REGISTRY_CREDENTIALS}"
- name: FORCE_PUBLISH_IMAGE
value: "${FORCE_PUBLISH_IMAGE}"
- name: TAG_INTO_IMAGESTREAM
value: "${TAG_INTO_IMAGESTREAM}"
- name: MBS_DEV_IMAGE_TAG
value: "${MBS_DEV_IMAGE_TAG}"
- name: MBS_BACKEND_IMAGESTREAM_NAME
value: "${MBS_BACKEND_IMAGESTREAM_NAME}"
- name: MBS_BACKEND_IMAGESTREAM_NAMESPACE
value: "${MBS_BACKEND_IMAGESTREAM_NAMESPACE}"
- name: MBS_FRONTEND_IMAGESTREAM_NAME
value: "${MBS_FRONTEND_IMAGESTREAM_NAME}"
- name: MBS_FRONTEND_IMAGESTREAM_NAMESPACE
value: "${MBS_FRONTEND_IMAGESTREAM_NAMESPACE}"
- name: MBS_MAIN_BRANCH
value: "${MBS_MAIN_BRANCH}"
- name: PAGURE_REPO_NAME
value: "${PAGURE_REPO_NAME}"
- name: PAGURE_REPO_IS_FORK
value: "${PAGURE_REPO_IS_FORK}"
- name: PAGURE_URL
value: "${PAGURE_URL}"
- name: PAGURE_API_KEY_SECRET_NAME
value: "${PAGURE_API_KEY_SECRET_NAME}"
- name: MAIL_ADDRESS
value: "${MAIL_ADDRESS}"
- name: MBS_SPEC_FILE
value: "${MBS_SPEC_FILE}"
- name: EXTRA_REPOS
value: "${EXTRA_REPOS}"
- name: EXTRA_RPMS
value: "${EXTRA_RPMS}"
- name: TESTCASES
value: "${TESTCASES}"
- name: ENVIRONMENT
value: "${ENVIRONMENT}"
- name: USE_C3IAAS
value: "${USE_C3IAAS}"
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE
value: "${C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE}"
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME
value: "${C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME}"
- name: C3IAAS_LIFETIME
value: "${C3IAAS_LIFETIME}"
- name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE
value: "${PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE}"
- name: CLEANUP
value: "${CLEANUP}"
jenkinsfile: |
{% filter indent(width=10) %}{% include "mbs-build.Jenkinsfile" %}{% endfilter %}

View File

@@ -1,485 +0,0 @@
{% include "snippets/c3i-library.groovy" %}
import static org.apache.commons.lang.StringEscapeUtils.escapeHtml;
pipeline {
{% include "snippets/default-agent.groovy" %}
options {
timestamps()
timeout(time: 120, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
skipDefaultCheckout()
}
environment {
TRIGGER_NAMESPACE = readFile("/run/secrets/kubernetes.io/serviceaccount/namespace").trim()
PAGURE_API = "${params.PAGURE_URL}/api/0"
PAGURE_REPO_IS_FORK = "${params.PAGURE_REPO_IS_FORK}"
PAGURE_REPO_HOME = "${env.PAGURE_URL}${env.PAGURE_REPO_IS_FORK == 'true' ? '/fork' : ''}/${params.PAGURE_REPO_NAME}"
}
stages {
stage('Prepare') {
steps {
script {
c3i.clone(repo: params.MBS_GIT_REPO, branch: params.MBS_GIT_REF)
// get current commit ID
// FIXME: Due to a bug described in https://issues.jenkins-ci.org/browse/JENKINS-45489,
// the return value of checkout() is unreliable.
// Not working: env.MBS_GIT_COMMIT = scmVars.GIT_COMMIT
env.MBS_GIT_COMMIT = sh(returnStdout: true, script: 'git rev-parse HEAD').trim()
// Set for pagure function from c3i-library
env.GIT_COMMIT = env.MBS_GIT_COMMIT
echo "Build ${params.MBS_GIT_REF}, commit=${env.MBS_GIT_COMMIT}"
env.IMAGE_IS_SCRATCH = (params.MBS_GIT_REF != params.MBS_MAIN_BRANCH)
// Is the current branch a pull-request? If no, env.PR_NO will be empty.
env.PR_NO = getPrNo(params.MBS_GIT_REF)
// Generate a version-release number for the target Git commit
env.MBS_VERSION = sh(script: """grep -m 1 -P -o '(?<=version=")[^"]+' setup.py""", returnStdout: true).trim()
env.BUILD_SUFFIX = ".jenkins${currentBuild.id}.git${env.MBS_GIT_COMMIT.take(7)}"
env.TEMP_TAG = "${env.MBS_VERSION}${env.BUILD_SUFFIX}"
def resp = httpRequest params.MBS_SPEC_FILE
env.SPEC_FILE_NAME = params.MBS_SPEC_FILE.split("/").last()
writeFile file: env.SPEC_FILE_NAME, text: resp.content
sh """
sed -i \
-e 's/Version:.*/Version: ${env.MBS_VERSION}/' \
-e 's/%{?dist}/${env.BUILD_SUFFIX}%{?dist}/' \
${env.SPEC_FILE_NAME}
"""
sh 'mkdir repos'
params.EXTRA_REPOS.split().each {
resp = httpRequest it
writeFile file: "repos/${it.split("/").last()}", text: resp.content
}
sh """
sed -i \
-e '/enum34/d' \
-e '/funcsigs/d' \
-e '/futures/d' \
-e '/koji/d' \
requirements.txt
"""
sh """
sed -i \
-e 's/py.test/py.test-3/g' \
-e '/basepython/d' \
-e '/sitepackages/a setenv = PYTHONPATH={toxinidir}' \
tox.ini
"""
{% include "snippets/get_paas_domain.groovy" %}
}
}
}
stage('Update Build Info') {
when {
expression {
return params.PAGURE_URL && params.PAGURE_REPO_NAME
}
}
steps {
script {
// Set friendly display name and description
if (env.PR_NO) { // is pull-request
env.PR_URL = "${env.PAGURE_REPO_HOME}/pull-request/${env.PR_NO}"
echo "Building PR #${env.PR_NO}: ${env.PR_URL}"
// NOTE: Old versions of the OpenShift Client Jenkins plugin mishandle arguments
// with special bash characters (like whitespace, #, etc).
// https://bugzilla.redhat.com/show_bug.cgi?id=1625518
currentBuild.displayName = "PR#${env.PR_NO}"
// To enable HTML syntax in build description, go to `Jenkins/Global Security/Markup Formatter` and select 'Safe HTML'.
def pagureLink = """<a href="${env.PR_URL}">${currentBuild.displayName}</a>"""
try {
def prInfo = pagure.getPR(env.PR_NO)
pagureLink = """<a href="${env.PR_URL}">PR#${env.PR_NO}: ${escapeHtml(prInfo.title)}</a>"""
// set PR status to Pending
if (params.PAGURE_API_KEY_SECRET_NAME)
pagure.setBuildStatusOnPR(null, "Build #${env.BUILD_NUMBER} in progress (commit: ${env.MBS_GIT_COMMIT.take(8)})")
} catch (Exception e) {
echo "Error using pagure API: ${e}"
}
currentBuild.description = pagureLink
} else { // is a branch
currentBuild.displayName = "${env.MBS_GIT_REF}: ${env.MBS_GIT_COMMIT.take(7)}"
currentBuild.description = """<a href="${env.PAGURE_REPO_HOME}/c/${env.MBS_GIT_COMMIT}">${currentBuild.displayName}</a>"""
if (params.PAGURE_API_KEY_SECRET_NAME) {
try {
pagure.flagCommit('pending', null, "Build #${env.BUILD_NUMBER} in progress (commit: ${env.MBS_GIT_COMMIT.take(8)})")
echo "Updated commit ${env.MBS_GIT_COMMIT} status to PENDING."
} catch (e) {
echo "Error updating commit ${env.MBS_GIT_COMMIT} status to PENDING: ${e}"
}
}
}
}
}
}
stage('Allocate C3IaaS project') {
when {
expression {
return params.USE_C3IAAS == 'true'
}
}
steps {
script {
if (!params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME ||
!params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE) {
error("USE_C3IAAS is set to true but missing C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME" +
" or C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE")
}
if (env.PR_NO) {
env.PIPELINE_ID = "c3i-mbs-pr-${env.PR_NO}-git${env.MBS_GIT_COMMIT.take(8)}"
} else {
env.PIPELINE_ID = "c3i-mbs-${params.MBS_GIT_REF}-git${env.MBS_GIT_COMMIT.take(8)}"
}
echo "Requesting new OpenShift project ${env.PIPELINE_ID}..."
openshift.withCluster() {
openshift.withProject(params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE) {
c3i.buildAndWait(script: this, objs: "bc/${params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME}",
'-e', "PROJECT_NAME=${env.PIPELINE_ID}",
'-e', "ADMIN_GROUPS=system:serviceaccounts:${TRIGGER_NAMESPACE},system:serviceaccounts:${PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE}",
'-e', "LIFETIME_IN_MINUTES=${params.C3IAAS_LIFETIME}"
)
}
}
}
}
post {
success {
echo "Allocated project ${env.PIPELINE_ID}"
}
failure {
echo "Failed to allocate ${env.PIPELINE_ID} project"
}
}
}
stage('Build backend image') {
environment {
BACKEND_BUILDCONFIG_ID = "mbs-backend-build-${currentBuild.id}-${UUID.randomUUID().toString().take(7)}"
}
steps {
script {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
// OpenShift BuildConfig doesn't support specifying a tag name at build time.
// We have to create a new BuildConfig for each image build.
echo 'Creating a BuildConfig for mbs-backend build...'
def created = new Date().format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone('UTC'))
def template = readYaml file: 'openshift/backend/mbs-backend-build-template.yaml'
def processed = openshift.process(template,
'-p', "NAME=${env.BACKEND_BUILDCONFIG_ID}",
'-p', "MBS_GIT_REPO=${params.MBS_GIT_REPO}",
// A pull-request branch, like pull/123/head, cannot be built with commit ID
// because refspec cannot be customized in an OpenShift build.
'-p', "MBS_GIT_REF=${env.PR_NO ? params.MBS_GIT_REF : env.MBS_GIT_COMMIT}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAME=${params.MBS_BACKEND_IMAGESTREAM_NAME}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",
'-p', "MBS_IMAGE_TAG=${env.TEMP_TAG}",
'-p', "EXTRA_RPMS=${params.EXTRA_RPMS}",
'-p', "CREATED=${created}"
)
def build = c3i.buildAndWait(script: this, objs: processed, '--from-dir=.')
def ocpBuild = build.object()
env.BACKEND_IMAGE_DIGEST = ocpBuild.status.output.to.imageDigest
def ref = ocpBuild.status.outputDockerImageReference
def repo = ref.tokenize(':')[0..-2].join(':')
env.BACKEND_IMAGE_REPO = repo
env.BACKEND_IMAGE_REF = repo + '@' + env.BACKEND_IMAGE_DIGEST
env.BACKEND_IMAGE_TAG = env.TEMP_TAG
echo "Built image ${env.BACKEND_IMAGE_REF}, digest: ${env.BACKEND_IMAGE_DIGEST}, tag: ${env.BACKEND_IMAGE_TAG}"
}
}
}
}
post {
failure {
echo "Failed to build mbs-backend image ${env.TEMP_TAG}."
}
cleanup {
script {
if (params.USE_C3IAAS != 'true') {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
echo 'Tearing down...'
openshift.selector('bc', [
'app': env.BACKEND_BUILDCONFIG_ID,
'template': 'mbs-backend-build-template',
]).delete()
}
}
}
}
}
}
}
stage('Build frontend image') {
environment {
FRONTEND_BUILDCONFIG_ID = "mbs-frontend-build-${currentBuild.id}-${UUID.randomUUID().toString().take(7)}"
}
steps {
script {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
// OpenShift BuildConfig doesn't support specifying a tag name at build time.
// We have to create a new BuildConfig for each image build.
echo 'Creating a BuildConfig for mbs-frontend build...'
def created = new Date().format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone('UTC'))
def template = readYaml file: 'openshift/frontend/mbs-frontend-build-template.yaml'
def processed = openshift.process(template,
'-p', "NAME=${env.FRONTEND_BUILDCONFIG_ID}",
'-p', "MBS_GIT_REPO=${params.MBS_GIT_REPO}",
// A pull-request branch, like pull/123/head, cannot be built with commit ID
// because refspec cannot be customized in an OpenShift build.
'-p', "MBS_GIT_REF=${env.PR_NO ? params.MBS_GIT_REF : env.MBS_GIT_COMMIT}",
'-p', "MBS_FRONTEND_IMAGESTREAM_NAME=${params.MBS_FRONTEND_IMAGESTREAM_NAME}",
'-p', "MBS_FRONTEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",
'-p', "MBS_IMAGE_TAG=${env.TEMP_TAG}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAME=${params.MBS_BACKEND_IMAGESTREAM_NAME}",
'-p', "MBS_BACKEND_IMAGESTREAM_NAMESPACE=${env.PIPELINE_ID}",
'-p', "CREATED=${created}"
)
def build = c3i.buildAndWait(script: this, objs: processed, '--from-dir=.')
def ocpBuild = build.object()
env.FRONTEND_IMAGE_DIGEST = ocpBuild.status.output.to.imageDigest
def ref = ocpBuild.status.outputDockerImageReference
def repo = ref.tokenize(':')[0..-2].join(':')
env.FRONTEND_IMAGE_REPO = repo
env.FRONTEND_IMAGE_REF = repo + '@' + env.FRONTEND_IMAGE_DIGEST
env.FRONTEND_IMAGE_TAG = env.TEMP_TAG
echo "Built image ${env.FRONTEND_IMAGE_REF}, digest: ${env.FRONTEND_IMAGE_DIGEST}, tag: ${env.FRONTEND_IMAGE_TAG}"
}
}
}
}
post {
failure {
echo "Failed to build mbs-frontend image ${env.TEMP_TAG}."
}
cleanup {
script {
if (!env.C3IAAS_NAMESPACE) {
openshift.withCluster() {
echo 'Tearing down...'
openshift.selector('bc', [
'app': env.FRONTEND_BUILDCONFIG_ID,
'template': 'mbs-frontend-build-template',
]).delete()
}
}
}
}
}
}
{% include "snippets/mbs-integration-test.groovy" %}
stage('Push images') {
when {
expression {
return params.FORCE_PUBLISH_IMAGE == 'true' ||
params.MBS_GIT_REF == params.MBS_MAIN_BRANCH
}
}
steps {
script {
if (params.CONTAINER_REGISTRY_CREDENTIALS) {
dir ("${env.HOME}/.docker") {
openshift.withCluster() {
def dockerconf = openshift.selector('secret', params.CONTAINER_REGISTRY_CREDENTIALS).object().data['.dockerconfigjson']
writeFile file: 'config.json', text: dockerconf, encoding: "Base64"
}
}
}
def registryToken = readFile(file: '/run/secrets/kubernetes.io/serviceaccount/token')
def copyDown = { name, src ->
src = "docker://${src}"
echo "Pulling ${name} from ${src}..."
withEnv(["SOURCE_IMAGE_REF=${src}", "TOKEN=${registryToken}"]) {
sh """
set -e +x # hide the token from Jenkins console
mkdir -p _images/${name}
skopeo copy \
--src-cert-dir=/run/secrets/kubernetes.io/serviceaccount/ \
--src-creds=serviceaccount:"$TOKEN" \
"$SOURCE_IMAGE_REF" dir:_images/${name}
"""
}
}
def pullJobs = [
'Pulling mbs-backend' : { copyDown('mbs-backend', env.BACKEND_IMAGE_REF) },
'Pulling mbs-frontend' : { copyDown('mbs-frontend', env.FRONTEND_IMAGE_REF) }
]
parallel pullJobs
def copyUp = { name, dest ->
dest = "${dest}:${params.MBS_DEV_IMAGE_TAG ?: 'latest'}"
if (!dest.startsWith('atomic:') && !dest.startsWith('docker://')) {
dest = "docker://${dest}"
}
echo "Pushing ${name} to ${dest}..."
withEnv(["DEST_IMAGE_REF=${dest}"]) {
retry(5) {
sh """
skopeo copy dir:_images/${name} "$DEST_IMAGE_REF"
"""
}
}
}
def backendDests = params.MBS_BACKEND_DEV_IMAGE_DESTINATIONS ?
params.MBS_BACKEND_DEV_IMAGE_DESTINATIONS.split(',') : []
def backendPushJobs = backendDests.collectEntries {
[ "Pushing mbs-backend to ${it}" : { copyUp('mbs-backend', it) } ]
}
parallel backendPushJobs
// Run all the frontend push jobs after the backend push jobs, so we can trigger
// on the frontend repo being updated and be confident it is in sync with the
// backend repo.
def frontendDests = params.MBS_FRONTEND_DEV_IMAGE_DESTINATIONS ?
params.MBS_FRONTEND_DEV_IMAGE_DESTINATIONS.split(',') : []
def frontendPushJobs = frontendDests.collectEntries {
[ "Pushing mbs-frontend to ${it}" : { copyUp('mbs-frontend', it) } ]
}
parallel frontendPushJobs
}
}
post {
failure {
echo 'Pushing images FAILED'
}
}
}
stage('Tag into ImageStreams') {
when {
expression {
return "${params.MBS_DEV_IMAGE_TAG}" && params.TAG_INTO_IMAGESTREAM == "true" &&
(params.FORCE_PUBLISH_IMAGE == "true" || params.MBS_GIT_REF == params.MBS_MAIN_BRANCH)
}
}
steps {
script {
openshift.withCluster() {
openshift.withProject(params.MBS_BACKEND_IMAGESTREAM_NAMESPACE ?: env.PIPELINE_ID) {
def sourceRef = "${params.MBS_BACKEND_IMAGESTREAM_NAME}@${env.BACKEND_IMAGE_DIGEST}"
def destRef = "${params.MBS_BACKEND_IMAGESTREAM_NAME}:${params.MBS_DEV_IMAGE_TAG}"
echo "Tagging ${sourceRef} as ${destRef}..."
openshift.tag(sourceRef, destRef)
}
openshift.withProject(params.MBS_FRONTEND_IMAGESTREAM_NAMESPACE ?: env.PIPELINE_ID) {
def sourceRef = "${params.MBS_FRONTEND_IMAGESTREAM_NAME}@${env.FRONTEND_IMAGE_DIGEST}"
def destRef = "${params.MBS_FRONTEND_IMAGESTREAM_NAME}:${params.MBS_DEV_IMAGE_TAG}"
echo "Tagging ${sourceRef} as ${destRef}..."
openshift.tag(sourceRef, destRef)
}
}
}
}
post {
failure {
echo "Tagging images as :${params.MBS_DEV_IMAGE_TAG} FAILED"
}
}
}
}
post {
cleanup {
script {
if (params.CLEANUP == 'true' && params.USE_C3IAAS != 'true') {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
if (env.BACKEND_IMAGE_TAG) {
echo "Removing tag ${env.BACKEND_IMAGE_TAG} from the ${params.MBS_BACKEND_IMAGESTREAM_NAME} ImageStream..."
openshift.withProject(params.MBS_BACKEND_IMAGESTREAM_NAMESPACE ?: env.PIPELINE_ID) {
openshift.tag("${params.MBS_BACKEND_IMAGESTREAM_NAME}:${env.BACKEND_IMAGE_TAG}", "-d")
}
}
if (env.FRONTEND_IMAGE_TAG) {
echo "Removing tag ${env.FRONTEND_IMAGE_TAG} from the ${params.MBS_FRONTEND_IMAGESTREAM_NAME} ImageStream..."
openshift.withProject(params.MBS_FRONTEND_IMAGESTREAM_NAMESPACE ?: env.PIPELINE_ID) {
openshift.tag("${params.MBS_FRONTEND_IMAGESTREAM_NAME}:${env.FRONTEND_IMAGE_TAG}", "-d")
}
}
}
}
}
}
}
success {
script {
// on pre-merge workflow success
if (params.PAGURE_API_KEY_SECRET_NAME && env.PR_NO) {
try {
pagure.setBuildStatusOnPR(100, "Build #${env.BUILD_NUMBER} successful (commit: ${env.MBS_GIT_COMMIT.take(8)})")
echo "Updated PR #${env.PR_NO} status to PASS."
} catch (e) {
echo "Error updating PR #${env.PR_NO} status to PASS: ${e}"
}
}
// on post-merge workflow success
if (params.PAGURE_API_KEY_SECRET_NAME && !env.PR_NO) {
try {
pagure.flagCommit('success', 100, "Build #${env.BUILD_NUMBER} successful (commit: ${env.MBS_GIT_COMMIT.take(8)})")
echo "Updated commit ${env.MBS_GIT_COMMIT} status to PASS."
} catch (e) {
echo "Error updating commit ${env.MBS_GIT_COMMIT} status to PASS: ${e}"
}
}
}
}
failure {
script {
// on pre-merge workflow failure
if (params.PAGURE_API_KEY_SECRET_NAME && env.PR_NO) {
// updating Pagure PR flag
try {
pagure.setBuildStatusOnPR(0, "Build #${env.BUILD_NUMBER} failed (commit: ${env.MBS_GIT_COMMIT.take(8)})")
echo "Updated PR #${env.PR_NO} status to FAILURE."
} catch (e) {
echo "Error updating PR #${env.PR_NO} status to FAILURE: ${e}"
}
// making a comment
try {
pagure.commentOnPR("""
Build #${env.BUILD_NUMBER} [failed](${env.BUILD_URL}) (commit: ${env.MBS_GIT_COMMIT}).
Rebase or make new commits to rebuild.
""".stripIndent(), env.PR_NO)
echo "Comment made."
} catch (e) {
echo "Error making a comment on PR #${env.PR_NO}: ${e}"
}
}
// on post-merge workflow failure
if (!env.PR_NO) {
// updating Pagure commit flag
if (params.PAGURE_API_KEY_SECRET_NAME) {
try {
pagure.flagCommit('failure', 0, "Build #${env.BUILD_NUMBER} failed (commit: ${env.MBS_GIT_COMMIT.take(8)})")
echo "Updated commit ${env.MBS_GIT_COMMIT} status to FAILURE."
} catch (e) {
echo "Error updating commit ${env.MBS_GIT_COMMIT} status to FAILURE: ${e}"
}
}
// sending email
if (params.MAIL_ADDRESS){
try {
sendBuildStatusEmail('failed')
} catch (e) {
echo "Error sending email: ${e}"
}
}
}
}
}
}
}
def getPrNo(branch) {
def prMatch = branch =~ /^(?:.+\/)?pull\/(\d+)\/head$/
return prMatch ? prMatch[0][1] : ''
}
def sendBuildStatusEmail(String status) {
def recipient = params.MAIL_ADDRESS
def subject = "Jenkins job ${env.JOB_NAME} #${env.BUILD_NUMBER} ${status}."
def body = "Build URL: ${env.BUILD_URL}"
if (env.PR_NO) {
subject = "Jenkins job ${env.JOB_NAME}, PR #${env.PR_NO} ${status}."
body += "\nPull Request: ${env.PR_URL}"
}
emailext to: recipient, subject: subject, body: body
}
{% include "snippets/functions.groovy" %}

View File

@@ -1,123 +0,0 @@
{% include "snippets/c3i-library.groovy" %}
pipeline {
{% include "snippets/default-agent.groovy" %}
options {
timestamps()
timeout(time: 30, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}
environment {
PIPELINE_NAMESPACE = readFile(file: '/run/secrets/kubernetes.io/serviceaccount/namespace').trim()
SERVICE_ACCOUNT_TOKEN = readFile(file: '/run/secrets/kubernetes.io/serviceaccount/token').trim()
}
triggers {
ciBuildTrigger(
noSquash: false,
providerList: [
activeMQSubscriber(
name: params.MESSAGING_PROVIDER,
overrides: [topic: params.MESSAGING_TOPIC],
checks: [
[field: '$.msg.subject_type', expectedValue: 'container-image'],
[field: '$.msg.subject_identifier', expectedValue: params.SUBJECT_IDENTIFIER_REGEX],
[field: '$.msg.decision_context', expectedValue: params.DECISION_CONTEXT_REGEX],
[field: '$.msg.policies_satisfied', expectedValue: 'true'],
]
)
]
)
}
stages {
stage("Message Check and setup") {
steps {
script {
if (!params.CI_MESSAGE) {
error("This build is not started by a CI message. Only configurations were done.")
}
def message = readJSON text: params.CI_MESSAGE
// Extract the digest of the image to be promoted.
// e.g. factory2/waiverdb@sha256:35201c572fc8a137862b7a256476add8d7465fa5043d53d117f4132402f8ef6b
// -> sha256:35201c572fc8a137862b7a256476add8d7465fa5043d53d117f4132402f8ef6b
def digest = (message.msg.subject_identifier =~ /@(sha256:\w+)$/)[0][1]
// Generate the pull spec of the image
// e.g. quay.io/mkovarik/waiverdb@sha256:35201c572fc8a137862b7a256476add8d7465fa5043d53d117f4132402f8ef6b
env.IMAGE = "${params.SOURCE_CONTAINER_REPO}@${digest}"
echo "Starting promotion of image ${env.IMAGE} to :${params.TARGET_TAG}..."
// Setting up registry credentials
dir ("${env.HOME}/.docker") {
// for the OpenShift internal registry
def dockerConfig = readJSON text: '{ "auths": {} }'
dockerConfig.auths['docker-registry.default.svc:5000'] = [
'email': '',
'auth': sh(returnStdout: true, script: 'set +x; echo -n "serviceaccount:$SERVICE_ACCOUNT_TOKEN" | base64 -').trim()
]
// merging user specified credentials
if (params.CONTAINER_REGISTRY_CREDENTIALS) {
openshift.withCluster() {
def dockerconf = openshift.selector('secret', params.CONTAINER_REGISTRY_CREDENTIALS).object().data['.dockerconfigjson']
def dockerString = new String(dockerconf.decodeBase64())
toBeMerged = readJSON text: dockerString
dockerConfig.auths.putAll(toBeMerged.auths)
}
}
// writing to ~/.docker/config.json
writeJSON file: 'config.json', json: dockerConfig
}
}
}
}
stage('Pull image') {
steps {
echo "Pulling container image ${env.IMAGE}..."
withEnv(["SOURCE_IMAGE_REF=${env.IMAGE}"]) {
sh '''
set -e +x # hide the token from Jenkins console
mkdir -p _image
skopeo copy docker://"$SOURCE_IMAGE_REF" dir:_image
'''
}
}
}
stage('Promote image') {
steps {
script {
def destinations = params.PROMOTING_DESTINATIONS ? params.PROMOTING_DESTINATIONS.split(',') : []
openshift.withCluster() {
def pushTasks = destinations.collectEntries {
["Pushing ${it}" : {
def dest = "${it}:${params.TARGET_TAG}"
// Only docker and atomic registries are allowed
if (!dest.startsWith('atomic:') && !dest.startsWith('docker://')) {
dest = "docker://${dest}"
}
echo "Pushing container image to ${dest}..."
withEnv(["DEST_IMAGE_REF=${dest}"]) {
retry(5) {
sh 'skopeo copy dir:_image "$DEST_IMAGE_REF"'
}
}
}]
}
parallel pushTasks
}
}
}
}
stage('Tag ImageStream') {
when {
expression {
return params.DEST_IMAGESTREAM_NAME && params.TAG_INTO_IMAGESTREAM == "true"
}
}
steps {
script {
def destRef = "${params.DEST_IMAGESTREAM_NAMESPACE ?: env.PIPELINE_NAMESPACE}/${params.DEST_IMAGESTREAM_NAME}:${params.TARGET_TAG}"
openshift.withCluster() {
echo "Tagging ${env.IMAGE} into ${destRef}..."
openshift.tag('--source=docker', env.IMAGE, destRef)
}
}
}
}
}
}

View File

@@ -1,127 +0,0 @@
# Template to produce a new OpenShift pipeline job for promoting container images on Greenwave decision messages
#
---
apiVersion: v1
kind: Template
metadata:
name: mbs-greenwave-trigger
labels:
template: mbs-greenwave-trigger
parameters:
- name: NAME
displayName: Short unique identifier for the templated instances
description: This field is used to deploy multiple pipelines to one OpenShift project from this template.
required: true
- name: MBS_GIT_REPO
displayName: MBS Git repo URL
description: Default MBS Git repo URL to run dev tests against
value: "https://pagure.io/fm-orchestrator.git"
- name: MBS_GIT_REF
displayName: MBS Git repo ref
description: Default MBS Git repo ref to run dev tests against
value: master
- name: DECISION_CONTEXT_REGEX
displayName: Regex pattern for Greenwave decision context in CI message
required: true
- name: SUBJECT_IDENTIFIER_REGEX
displayName: Regex pattern for Greenwave subject identifier in CI message
required: true
- name: SOURCE_CONTAINER_REPO
displayName: Container repo of the image
required: true
- name: TARGET_TAG
displayName: Tag name to promote the image to
required: true
- name: TAG_INTO_IMAGESTREAM
displayName: Whether to tag the image into an ImageStream
value: "false"
required: true
- name: DEST_IMAGESTREAM_NAME
displayName: Name of the ImageStream to be tagged
required: false
value: ""
- name: CONTAINER_REGISTRY_CREDENTIALS
displayName: Secret name of container registries used for pulling and pushing images
value: factory2-pipeline-registry-credentials
required: false
- name: MESSAGING_PROVIDER
displayName: Name of the JMS messaging provider
value: Red Hat UMB
- name: MESSAGING_TOPIC
displayName: Name of the topic that the trigger subscribes to
value: "Consumer.rh-jenkins-ci-plugin.c3i-greenwave-trigger.VirtualTopic.eng.greenwave.decision.update"
- name: JENKINS_AGENT_IMAGE
displayName: Container image for Jenkins slave pods
value: quay.io/factory2/mbs-jenkins-slave:latest
- name: JENKINS_AGENT_CLOUD_NAME
displayName: Name of OpenShift cloud in Jenkins master configuration
value: openshift
- name: PROMOTING_DESTINATIONS
displayName: Comma-separated list of container repositories (without tags) to which the image will be promoted
description: OpenShift registries must be prefixed with 'atomic:'
required: true
{% include "snippets/c3i-library-parameters.yaml" %}
objects:
- kind: ServiceAccount
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave"
labels:
app: "${NAME}"
- kind: RoleBinding
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave_edit"
labels:
app: "${NAME}"
subjects:
- kind: ServiceAccount
name: "${NAME}-jenkins-slave"
roleRef:
name: edit
- kind: "BuildConfig"
apiVersion: "v1"
metadata:
name: "${NAME}"
labels:
app: "${NAME}"
spec:
runPolicy: "Parallel"
completionDeadlineSeconds: 1800
strategy:
type: JenkinsPipeline
jenkinsPipelineStrategy:
env:
- name: PROMOTING_DESTINATIONS
value: "${PROMOTING_DESTINATIONS}"
- name: JENKINS_AGENT_CLOUD_NAME
value: "${JENKINS_AGENT_CLOUD_NAME}"
- name: JENKINS_AGENT_IMAGE
value: "${JENKINS_AGENT_IMAGE}"
- name: JENKINS_AGENT_SERVICE_ACCOUNT
value: "${NAME}-jenkins-slave"
- name: SOURCE_CONTAINER_REPO
value: "${SOURCE_CONTAINER_REPO}"
- name: CONTAINER_REGISTRY_CREDENTIALS
value: "${CONTAINER_REGISTRY_CREDENTIALS}"
- name: TARGET_TAG
value: "${TARGET_TAG}"
- name: TAG_INTO_IMAGESTREAM
value: "${TAG_INTO_IMAGESTREAM}"
- name: DEST_IMAGESTREAM_NAME
value: "${DEST_IMAGESTREAM_NAME}"
- name: DECISION_CONTEXT_REGEX
value: "${DECISION_CONTEXT_REGEX}"
- name: SUBJECT_IDENTIFIER_REGEX
value: "${SUBJECT_IDENTIFIER_REGEX}"
- name: MESSAGING_PROVIDER
value: "${MESSAGING_PROVIDER}"
- name: MESSAGING_TOPIC
value: "${MESSAGING_TOPIC}"
# CI_MESSAGE and MESSAGE_HEADERS are used internally by JMS messaging plugin
- name: CI_MESSAGE
value:
- name: MESSAGE_HEADERS
value:
jenkinsfile: |
{% filter indent(width=10) %}{% include "mbs-greenwave-trigger.Jenkinsfile" %}{% endfilter %}

View File

@@ -1,103 +0,0 @@
{% include "snippets/c3i-library.groovy" %}
pipeline {
{% include "snippets/default-agent.groovy" %}
options {
timestamps()
timeout(time: 60, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}
environment {
PIPELINE_NAMESPACE = readFile('/run/secrets/kubernetes.io/serviceaccount/namespace').trim()
PAGURE_URL = "${PAGURE_URL}"
PAGURE_API = "${env.PAGURE_URL}/api/0"
PAGURE_REPO_NAME = "${PAGURE_REPO_NAME}"
PAGURE_REPO_IS_FORK = "${PAGURE_REPO_IS_FORK}"
PAGURE_POLLING_FOR_PR = "${PAGURE_POLLING_FOR_PR}"
PAGURE_REPO_HOME = "${env.PAGURE_URL}${env.PAGURE_REPO_IS_FORK == 'true' ? '/fork' : ''}/${env.PAGURE_REPO_NAME}"
GIT_URL = "${env.PAGURE_URL}/${env.PAGURE_REPO_IS_FORK == 'true' ? 'forks/' : ''}${env.PAGURE_REPO_NAME}.git"
PREMERGE_JOB_NAME = "${PREMERGE_JOB_NAME}"
POSTMERGE_JOB_NAME = "${POSTMERGE_JOB_NAME}"
}
triggers { pollSCM("${PAGURE_POLLING_SCHEDULE}") }
stages {
stage('Prepare') {
agent { label 'master' }
steps {
script {
def polled = env.PAGURE_POLLING_FOR_PR == 'true' ? 'pull/*/head' : "${PAGURE_POLLED_BRANCH}"
// Need to prefix the rev with origin/ for pollSCM to work correctly
def rev = "origin/${polled}"
def scmVars = c3i.clone(repo: env.GIT_URL, branch: polled, rev: rev)
env.GIT_COMMIT = scmVars.GIT_COMMIT
// setting build display name
def prefix = 'origin/'
def branch = scmVars.GIT_BRANCH.startsWith(prefix) ? scmVars.GIT_BRANCH.substring(prefix.size())
: scmVars.GIT_BRANCH // origin/pull/1234/head -> pull/1234/head, origin/master -> master
env.MBS_GIT_BRANCH = branch
echo "Build on branch=${env.MBS_GIT_BRANCH}, commit=${env.GIT_COMMIT}"
if (env.PAGURE_POLLING_FOR_PR == 'false') {
currentBuild.displayName = "${env.MBS_GIT_BRANCH}: ${env.GIT_COMMIT.substring(0, 7)}"
currentBuild.description = """<a href="${env.PAGURE_REPO_HOME}/c/${env.GIT_COMMIT}">${currentBuild.displayName}</a>"""
} else if (env.PAGURE_POLLING_FOR_PR == 'true' && branch ==~ /^pull\/[0-9]+\/head$/) {
env.PR_NO = branch.split('/')[1]
def prInfo = pagure.getPR(env.PR_NO)
if (prInfo.status == 'Open') {
env.PR_URL = "${env.PAGURE_REPO_HOME}/pull-request/${env.PR_NO}"
// To enable HTML syntax in build description, go to `Jenkins/Global Security/Markup Formatter` and select 'Safe HTML'.
def pagureLink = """<a href="${env.PR_URL}">PR#${env.PR_NO}</a>"""
echo "Building PR #${env.PR_NO}: ${env.PR_URL}"
currentBuild.displayName = "PR#${env.PR_NO}"
currentBuild.description = pagureLink
} else {
echo "Skipping PR#${env.PR_NO} because it is ${prInfo.status}"
env.SKIP = 'true'
}
} else { // This shouldn't happen.
error("Build is aborted due to unexpected polling trigger actions.")
}
}
}
}
stage('Update pipeline jobs') {
when {
expression {
return "${PIPELINE_UPDATE_JOBS_DIR}" && env.PAGURE_POLLING_FOR_PR == 'false' && env.MBS_GIT_BRANCH == "${PAGURE_POLLED_BRANCH}"
}
}
steps {
script {
c3i.clone(repo: env.GIT_URL, branch: env.MBS_GIT_BRANCH)
dir('openshift/integration/koji/pipelines') {
sh '''
make install JOBS_DIR="${PIPELINE_UPDATE_JOBS_DIR}"
'''
}
}
}
}
stage('Build') {
when {
not {
environment name: 'SKIP', value: 'true'
}
}
steps {
script {
openshift.withCluster() {
echo 'Starting a MBS build run...'
def devBuild = c3i.build(script: this,
objs: "bc/${env.PAGURE_POLLING_FOR_PR == 'true' ? env.PREMERGE_JOB_NAME : env.POSTMERGE_JOB_NAME}",
'-e', "MBS_GIT_REF=${env.MBS_GIT_BRANCH}", '-e', "PAGURE_REPO_IS_FORK=${env.PAGURE_REPO_IS_FORK}",
'-e', "PAGURE_REPO_NAME=${env.PAGURE_REPO_NAME}"
)
c3i.waitForBuildStart(script: this, build: devBuild)
def devBuildInfo = devBuild.object()
def downstreamBuildName = devBuildInfo.metadata.name
def downstreamBuildUrl = devBuildInfo.metadata.annotations['openshift.io/jenkins-build-uri']
echo "Downstream build ${downstreamBuildName}(${downstreamBuildUrl}) started."
}
}
}
}
}
}

View File

@@ -1,100 +0,0 @@
# Template to produce a new OpenShift pipeline job for polling for Pagure branches or PRs
#
---
apiVersion: v1
kind: Template
metadata:
name: mbs-polling-to-pagure
labels:
template: mbs-polling-to-pagure
parameters:
- name: NAME
displayName: Short unique identifier for the templated instances
description: This field is used to deploy multiple pipelines to one OpenShift project from this template.
required: true
value: mbs-polling-to-pagure
- name: PAGURE_REPO_NAME
displayName: Pagure project name
description: <username>/<namespace>/<repo>
required: true
value: fm-orchestrator
- name: PAGURE_REPO_IS_FORK
displayName: Is the Pagure repo a fork?
required: true
value: "false"
- name: PAGURE_POLLING_FOR_PR
displayName: set to 'true' to poll for PRs, or 'false' for the master branch
required: true
value: "false"
- name: PAGURE_URL
displayName: Pagure URL
value: "https://pagure.io"
- name: PAGURE_POLLING_SCHEDULE
displayName: Schedule of polling
description: using cron-style syntax
required: true
value: "H/5 * * * *"
- name: PAGURE_POLLED_BRANCH
displayName: Name of polled branch
required: true
value: "master"
- name: PREMERGE_JOB_NAME
displayName: Downstream pre-merge job name
required: true
value: mbs-premerge
- name: POSTMERGE_JOB_NAME
displayName: Downstream post-merge job name
required: true
value: mbs-postmerge
- name: PIPELINE_UPDATE_JOBS_DIR
displayName: location of pipeline job definitions for auto update
value: jobs
required: false
- name: JENKINS_AGENT_IMAGE
displayName: Container image for Jenkins slave pods
required: true
value: quay.io/factory2/mbs-jenkins-slave:latest
- name: JENKINS_AGENT_CLOUD_NAME
displayName: Name of OpenShift cloud in Jenkins master configuration
required: true
value: openshift
{% include "snippets/c3i-library-parameters.yaml" %}
objects:
- kind: ServiceAccount
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave"
labels:
app: "${NAME}"
- kind: RoleBinding
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave_edit"
labels:
app: "${NAME}"
subjects:
- kind: ServiceAccount
name: "${NAME}-jenkins-slave"
roleRef:
name: edit
- kind: "BuildConfig"
apiVersion: "v1"
metadata:
name: "${NAME}"
labels:
app: "${NAME}"
spec:
runPolicy: "Parallel"
completionDeadlineSeconds: 1800
strategy:
type: JenkinsPipeline
jenkinsPipelineStrategy:
env:
- name: JENKINS_AGENT_CLOUD_NAME
value: "${JENKINS_AGENT_CLOUD_NAME}"
- name: JENKINS_AGENT_IMAGE
value: "${JENKINS_AGENT_IMAGE}"
- name: JENKINS_AGENT_SERVICE_ACCOUNT
value: "${NAME}-jenkins-slave"
jenkinsfile: |-
{% filter indent(width=10) %}{% include "mbs-polling-pagure.Jenkinsfile" %}{% endfilter %}

View File

@@ -1,56 +0,0 @@
{% include "snippets/c3i-library.groovy" %}
pipeline {
{% include "snippets/default-agent.groovy" %}
options {
timestamps()
timeout(time: 120, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}
triggers {
ciBuildTrigger(
noSquash: false,
providerList: [
activeMQSubscriber(
name: params.MESSAGING_PROVIDER,
overrides: [topic: params.MESSAGING_TOPIC],
selector: "repo = '${params.TRACKED_CONTAINER_REPO}' AND action IN ('added', 'updated') AND tag = '${params.TRACKED_TAG}'",
)
]
)
}
stages {
stage("Message Check and setup") {
steps {
script {
if (!params.CI_MESSAGE) {
error("This build is not started by a CI message. Only configurations were done.")
}
c3i.clone(repo: params.MBS_GIT_REPO, branch: params.MBS_GIT_REF)
def message = readJSON text: params.CI_MESSAGE
echo "Tag :${message.tag} is ${message.action} in ${message.repo}. New digest: ${message.digest}"
env.FRONTEND_IMAGE_REF = "${message.repo}@${message.digest}"
// We have the digest of the current frontend image with this tag.
// Lookup the digest of the current backend image with the same tag.
if (params.CONTAINER_REGISTRY_CREDENTIALS) {
dir ("${env.HOME}/.docker") {
openshift.withCluster() {
def dockerconf = openshift.selector('secret', params.CONTAINER_REGISTRY_CREDENTIALS).object().data['.dockerconfigjson']
writeFile file: 'config.json', text: dockerconf, encoding: "Base64"
}
}
}
def output = sh(script: "skopeo inspect docker://${params.MBS_BACKEND_REPO}:${message.tag}", returnStdout: true).trim()
def backendData = readJSON text: output
env.BACKEND_IMAGE_REF = "${params.MBS_BACKEND_REPO}@${backendData.Digest}"
echo "Current mbs-backend image is: ${env.BACKEND_IMAGE_REF}"
echo "Triggering a job to test whether ${env.FRONTEND_IMAGE_REF} and ${env.BACKEND_IMAGE_REF} meet all criteria of the desired tag"
env.C3IAAS_PROJECT = params.C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE
env.IMAGE_IS_SCRATCH = false
env.PIPELINE_ID = "c3i-mbs-tag-${message.tag}-${message.digest[-9..-1]}"
}
}
}
{% include "snippets/mbs-integration-test.groovy" %}
}
}
{% include "snippets/functions.groovy" %}
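For orientation, here is a minimal, self-contained sketch of the kind of repotracker message this trigger consumes and of the values the pipeline derives from it. The field names (repo, action, tag, digest) match what the JMS selector and the readJSON call above rely on; the concrete values are made up.

import groovy.json.JsonSlurper

// Hypothetical repotracker "tag updated" message; real messages may carry more fields.
def ciMessage = '''{
  "repo": "quay.io/factory2/mbs-frontend",
  "action": "updated",
  "tag": "latest",
  "digest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
}'''

def message = new JsonSlurper().parseText(ciMessage)

// Same derivations as the pipeline above: pin the image by digest and build a short pipeline ID.
def frontendImageRef = "${message.repo}@${message.digest}".toString()
def pipelineId = "c3i-mbs-tag-${message.tag}-${message.digest[-9..-1]}".toString()

assert frontendImageRef == 'quay.io/factory2/mbs-frontend@' + message.digest
println "Tag ${message.tag} is ${message.action} in ${message.repo}"
println "Pipeline ID: ${pipelineId}"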
View File
@@ -1,148 +0,0 @@
# Template to produce a new OpenShift pipeline job for triggering a build on repotracker messages
#
---
apiVersion: v1
kind: Template
metadata:
name: mbs-repotracker-trigger
labels:
template: mbs-repotracker-trigger
parameters:
- name: NAME
displayName: Short unique identifier for the templated instances
description: This field is used to deploy multiple pipelines to one OpenShift project from this template.
value: mbs-repotracker-trigger
- name: MBS_GIT_REPO
displayName: MBS Git repo URL
description: Default MBS Git repo URL to run dev tests against
required: true
value: https://pagure.io/fm-orchestrator.git
- name: MBS_GIT_REF
displayName: MBS Git repo ref
description: Default MBS Git repo ref to run dev tests against
required: true
value: master
- name: TRACKED_CONTAINER_REPO
displayName: Container repo to be tracked
value: quay.io/factory2/mbs-frontend
- name: TRACKED_TAG
displayName: Name of tag to be tracked
required: true
- name: MBS_BACKEND_REPO
displayName: The repo (without tag) where the mbs-backend image is located
value: quay.io/factory2/mbs-backend
- name: CONTAINER_REGISTRY_CREDENTIALS
displayName: Secret name of container registries used for pulling and pushing images
value: factory2-pipeline-registry-credentials
required: false
- name: JENKINS_AGENT_IMAGE
displayName: Container image for Jenkins slave pods
value: quay.io/factory2/mbs-jenkins-slave:latest
- name: JENKINS_AGENT_CLOUD_NAME
displayName: Name of OpenShift cloud in Jenkins master configuration
value: openshift
- name: MESSAGING_PROVIDER
displayName: Name of the JMS messaging provider
value: Red Hat UMB
- name: MESSAGING_TOPIC
displayName: Name of the topic that the trigger subscribes to
value: "Consumer.rh-jenkins-ci-plugin.c3i-mbs-repotracker-trigger.VirtualTopic.eng.repotracker.container.tag.>"
- name: ENVIRONMENT
displayName: Environment name (dev/stage/prod)
required: true
value: dev
- name: USE_C3IAAS
displayName: >-
Use C3I-as-a-Service to dynamically allocate a temporary OpenShift project for building
and testing.
required: true
value: "true"
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE
displayName: The namespace where the C3I-as-a-Service project request BuildConfig has been defined
required: false
value: c3i
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME
displayName: The name of the C3I-as-a-Service project request BuildConfig
required: false
value: c3iaas-request-project
- name: C3IAAS_LIFETIME
displayName: The lifetime of the OpenShift project allocated by C3I-as-a-Service.
required: true
value: "120"
- name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE
displayName: The namespace where the Pipeline-as-a-Service project request BuildConfig has been defined
required: false
value: c3i
{% include "snippets/c3i-library-parameters.yaml" %}
objects:
- kind: ServiceAccount
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave"
labels:
app: "${NAME}"
- kind: RoleBinding
apiVersion: v1
metadata:
name: "${NAME}-jenkins-slave_edit"
labels:
app: "${NAME}"
subjects:
- kind: ServiceAccount
name: "${NAME}-jenkins-slave"
roleRef:
name: edit
- kind: "BuildConfig"
apiVersion: "v1"
metadata:
name: "${NAME}"
labels:
app: "${NAME}"
spec:
runPolicy: "Parallel"
completionDeadlineSeconds: 1800
strategy:
type: JenkinsPipeline
jenkinsPipelineStrategy:
env:
- name: JENKINS_AGENT_CLOUD_NAME
value: "${JENKINS_AGENT_CLOUD_NAME}"
- name: JENKINS_AGENT_IMAGE
value: "${JENKINS_AGENT_IMAGE}"
- name: JENKINS_AGENT_SERVICE_ACCOUNT
value: "${NAME}-jenkins-slave"
- name: MBS_GIT_REPO
value: "${MBS_GIT_REPO}"
- name: MBS_GIT_REF
value: "${MBS_GIT_REF}"
- name: TRACKED_CONTAINER_REPO
value: "${TRACKED_CONTAINER_REPO}"
- name: TRACKED_TAG
value: "${TRACKED_TAG}"
- name: MBS_BACKEND_REPO
value: "${MBS_BACKEND_REPO}"
- name: CONTAINER_REGISTRY_CREDENTIALS
value: "${CONTAINER_REGISTRY_CREDENTIALS}"
- name: USE_C3IAAS
value: "${USE_C3IAAS}"
- name: ENVIRONMENT
value: "${ENVIRONMENT}"
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE
value: "${C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAMESPACE}"
- name: C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME
value: "${C3IAAS_REQUEST_PROJECT_BUILD_CONFIG_NAME}"
- name: C3IAAS_LIFETIME
value: "${C3IAAS_LIFETIME}"
- name: PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE
value: "${PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE}"
- name: MESSAGING_PROVIDER
value: "${MESSAGING_PROVIDER}"
- name: MESSAGING_TOPIC
value: "${MESSAGING_TOPIC}"
# CI_MESSAGE and MESSAGE_HEADERS are used internally by the JMS messaging plugin
- name: CI_MESSAGE
value:
- name: MESSAGE_HEADERS
value:
jenkinsfile: |
{% filter indent(width=10) %}{% include "mbs-repotracker-trigger.Jenkinsfile" %}{% endfilter %}
View File
@@ -1,8 +0,0 @@
- name: C3I_LIB_URL
displayName: C3I library git url
required: true
value: "https://pagure.io/c3i-library.git"
- name: C3I_LIB_BRANCH
displayName: C3I library branch
required: true
value: "master"
View File
@@ -1,2 +0,0 @@
library identifier: "c3i@${C3I_LIB_BRANCH}", changelog: false,
retriever: modernSCM([$class: 'GitSCMSource', remote: "${C3I_LIB_URL}"])
View File
@@ -1,29 +0,0 @@
agent {
kubernetes {
cloud "${params.JENKINS_AGENT_CLOUD_NAME}"
label "jenkins-slave-${UUID.randomUUID().toString()}"
serviceAccount "${params.JENKINS_AGENT_SERVICE_ACCOUNT}"
defaultContainer 'jnlp'
yaml """
apiVersion: v1
kind: Pod
metadata:
labels:
app: "jenkins-${env.JOB_BASE_NAME.take(50).endsWith('-') ? env.JOB_BASE_NAME.take(49): env.JOB_BASE_NAME.take(50)}"
factory2-pipeline-build-number: "${env.BUILD_NUMBER}"
spec:
containers:
- name: jnlp
image: "${params.JENKINS_AGENT_IMAGE}"
imagePullPolicy: Always
tty: true
resources:
requests:
memory: 512Mi
cpu: 300m
limits:
memory: 768Mi
cpu: 500m
"""
}
}
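The app label above is truncated so that, with the "jenkins-" prefix, it stays within Kubernetes' 63-character limit for label values and never ends in '-'. A standalone sketch of that expression, using a deliberately long, hypothetical job name:

// The 50th character of this made-up job name is '-', to exercise the edge case.
def jobBaseName = ('a' * 49) + '-plus-a-long-suffix'

def truncated = jobBaseName.take(50).endsWith('-') ? jobBaseName.take(49) : jobBaseName.take(50)
def label = "jenkins-${truncated}".toString()

assert truncated == 'a' * 49   // the trailing '-' was dropped
assert label.size() <= 63      // fits a Kubernetes label value
println label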
View File
@@ -1,64 +0,0 @@
def sendToResultsDB(imageRef, status) {
if (!params.MESSAGING_PROVIDER) {
echo "Message bus is not set. Skipping send of:\nimageRef: ${imageRef}\nstatus: ${status}"
return
}
def (repourl, digest) = imageRef.tokenize('@')
def (registry, reponame) = repourl.split('/', 2)
def image = reponame.split('/').last()
def sendResult = sendCIMessage \
providerName: params.MESSAGING_PROVIDER, \
overrides: [topic: 'VirtualTopic.eng.ci.container-image.test.complete'], \
messageType: 'Custom', \
messageProperties: '', \
messageContent: """
{
"ci": {
"name": "C3I Jenkins",
"team": "DevOps",
"url": "${env.JENKINS_URL}",
"docs": "https://pagure.io/fm-orchestrator/blob/master/f/openshift/integration/koji",
"irc": "#pnt-devops-dev",
"email": "pnt-factory2-devel@redhat.com",
"environment": "${params.ENVIRONMENT}"
},
"run": {
"url": "${env.BUILD_URL}",
"log": "${env.BUILD_URL}/console",
"debug": "",
"rebuild": "${env.BUILD_URL}/rebuild/parametrized"
},
"artifact": {
"type": "container-image",
"repository": "${reponame}",
"digest": "${digest}",
"nvr": "${imageRef}",
"issuer": "c3i-jenkins",
"scratch": ${env.IMAGE_IS_SCRATCH},
"id": "${image}@${digest}"
},
"system":
[{
"os": "${params.JENKINS_AGENT_IMAGE}",
"provider": "openshift",
"architecture": "x86_64"
}],
"type": "integration",
"category": "${params.ENVIRONMENT}",
"status": "${status}",
"xunit": "",
"generated_at": "${new Date().format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone('UTC'))}",
"namespace": "c3i",
"version": "0.1.0"
}
"""
if (sendResult.getMessageId()) {
// echo sent message id and content
echo 'Successfully sent the test result to ResultsDB.'
echo "Message ID: ${sendResult.getMessageId()}"
echo "Message content: ${sendResult.getMessageContent()}"
} else {
echo 'Failed to send the test result to ResultsDB.'
}
}
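To make the parsing above concrete, here is a small standalone sketch of the same tokenization; the image reference is a made-up example.

// Mirrors how sendToResultsDB() decomposes an image reference of the form <registry>/<repo>@<digest>.
def imageRef = 'quay.io/factory2/mbs-backend@sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'

def (repourl, digest) = imageRef.tokenize('@')    // ['quay.io/factory2/mbs-backend', 'sha256:...']
def (registry, reponame) = repourl.split('/', 2)  // ['quay.io', 'factory2/mbs-backend']
def image = reponame.split('/').last()            // 'mbs-backend'

assert registry == 'quay.io'
assert reponame == 'factory2/mbs-backend'
assert image == 'mbs-backend'
assert digest.startsWith('sha256:')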
View File
@@ -1,14 +0,0 @@
if (!env.TRIGGER_NAMESPACE) {
env.TRIGGER_NAMESPACE = readFile("/run/secrets/kubernetes.io/serviceaccount/namespace").trim()
}
if(!env.PAAS_DOMAIN) {
openshift.withCluster() {
openshift.withProject(env.TRIGGER_NAMESPACE) {
def testroute = openshift.create('route', 'edge', "test-${env.BUILD_NUMBER}", '--service=test', '--port=8080')
def testhost = testroute.object().spec.host
env.PAAS_DOMAIN = testhost.minus("test-${env.BUILD_NUMBER}-${env.TRIGGER_NAMESPACE}.")
testroute.delete()
}
}
echo "Routes end with ${env.PAAS_DOMAIN}"
}
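As a concrete illustration of the minus() call above, with a made-up build number, namespace, and route host:

// Standalone sketch: derive the cluster routing domain by stripping the
// temporary route's "test-<build>-<namespace>." prefix. All values are hypothetical.
def buildNumber = '42'
def triggerNamespace = 'c3i'
def testhost = 'test-42-c3i.apps.example.com'   // what spec.host might look like

def paasDomain = testhost.minus("test-${buildNumber}-${triggerNamespace}.")
assert paasDomain == 'apps.example.com'
println "Routes end with ${paasDomain}"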
View File
@@ -1,78 +0,0 @@
stage('Run integration tests') {
stages {
stage('Deploy test environment') {
steps {
script {
{% include "snippets/get_paas_domain.groovy" %}
if (!env.PIPELINE_ID) {
env.PIPELINE_ID = "c3i-mbs-${UUID.randomUUID().toString().take(8)}"
}
openshift.withCluster() {
openshift.withProject(params.PIPELINE_AS_A_SERVICE_BUILD_NAMESPACE) {
c3i.buildAndWait(script: this, objs: "bc/pipeline-as-a-service",
'-e', "DEFAULT_IMAGE_TAG=${env.ENVIRONMENT}",
'-e', "PIPELINE_ID=${env.PIPELINE_ID}",
'-e', "SERVICES_TO_DEPLOY='umb mbs-frontend mbs-backend krb5 ldap koji-hub'",
'-e', "C3IAAS_PROJECT=${env.C3IAAS_PROJECT ?: ''}",
'-e', "MBS_BACKEND_IMAGE=${env.BACKEND_IMAGE_REF}",
'-e', "MBS_FRONTEND_IMAGE=${env.FRONTEND_IMAGE_REF}",
'-e', "PAAS_DOMAIN=${env.PAAS_DOMAIN}"
)
}
}
}
}
}
stage('Run tests') {
steps {
script {
sh "openshift/integration/koji/pipelines/tests/runtests ${env.PIPELINE_ID}"
}
}
post {
success {
echo "All tests successful"
script {
[env.BACKEND_IMAGE_REF, env.FRONTEND_IMAGE_REF].each {
sendToResultsDB(it, 'passed')
}
}
}
failure {
echo "Test cases FAILED"
}
}
}
}
post {
failure {
script {
[env.BACKEND_IMAGE_REF, env.FRONTEND_IMAGE_REF].each {
sendToResultsDB(it, 'failed')
}
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
echo 'Getting logs from all deployments...'
openshift.selector('pods', ['c3i.redhat.com/pipeline': env.PIPELINE_ID]).logs('--tail 100')
}
}
}
}
cleanup {
script {
if (params.CLEANUP == 'true' && params.USE_C3IAAS != 'true') {
openshift.withCluster() {
openshift.withProject(env.PIPELINE_ID) {
/* Tear down everything we just created */
echo 'Tearing down test resources...'
openshift.selector('all,pvc,configmap,secret',
['c3i.redhat.com/pipeline': env.PIPELINE_ID]).delete('--ignore-not-found=true')
}
}
} else {
echo 'Skipping cleanup'
}
}
}
}
}