**.sw?,
**.orig
-[all.GitCommit]
-bears = GitCommitBear
-ignore_length_regex = Signed-off-by,
- Also-by,
- Co-authored-by,
- http://,
- https://
-
[all.Groovy]
bears = SpaceConsistencyBear
files = **.groovy
- project-view
project-name: ci-jobs
+
+# required by gerrit-sonar-prescan and github-sonar-prescan
+- builder:
+ name: lf-sonar-prescan
+ builders:
+ - shell: "#!/bin/sh"
+
+- project:
+ name: gerrit-sonar-jobs
+ project-name: gerrit-ciman
+ jobs:
+ - gerrit-sonar
+ - gerrit-sonar-prescan
+ - gerrit-sonar-prescan-script:
+ sonar-prescan-script: "#!/bin/sh"
+
+- project:
+ name: github-sonar-jobs
+ project-name: github-ciman
+ jobs:
+ - github-sonar
+ - github-sonar-prescan
+ - github-sonar-prescan-script:
+ sonar-prescan-script: "#!/bin/sh"
hooks:
- id: prettier
+ - repo: https://github.com/jorisroovers/gitlint
+ rev: v0.12.0
+ hooks:
+ - id: gitlint
+
- repo: https://github.com/jumanjihouse/pre-commit-hooks
rev: 1.11.2
hooks:
In the case of template definitions, if a parameter below is not passed,
the one defined in the default clouds section will be inherited.
- :IMAGE_NAME: The image name to use for this template.
- (required)
+ :IMAGE_NAME: The image name to use for this template. (required)
:HARDWARE_ID: OpenStack flavor to use. (required)
:LABELS: Labels to assign to the vm. (default: FILE_NAME)
+ :VOLUME_SIZE: Volume size to assign to vm. (default: "")
+ :HARDWARE_ID: Hardware Id to assign to vm. (default: "")
:NETWORK_ID: OpenStack network to use. (default: "")
:USER_DATA_ID: User Data to pass into the instance.
(default: jenkins-init-script)
:START_TIMEOUT: Number of milliseconds to wait for the agent to be
provisioned and connected. (default: 600000)
:KEY_PAIR_NAME: SSH Public Key Pair to use for authentication.
- (default: jenkins)
+ (default: jenkins-ssh)
:NUM_EXECUTORS: Number of executors to enable for the instance.
(default: 1)
- :JVM_OPTIONS: JVM Options to pass to Java. (default: "")
+ :JVM_OPTIONS: JVM Options to pass to Java. (default: null)
:FS_ROOT: File system root for the workspace. (default: "/w")
+ :NODE_PROPERTIES: Node properties. (default: null)
:RETENTION_TIME: Number of minutes to wait for an idle slave to be used
again before it's removed. If set to -1, the slave will be kept
forever. (default: 0)
:CONNECTION_TYPE: The connection type for Jenkins to connect to the build
minion. Valid options: JNLP, SSH. (default: "SSH")
+ :CONFIG_TYPE: Configuration drive. (default: null)
For a live example see the OpenDaylight project jenkins-config directory.
https://github.com/opendaylight/releng-builder/tree/master/jenkins-config
Sonar
-----
-Runs Jenkins Sonarqube plug-in to review for bugs, code smells,
-and security vulnerabilities.
+Runs the Jenkins SonarQube Scanner plug-in to analyze code for bugs,
+code smells and security vulnerabilities, and to upload the result
+(possibly including code-coverage statistics) to a SonarQube server
+or to SonarCloud.io.
Requires ``SonarQube Scanner for Jenkins``
+One of the optional parameters sonar-project-file and sonar-properties
+must be supplied; they cannot both be empty.
+
Plug-in configurations
Manage Jenkins --> Configure System --> SonarQube servers
- Name: Sonar (fixed)
:sonar-properties: Sonar configuration properties. (default: "")
:sonar-java-opts: JVM options. (default: "")
:sonar-additional-args: Additional command line arguments. (default: "")
+
+
+Sonar with Prescan Script
+-------------------------
+
+The same as the Sonar job above, except the caller must supply a shell script
+to run prior to the Sonar scan. This is commonly used to install prerequisites,
+build the project, execute unit tests and generate a code-coverage report.
+
+:Template Names:
+
+ - {project-name}-sonar-prescan-script
+ - gerrit-sonar-prescan-script
+ - github-sonar-prescan-script
+
+:Required Parameters:
+ :sonar-prescan-script: A shell script that will run prior to the Sonar scan.
+
+:Optional Parameters:
+ :sonar-task: Sonar task to run. (default: "")
+ :sonar-project-file: The filename for the project's properties.
+ (default: "sonar-project.properties")
+ :sonar-properties: Sonar configuration properties. (default: "")
+ :sonar-java-opts: JVM options. (default: "")
+ :sonar-additional-args: Additional command line arguments. (default: "")
This template supports Maven and Container release jobs.
+This template uses a git commit choosing strategy that builds the merged
+commit with the release yaml file, not the tip of the target branch, so
+projects can repeat the release action in case of merge job failure.
+
:Template Name: {project-name}-release-merge
:Comment Trigger: remerge
signs the tag and pushes the tag to the git server. The release merge
template accepts neither a branch nor a stream parameter.
+These templates use a git commit choosing strategy that builds the merged
+commit with the release yaml file, not the tip of the target branch, so
+projects can repeat the release action in case of merge job failure.
+
:Template Names:
- {project-name}-pypi-release-merge
PackageCloud Release Verify
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-This template supports PackageCloud release jobs.
+This template supports PackageCloud release jobs. Checks that the specified
+packages are present in the staging repository and absent from the release
+repository.
:Template Name: {project-name}-packagecloud-release-verify
PackageCloud Release Merge
~~~~~~~~~~~~~~~~~~~~~~~~~~
-This template supports PackageCloud release jobs.
+This template supports PackageCloud release jobs. Promotes the specified
+packages from the staging repository to the release repository.
+
+This template uses a git commit choosing strategy that builds the merged
+commit with the release yaml file, not the tip of the target branch, so
+projects can repeat the release action in case of merge job failure.
:template name: {project-name}-packagecloud-release-merge
following repo contains a script that will do this for you. Please refer to the
explanation presented in: https://github.com/lfit-sandbox/test. This is all
currently a beta feature, so feedback is encouraged. The script
-`docs_script.sh` is not needed, you can copy the files by hand if you prefer.
+``docs_script.sh`` is not needed, you can copy the files by hand if you prefer.
The default location of the tox.ini file is in the git repository root
directory. Optionally your documentation lead may decide to store all tox files
If your project's tox dir is "docs/" and not "." the tox.ini must be
reconfigured with the correct relative paths.
+Additionally, you must also modify the doc-dir. For example, from the default
+of ``doc-dir: "docs/_build/html"`` to ``doc-dir: "_build/html"``, as the relative
+path in the tox run has changed.
+
+
Once these files are correctly configured in your repository you can test
locally:
build-node: centos7-builder-1c-1g
default-version: latest
tox-dir: "."
+ doc-dir: "docs/_build/html"
jobs:
- rtdv3-global-verify
stream:
- project:
name: rtdv3-global-merge
default-version: latest
+ tox-dir: "."
+ doc-dir: "docs/_build/html"
build-node: centos7-builder-1c-1g
jobs:
- rtdv3-global-merge
branch: stable/{stream}
Or add both jobs via a job group:
+This real-world example also shows how to configure your builds to use
+a tox.ini that lives inside your docs/ directory.
.. code-block:: bash
+ # Global read the docs version 3 jobs
+ #
+ # jobs trigger for all projects, all branches
+ # on any changes to files in a docs/ directory
+ # and publish subprojects to readthedocs.io
+ # using credentials from Jenkins settings
---
+ - project:
+ name: rtdv3-view
+ project-name: rtdv3-global
+ views:
+ - project-view
+
- project:
name: rtdv3-global
default-version: latest
- tox-dir: "."
- build-node: centos7-builder-1c-1g
+ tox-dir: "docs/"
+ doc-dir: "_build/html"
+ build-node: centos7-builder-2c-1g
+ # override the default to ignore ref-updated-event (tag)
+ gerrit_merge_triggers:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: remerge$
jobs:
- - rtdv3-global
+ - rtdv3-global-verify
+ - rtdv3-global-merge
stream:
- master:
- branch: master
-
+ branch: '*'
GitHub jobs must be per project, and will be covered by a different set of jobs once these are proven.
(default: 10)
:submodule-disable: Disable submodule checkout operation.
(default: false)
-
+ :tox-dir: Directory containing the project's read the docs tox.ini
+ :doc-dir: Relative directory of the project's docs generated by tox
:gerrit_merge_triggers: Override Gerrit Triggers.
:gerrit_trigger_file_paths: Override file paths filter which checks which
file modifications will trigger a build.
:submodule-disable: Disable submodule checkout operation.
(default: false)
:tox-dir: Directory containing the project's read the docs tox.ini
+ :doc-dir: Relative directory of the project's docs generated by tox
:gerrit_verify_triggers: Override Gerrit Triggers.
:gerrit_trigger_file_paths: Override file paths filter which checks which
file modifications will trigger a build.
sonar-java-opts: "{sonar-java-opts}"
sonar-additional-args: "{sonar-additional-args}"
+- lf_sonar_builders_prescan_script: &lf_sonar_builders_prescan_script
+ name: lf-sonar-builders-prescan-script
+ builders:
+ - lf-infra-pre-build
+ - shell: "{sonar-prescan-script}"
+ - lf-infra-sonar:
+ sonar-task: "{sonar-task}"
+ sonar-project-file: "{sonar-project-file}"
+ sonar-properties: "{sonar-properties}"
+ sonar-java-opts: "{sonar-java-opts}"
+ sonar-additional-args: "{sonar-additional-args}"
+
- lf_sonar_gerrit_common: &lf_sonar_gerrit_common
name: lf-sonar-gerrit-common
gerrit_sonar_triggers:
<<: *lf_sonar_builders_prescan
# yamllint disable-line rule:key-duplicates
<<: *lf_sonar_github_common
+
+- job-template:
+ name: "{project-name}-sonar-prescan-script"
+ id: gerrit-sonar-prescan-script
+ <<: *lf_sonar_common
+ # yamllint disable-line rule:key-duplicates
+ <<: *lf_sonar_builders_prescan_script
+ # yamllint disable-line rule:key-duplicates
+ <<: *lf_sonar_gerrit_common
+
+- job-template:
+ name: "{project-name}-sonar-prescan-script"
+ id: github-sonar-prescan-script
+ <<: *lf_sonar_common
+ # yamllint disable-line rule:key-duplicates
+ <<: *lf_sonar_builders_prescan_script
+ # yamllint disable-line rule:key-duplicates
+ <<: *lf_sonar_github_common
submodule-recursive: "{submodule-recursive}"
submodule-timeout: "{submodule-timeout}"
submodule-disable: "{submodule-disable}"
- choosing-strategy: gerrit
+ # merge jobs always build from tip
+ choosing-strategy: default
triggers:
- gerrit:
submodule-recursive: "{submodule-recursive}"
submodule-timeout: "{submodule-timeout}"
submodule-disable: "{submodule-disable}"
- choosing-strategy: gerrit
+ # merge jobs always build from tip
+ choosing-strategy: default
triggers:
- timed: "{obj:cron}"
submodule-disable: true
submodule-recursive: false
submodule-timeout: 10
- choosing-strategy: default
+ # release merge jobs build from commit not tip
+ choosing-strategy: gerrit
triggers:
- gerrit:
submodule-recursive: "{submodule-recursive}"
submodule-timeout: "{submodule-timeout}"
submodule-disable: "{submodule-disable}"
+ # release merge jobs build from commit not tip
choosing-strategy: gerrit
triggers:
submodule-disable: true
submodule-recursive: false
submodule-timeout: 10
- choosing-strategy: default
+ # release merge jobs build from commit not tip
+ choosing-strategy: gerrit
triggers:
- gerrit:
- change-merged-event
- comment-added-contains-event:
comment-contains-value: '^Patch Set\s+\d+:\s+remerge\s*$'
- - ref-updated-event
#####################
# Job Configuration #
build-timeout: 15
disable-job: false
git-url: "$GIT_URL/$PROJECT"
- submodule-recursive: true
- submodule-disable: false
+ submodule-recursive: false
+ submodule-disable: true
submodule-timeout: 10
tox-dir: "."
+ doc-dir: "docs/_build/html"
gerrit-skip-vote: false
gerrit_trigger_file_paths:
properties-content: |
TOX_ENVS=docs,docs-linkcheck
TOX_DIR={tox-dir}
+ DOC_DIR={doc-dir}
- lf-infra-tox-run:
parallel: "true"
- lf-rtdv3-build:
submodule-timeout: 10
submodule-disable: false
wss-unified-agent-opts: ""
- wss-unified-agent-version: 19.8.1
+ wss-unified-agent-version: 19.12.2
gerrit_trigger_file_paths:
- compare-type: ANT
--- /dev/null
+#! /bin/bash
+# SPDX-License-Identifier: EPL-1.0
+##############################################################################
+# Copyright (c) 2019 The Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Eclipse Public License v1.0
+# which accompanies this distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+##############################################################################
+
+set -euf
+
+# This script can be run on a cost file (.csv) to standardize the formats of the
+# uptime, cost & stack_cost fields.
+
+cost_file=$1
+
+while IFS="," read -r job_name build_number date resource uptime cost stack_cost; do
+ [[ $resource == "0" ]] && resource='unknown'
+ printf "%s,%s,%s,%s,%d,%.2f,%.2f\n" "$job_name" "$build_number" "$date" \
+ "$resource" "$uptime" "$cost" "$stack_cost"
+done < "$cost_file"
--- /dev/null
+---
+fixes:
+ - |
+ Removed broken code that removed leading/trailing white-space from
+ variables. Use lf-activate-venv() to install openstack. Enabled 'set -euf
+ pipefail' and updated code to handle errors. Updated out of date
+ comments. Some minor cleanup of code for clarity.
--- /dev/null
+---
+fixes:
+ - |
+ Call "lftools jenkins" after credentials are set to fix failures due to the
+ call being made without credentials being set first. The previous method
+ did not require credentials, so the failure was introduced when we switched
+ to using lftools. The os_plugin_version variable is not needed before the
+ JENKINS_USER and JENKINS_PASSWORD are set, so no other changes
+ are necessary.
--- /dev/null
+---
+fixes:
+ - |
+ A recent change has made the "null" string a bad value for FLOATING_IP_POOL.
+ By making it an empty string, we recreate the old functionality of having
+ the default floating IP pool set to "No value".
--- /dev/null
+---
+fixes:
+ - |
+ Fix release file detection on commit with multiple parents
--- /dev/null
+---
+fixes:
+ - |
+ Branch discovery and build polling implemented.
+ If a branch has not been seen by rtd,
+ we trigger a build with rtd and poll until that build
+ is complete.
+ We can then enable the branch and trigger a build
+ against it, again polling all builds until they are
+ complete.
--- /dev/null
+---
+fixes:
+ - |
+ Fix to disable cloning submodules for rtdv3 verify job
--- /dev/null
+---
+fixes:
+ - |
+ Add configurable doc-dir defaults to "docs/_build/html"
+ needed for relative path modifications if you change the tox-dir
+ Modified tox-run.sh with "ARCHIVE_DOC_DIR" variable
+ so that relative paths can be handed when uploading generated
+ docs to the log server
--- /dev/null
+---
+fixes:
+ - |
+ Use git choosing strategy default in tox and pypi merge jobs for
+ gerrit. This makes those jobs consistent with maven and other
+ merge jobs for gerrit that always build from tip of the target
+ branch to create artifacts from the latest & greatest code.
+ Building from tip (not from Gerrit commit/merge point) avoids
+ confusion about content when changes are merged out of order.
+ For example, a fix is submitted and merged, but the merge job
+ fails. In the mean time, a different change that happened
+ earlier in commit history gets merged (or the merge job is
+ retriggered), causing a new artifact to be pushed. But that
+ artifact does not have the expected fix.
+
+ Add comments to release merge jobs why their choosing strategy
+ is not default.
+
+ Document the git commit choosing strategy for the release merge jobs.
--- /dev/null
+---
+fixes:
+ - |
+ Extend shell scripts that invoke pip freeze to show
+ python and pip versions also
--- /dev/null
+---
+fixes:
+ - |
+ Extend release-job.sh to detect if distribution_type is missing
+ from the release yaml file and show a meaningful error. The
+ shell option pipefail causes the script to halt silently if
+ niet fails to find that key, which utterly baffles users.
--- /dev/null
+---
+fixes:
+ - |
+ Use choosing strategy Gerrit Trigger in container/jar and
+ package cloud release merge jobs. This retains the current
+ behavior in the simple merge case, and ensures that a job
+ triggered by a "remerge" comment uses the release file at
+ that commit. The previous choosing strategy, default, uses
+ the tip of the target branch. That does not allow recovery
+ from merge job failure if the target branch has advanced past
+ the commit with the release file.
--- /dev/null
+---
+issues:
+ - |
+ Removes ref-update from rtd-merge jobs which is triggering unnecessary
+ jobs to be queued in Jenkins. This ref-update was originally added to
+ enable Jenkins to trigger builds when a release tag is pushed to update
+ the docs however it's now triggering many unnecessary jobs wasting
+ project CI resources.
--- /dev/null
+---
+features:
+ - |
+ New script has been added: scrape-job-cost. This script will be executed by
+ cron as nexus on the Nexus Server for each project. It will extract cost
+ data from the nexus directory for each Jenkins Builder (production &
+ sandbox). The cost data for each builder will be appended to separate cost
+ files located in ~nexus/cost on the nexus server. The files will be named
+ sandbox-YYYY.csv and production-YYYY.csv.
--- /dev/null
+---
+features:
+ - |
+ New templates gerrit-sonar-prescan-script and github-sonar-prescan-script
+ accept an arbitrary shell-script body that can do work like install
+ prerequisites, build and test to generate a code-coverage report for
+ the Sonar Scanner to find and upload. This adds flexibility that the
+ existing gerrit-sonar-prescan and github-sonar-prescan templates lack.
reno~=2.11.2
-sphinx~=1.7.9
+Sphinx~=2.3.1
+sphinx_bootstrap_theme~=0.7.1
sphinxcontrib-programoutput
-sphinx_bootstrap_theme>=0.6.0
yq
echo "---> gerrit-push-patch.sh"
# Push a change to Gerrit if files modified in repository.
#
-# The script requires to install the minimum version 1.25 of git-review using
-# virtualenv and pip install which supports `--reviewers` option.
+# Need to use git-review >= 1.28 to be compatible with Gerrit 3
#
# The script allows a job to push a patch to Gerrit in an automated fashion.
# This is meant for tasks that create the same patch regularly and need the
# ability to detect if an unreviewed patch already exists, in which case it
# will update the existing patch.
#
-# Note: This patch assumes the $WORKSPACE contains the project repo with
-# the files changed already "git add" and waiting for a "git commit" call.
+# Note: This script expects $WORKSPACE to point to a project git repo that
+# may contain staged commits. This script will exit with OK status if no
+# staged commits are present, otherwise the staged commits will be committed
+# and a Gerrit review will be created.
#
-# This script requires the following JJB variables to be passed in:
+# This script expects the following environment variables to be set in the
+# JJB configuration:
#
# $PROJECT : Gerrit project-name
# $GERRIT_COMMIT_MESSAGE: Commit message to assign to commit
# $GERRIT_USER : Gerrit user
# $REVIEWERS_EMAIL : Reviewers email
-# TODO: remove the workaround when v1.26 is available on all images
-# Workaround for git-review bug in v1.24
-# https://storyboard.openstack.org/#!/story/2001081
-set +u # Allow unbound variables for virtualenv
-virtualenv --quiet "/tmp/v/git-review"
-# shellcheck source=/tmp/v/git-review/bin/activate disable=SC1091
-source "/tmp/v/git-review/bin/activate"
-pip install --quiet --upgrade "pip==9.0.3" setuptools
-pip install --quiet --upgrade git-review
-set -u
-# End git-review workaround
-# Remove any leading or trailing quotes surrounding the strings
-# which can cause parse errors when passed as CLI options to commands
-# shellcheck disable=SC2001
-PROJECT="$(echo "$PROJECT" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")"
-# shellcheck disable=SC2001
-GERRIT_COMMIT_MESSAGE="$(echo "$GERRIT_COMMIT_MESSAGE" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")"
-# shellcheck disable=SC2001
-GERRIT_HOST="$(echo "$GERRIT_HOST" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")"
-# shellcheck disable=SC2001
-GERRIT_TOPIC="$(echo "$GERRIT_TOPIC" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")"
-# shellcheck disable=SC2001
-GERRIT_USER="$(echo "$GERRIT_USER" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")"
-# shellcheck disable=SC2001
-REVIEWERS_EMAIL="$(echo "$REVIEWERS_EMAIL" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")"
-job=$JOB_NAME/$BUILD_NUMBER
+set -eufo pipefail
+
+# No reason to continue if there are no staged commits
+staged_commits=$(git diff --cached --name-only)
+if [[ -z $staged_commits ]]; then
+ echo "INFO: Nothing to commit"
+ exit 0
+fi
+
+echo -e "INFO: Staged for commit:\n$staged_commits\n"
-CHANGE_ID=$(ssh -p 29418 "$GERRIT_USER@$GERRIT_HOST" gerrit query \
- limit:1 owner:self is:open project:"$PROJECT" \
- message: "$GERRIT_COMMIT_MESSAGE" \
- topic: "$GERRIT_TOPIC" | \
- grep 'Change-Id:' | \
- awk '{ print $2 }')
+# shellcheck disable=SC1090
+source ~/lf-env.sh
-if [ -z "$CHANGE_ID" ]; then
- git commit -sm "$GERRIT_COMMIT_MESSAGE" -m "Job: ${job}"
+lf-activate-venv "git-review>=1.28"
+
+# Query for a pre-existing gerrit review
+query_result=$(ssh -p 29418 "$GERRIT_USER@$GERRIT_HOST" gerrit query \
+ limit:1 owner:self is:open project:"$PROJECT" \
+ message: "$GERRIT_COMMIT_MESSAGE" \
+ topic: "$GERRIT_TOPIC")
+
+# Extract the change_id from the query_result
+job=$JOB_NAME/$BUILD_NUMBER
+# If available, add change_id to commit message
+if change_id=$(echo "$query_result" | grep 'Change-Id:' | awk '{print $2}'); then
+ echo "NOTE: Found gerrit review: $change_id"
+ message="Job: $job\nChange-Id: $change_id"
else
- git commit -sm "$GERRIT_COMMIT_MESSAGE" -m "Job: ${job}\nChange-Id: $CHANGE_ID"
+ echo "NOTE: No gerrit review found"
+ message="Job: $job"
fi
+git commit -sm "$GERRIT_COMMIT_MESSAGE" -m "$message"
git status
git remote add gerrit "ssh://$GERRIT_USER@$GERRIT_HOST:29418/$PROJECT.git"
-# if the reviewers email is empty then use a default
-REVIEWERS_EMAIL=${REVIEWERS_EMAIL:-"$GERRIT_USER@$GERRIT_HOST"}
+# If the reviewers email is unset/empty then use a default
+reviewers_email=${REVIEWERS_EMAIL:-"$GERRIT_USER@$GERRIT_HOST"}
-# Don't fail the build if this command fails because it's possible that there
-# is no changes since last update.
-git review --yes -t "$GERRIT_TOPIC" --reviewers "$REVIEWERS_EMAIL" || true
+git review --yes -t "$GERRIT_TOPIC" --reviewers "$reviewers_email"
+++ /dev/null
-#!/bin/bash
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-##############################################################################
-echo "---> git-validate-jira-urls.sh"
-# This script will make sure that there are no JIRA URLs in the commit
-# message. JIRA URLs will break the its-jira plugin
-
-# Ensure we fail the job if any steps fail.
-# Do not treat undefined variables as errors as in this case we are allowed
-# to have JIRA_URL undefined
-set -e -o pipefail
-set +u
-
-if [ -n "${JIRA_URL}" ];
-then
- BASE_URL=$(echo "$JIRA_URL" | awk -F'/' '{print $3}')
- JIRA_LINK=$(git rev-list --format=%B --max-count=1 HEAD | grep -io "http[s]*://$BASE_URL/" || true)
- if [[ -n "$JIRA_LINK" ]]
- then
- echo 'Remove JIRA URLs from commit message'
- echo 'Add jira references as: Issue: <JIRAKEY>-<ISSUE#>, instead of URLs'
- exit 1
- fi
-fi
set -eu -o pipefail
-os_plugin_version="$(lftools jenkins plugins list \
- | grep -i 'OpenStack Cloud Plugin')"
-
testversion() {
local current_val="$1" operator="$2" test_value="$3"
awk -vv1="$current_val" -vv2="$test_value" 'BEGIN {
instance_cap=$(get_cfg "$cfg_file" INSTANCE_CAP "null")
fi
- floating_ip_pool=$(get_cfg "$cfg_file" FLOATING_IP_POOL "null")
+ floating_ip_pool=$(get_cfg "$cfg_file" FLOATING_IP_POOL "")
security_groups=$(get_cfg "$cfg_file" SECURITY_GROUPS "default")
availability_zone=$(get_cfg "$cfg_file" AVAILABILITY_ZONE "")
start_timeout=$(get_cfg "$cfg_file" START_TIMEOUT "600000")
kpn_default="$(get_cfg "$(dirname "$cfg_file")/cloud.cfg" KEY_PAIR_NAME "jenkins-ssh")"
key_pair_name=$(get_cfg "$cfg_file" KEY_PAIR_NAME "$kpn_default")
num_executors=$(get_cfg "$cfg_file" NUM_EXECUTORS "1")
- jvm_options=$(get_cfg "$cfg_file" JVM_OPTIONS "null")
+ jvm_options=$(get_cfg "$cfg_file" JVM_OPTIONS "")
fs_root=$(get_cfg "$cfg_file" FS_ROOT "/w")
connection_type=$(get_cfg "$cfg_file" CONNECTION_TYPE "SSH")
launcher_factory=$(get_launcher_factory "$connection_type")
export JENKINS_USER
export JENKINS_PASSWORD
+ # JENKINS_{URL,USER,PASSWORD} env vars are required for the "lftools jenkins
+ # plugins list" call
+ os_plugin_version="$(lftools jenkins plugins list \
+ | grep -i 'OpenStack Cloud Plugin')"
+
echo "-----> Groovy script $script_file"
for cloud in "${clouds[@]}"; do
cfg_dir="${cloud}"
+++ /dev/null
-#!/bin/bash
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2015 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-##############################################################################
-echo "---> jjb-check-unicode.sh"
-
-if LC_ALL=C grep -I -r '[^[:print:][:space:]]' jjb/; then
- echo "Found files containing non-ascii characters."
- exit 1
-fi
-
-echo "All files are ASCII only"
+++ /dev/null
-#!/bin/bash
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-##############################################################################
-echo "---> jjb-cleanup.sh"
-# Cleans up the temporary directory created for the virtualenv but only if it
-# exists under /tmp. This is to ensure we never attempt to blow away '/'
-# through mis-set bash variables.
-
-# Ensure we fail the job if any steps fail.
-# DO NOT set -u as virtualenv's activate script has unbound variables
-set -e +u -o pipefail
-
-# shellcheck source="$WORKSPACE/.jjb.properties" disable=SC1091
-source "$WORKSPACE/.jjb.properties"
-if [[ -n "$JJB_VENV" && "$JJB_VENV" =~ /tmp/.* ]]; then
- rm -rf "$JJB_VENV" && echo "$JJB_VENV removed"
- unset JJB_VENV
-fi
-rm -f "$WORKSPACE/.jjb.properties"
-deactivate
+++ /dev/null
-#!/bin/bash -l
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-##############################################################################
-echo "---> jjb-install.sh"
-
-# Ensure we fail the job if any steps fail.
-# DO NOT set -u as virtualenv's activate script has unbound variables
-set -e -o pipefail
-
-# Create a virtualenv in a temporary directoy and write it down to used
-# or cleaned up later; cleanup is done in the script jjb-cleanup.sh.
-JJB_VENV="$(mktemp -d)"
-export JJB_VENV
-virtualenv "$JJB_VENV"
-echo "JJB_VENV=$JJB_VENV" > "$WORKSPACE/.jjb.properties"
-# shellcheck source=$VENV_DIR/bin/activate disable=SC1091
-source "$JJB_VENV/bin/activate"
-python -m pip install --quiet --upgrade "jenkins-job-builder==$JJB_VERSION"
-
-echo "----> pip freeze"
-pip freeze
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
##############################################################################
-echo "---> build-cost.sh"
+echo "---> job-cost.sh"
set -euf -o pipefail
# shellcheck disable=SC1090
source ~/lf-env.sh
-lf-activate-venv python-openstackclient
+# AWS job cost not supported, exit
+if grep -qi amazon /sys/devices/virtual/dmi/id/bios_vendor ; then
+ echo "INFO: Not able to calculate job cost on AWS"
+ exit 0
+fi
+
+lf-activate-venv zipp==1.1.0 python-openstackclient
if [[ -z ${JOB_NAME:-} ]]; then
lf-echo-error "Required Env Variable Unset/Empty: JOB_NAME"
# Convert to integer by truncating fractional part' and round up by one
((uptime=${uptime%\.*}+1))
+# EC2 and OpenStack have similar instance metadata APIs at this IP
+# AWS docs: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
+# Nova docs: https://docs.openstack.org/nova/latest/user/metadata.html
instance_type=$(curl -s http://169.254.169.254/latest/meta-data/instance-type)
echo "INFO: Retrieving Pricing Info for: $instance_type"
url="https://pricing.vexxhost.net/v1/pricing/$instance_type/cost?seconds=$uptime"
json_block=$(curl -s "$url")
-cost=$(jq .cost <<< "$json_block")
-resource=$(jq .resource <<< "$json_block" | tr -d '"')
+# check if JSON returned and can be parsed
+if jq <<< "$json_block" > /dev/null 2>&1; then
+ cost=$(jq .cost <<< "$json_block")
+ resource=$(jq .resource <<< "$json_block" | tr -d '"')
+else
+ echo "ERROR: Pricing API returned invalid json"
+ cost=0
+ resource='unknown'
+fi
# Archive the cost data
mkdir -p "$WORKSPACE/archives/cost"
# This format is readable by spreadsheet and is easily sortable
date=$(TZ=GMT date +'%Y-%m-%d %H:%M:%S')
-cat << EOF > "$WORKSPACE/archives/cost.csv"
-$JOB_NAME,$BUILD_NUMBER,$date,$resource,$uptime,$cost,$stack_cost
-EOF
-
+# Format the uptime, cost & stack_cost fields
+printf "%s,%s,%s,%s,%d,%.2f,%.2f\n" "$JOB_NAME" "$BUILD_NUMBER" "$date" \
+ "$resource" "$uptime" "$cost" "$stack_cost" > "$WORKSPACE/archives/cost.csv"
pip install --quiet --upgrade "pip==9.0.3" setuptools
pip install --quiet --upgrade nodeenv
-echo "----> pip freeze"
+# installs are silent, show version details in log
+python --version
+pip --version
pip freeze
+++ /dev/null
-#!/bin/bash
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-##############################################################################
-# pip install packages into a virtualenv using the first listed package as venv name
-#
-# PIP_PACKAGES is a space separated list of pypi packages to install. The first
-# listed package is used as the virtualenv directory name.
-echo "---> pip-install.sh"
-
-# Ensure we fail the job if any steps fail.
-# DO NOT set -u as virtualenv's activate script has unbound variables
-set -e -o pipefail
-
-# Install git-review using virtualenv to the latest version that supports
-# --reviewers option, available through pip install. Existing minion image has a
-# version that does not have it.
-virtualenv "/tmp/v/${PIP_PACKAGES%% *}"
-# shellcheck source=/tmp/v/venv/bin/activate disable=SC1091
-source "/tmp/v/${PIP_PACKAGES%% *}/bin/activate"
-pip install --quiet --upgrade "pip==9.0.3" setuptools
-pip install --quiet --upgrade pipdeptree
-
-# PIP_PACKAGES needs to be passed through as a space separated list of packages
-# shellcheck disable=SC2086
-pip install --upgrade $PIP_PACKAGES
-
-echo "----> Pip Dependency Tree"
-pipdeptree
python3 -m pip install --user --quiet --no-warn-script-location --upgrade setuptools
python3 -m pip install --user --quiet --no-warn-script-location --upgrade --upgrade-strategy eager -r "$requirements_file"
# installs are silent, show version details in log
+ python3 --version
+ python3 -m pip --version
python3 -m pip freeze
rm -rf "$requirements_file"
touch /tmp/pre-build-complete
echo "INFO: creating virtual environment"
virtualenv -p python3 /tmp/venv
PATH=/tmp/venv/bin:$PATH
-pipup="python -m pip install -q --upgrade pip lftools jsonschema niet twine yq"
+pipup="python -m pip install -q --upgrade pip idna==2.8 lftools jsonschema niet twine yq"
echo "INFO: $pipup"
$pipup
+# show installed versions
+python -m pip --version
+python -m pip freeze
#Functions.
NEXUS_PATH="${SILO}/${JENKINS_HOSTNAME}/"
# Verify if using release file or parameters
if $USE_RELEASE_FILE ; then
- release_files=$(git diff-tree -m --no-commit-id -r "$GIT_COMMIT" --name-only -- "releases/" ".releases/")
+ release_files=$(git diff-tree -m --no-commit-id -r "$GIT_COMMIT" "$GIT_COMMIT^1" \
+ --name-only -- "releases/" ".releases/")
if (( $(grep -c . <<<"$release_files") > 1 )); then
echo "INFO: RELEASE FILES ARE AS FOLLOWS: $release_files"
- echo "ERROR: Committing multiple release files in the same commit OR rename/amend of existing files is not supported."
+ echo "ERROR: Adding multiple release files in the same commit"
+ echo "ERROR: OR rename/amend/delete of existing files is not supported."
exit 1
else
release_file="$release_files"
fi
# Jenkins parameter drop-down defaults DISTRIBUTION_TYPE to None
+ # in the container/maven release job; get the value from the release yaml.
+ # Packagecloud and PyPI jobs set the appropriate value.
DISTRIBUTION_TYPE="${DISTRIBUTION_TYPE:-None}"
if [[ $DISTRIBUTION_TYPE == "None" ]]; then
- DISTRIBUTION_TYPE=$(niet ".distribution_type" "$release_file")
+ if ! DISTRIBUTION_TYPE=$(niet ".distribution_type" "$release_file"); then
+ echo "ERROR: Failed to get distribution_type from $release_file"
+ exit 1
+ fi
fi
PATCH_DIR=$(mktemp -d)
echo "---> rtdv3.sh"
set -euo pipefail
+watchbuild(){
+ # Trigger a Read the Docs build of $rtdproject for branch $1 and poll
+ # until it completes; exits the script non-zero if the build fails.
+ # Globals: rtdproject (read). Arguments: $1 - branch name to build.
+ echo "INFO: Running build against branch $1"
+ local buildid
+ local result
+ buildid=$(lftools rtd project-build-trigger "$rtdproject" "$1" | jq '.build.id')
+
+ # jq '.success' prints "null" while the build is still running and a
+ # boolean once it finishes — TODO confirm against the RTD API.
+ result=null
+ while [[ $result == null ]]; do
+ sleep 10
+ result=$(lftools rtd project-build-details "$rtdproject" "$buildid" | jq '.success')
+ echo "INFO Current result of running build $result"
+ # jq emits booleans, never the word "failed", so compare against
+ # "false" (the previous "failed" test could never match and a
+ # failed build fell through with exit status 0).
+ if [[ $result == false ]]; then
+ echo "INFO: read the docs build completed with status: $result"
+ exit 1
+ fi
+ done
+ echo "INFO: read the docs build completed with status: $result"
+}
+
project_dashed="${PROJECT////-}"
umbrella=$(echo "$GERRIT_URL" | awk -F'.' '{print $2}')
if [[ "$SILO" == "sandbox" ]]; then
lftools rtd project-update "$rtdproject" default_version="$default_version"
fi
- lftools rtd project-build-trigger "$rtdproject" "$GERRIT_BRANCH"
if [[ $GERRIT_BRANCH == "master" ]]; then
- echo "INFO: triggering latest"
- lftools rtd project-build-trigger "$rtdproject" latest
+ echo "INFO: triggering $rtdproject latest"
+ watchbuild latest
else
- echo "INFO: triggering stable"
- lftools rtd project-build-trigger "$rtdproject" stable
+
+ # Read the Docs only understands lower-case branch names.
+ branch=$(echo "$GERRIT_BRANCH" | tr '[:upper:]' '[:lower:]')
+ echo "INFO: Checking if read the docs has seen branch $branch"
+
+ # If the version lookup pipeline fails (e.g. the branch is unknown to
+ # Read the Docs and the API returns 404), build "latest" so RTD runs
+ # its branch discovery and learns about the new branch.
+ if ! lftools rtd project-version-details "$rtdproject" "$branch" | jq '.active'; then
+ echo "INFO: read the docs has not seen branch $branch for project $rtdproject"
+ echo "INFO: triggering $rtdproject latest to instantiate new branch discovery"
+ watchbuild latest
+ fi
+
+ echo "INFO: triggering $rtdproject $branch"
+ watchbuild "$branch"
+
+ # Make newly discovered branches visible in the UI: discovery leaves
+ # them inactive, so flip the version to active when needed.
+ isactive=$(lftools rtd project-version-details "$rtdproject" "$branch" | jq '.active')
+ if [[ "$isactive" == false ]]; then
+ echo "INFO: Marking $branch as active for project $rtdproject"
+ lftools rtd project-version-update "$rtdproject" "$branch" true
+ fi
+
fi
fi
--- /dev/null
+#! /bin/bash
+# SPDX-License-Identifier: EPL-1.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Eclipse Public License v1.0
+# which accompanies this distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+##############################################################################
+
+# This script will be run on the Nexus Server for each project. Typically each
+# project will have multiple nexus silos. This script will find all the job cost
+# files (job_name/job_num/cost.csv) in each silo and append them to the annual
+# cost file (~nexus/cost/$silo-YYYY.csv). It will then hide the processed
+# job cost files (renaming cost.csv to .cost.csv).
+
+# Because this file is meant to be run by cron, normal info logging is
+# restricted to a single line. Errors may span multiple lines.
+#
+# Each cost file contains one or more CSV records in the following format:
+#
+# JobName , BuildNumber , Date , InstanceType , Uptime , Cost , StackCost
+#
+# Date format: '%Y-%m-%d %H:%M:%S'
+# Cost format: '%.2f' (1.29)
+#
+##############################################################################
+#
+# Testing/Validation
+#
+# You can validate this script by running as yourself on the Nexus server. You
+# should not have write permission anywhere in the Silo Directory. The
+# Silo Cost File from your test will be created/updated in:
+# ~/cost/$silo-$year.csv. If you run multiple times, duplicate records
+# will be created.
+#
+# To enable debug, set environment variable DEBUG=true. If this is done on
+# the command-line, you do not have to edit this file.
+debug=${DEBUG:-false}
+$debug && echo "DEBUG Enabled"
+#
+##############################################################################
+
+set -eufo pipefail
+
+function get-year-list()
+{
+ # Print the unique, sorted list of years appearing in the date field
+ # (CSV column 3, format YYYY-MM-DD ...) of the collected cost records.
+ # Reads the global $cost_file_records file; writes the list to stdout.
+ local list
+ list=$(awk -F',' '{print $3}' "$cost_file_records" \
+ | awk -F'-' '{print $1}' | sort | uniq)
+ echo "$list"
+}
+
+########### End of Function Definitions ######################################
+
+# Require exactly two arguments: the silo name and the silo directory.
+if [[ $# != 2 ]]; then
+ echo "usage: $(basename "$0") silo silo_dir"
+ exit 1
+fi
+
+# The Silo Dir is the top-level directory that contains the job directories,
+# which in turn contain the cost files (cost.csv).
+silo=$1
+silo_dir=$2
+
+# Scratch files keyed by PID.
+# NOTE(review): $$-based /tmp names are predictable; mktemp would be safer.
+cost_file_records=/tmp/cost-file-records$$
+cost_file_list=/tmp/cost-file-list$$
+# The directory where the annual cost file will be located
+cost_dir=~/cost
+[[ -d $cost_dir ]] || mkdir $cost_dir
+
+# The Silo Directory for sandbox will get deleted periodically, so
+# gracefully handle that
+if [[ -d $silo_dir ]]; then
+ cd "$silo_dir"
+else
+ echo "$(date +'%Y-%m-%d %H:%M') No Silo Directory, nothing to do"
+ exit 0
+fi
+
+# Collect every job cost file (up to 3 levels deep: job_name/job_num/cost.csv)
+# and merge all records, sorted by date (CSV column 3).
+find . -maxdepth 3 -name cost.csv > $cost_file_list
+xargs cat < $cost_file_list | \
+ sort --field-separator=',' --key=3 > $cost_file_records
+num_of_records=$(wc -l < $cost_file_records)
+echo -n "$(date +'%Y-%m-%d %H:%M') Records: $num_of_records "
+
+# NOTE(review): the string compare against "0" relies on GNU wc output;
+# BSD wc pads the count with leading spaces — confirm the host is Linux.
+if [[ $num_of_records == 0 ]]; then
+ echo "Nothing to do"
+ set +f
+ rm -rf /tmp/cost-file-* || true
+ exit 0
+fi
+
+# Append each entry to the silo cost file based on date
+year_list=$(get-year-list)
+for year in $year_list; do
+ echo -n "cost-$year.csv: $(grep -Fc ",$year-" $cost_file_records) "
+ grep -F ",$year-" $cost_file_records >> \
+ "$cost_dir/$silo-$year.csv"
+done
+
+# Rename the job cost files (make them hidden)
+while read -r p; do
+ job_dir=$(dirname "$p")
+ # Skip the rename when DEBUG=true so test runs are non-destructive.
+ $debug || (cd "$job_dir" ; mv cost.csv .cost.csv)
+done < $cost_file_list
+
+rm -r $cost_file_list $cost_file_records
+
+echo -n "Sorting: "
+# Sort the silo cost file by 'date' (column 3)
+for year in $year_list; do
+ echo -n "cost-$year.csv "
+ sort --field-separator=',' --key=3 \
+ -o "$cost_dir/$silo-$year.csv" \
+ "$cost_dir/$silo-$year.csv"
+done
+
+# Re-enable globbing (disabled by 'set -f' at the top) so the wildcard expands.
+set +f
+rm -rf /tmp/cost-file-* || true
+
+# $SECONDS is bash's elapsed wall-clock time since the script started.
+echo "Complete $SECONDS Secs"
# Tox version is pulled in through detox to mitigate version conflict
-
if [[ $PYTHON == "python2" ]]; then
$PYTHON -m pip install --user --quiet --upgrade tox tox-pyenv virtualenv more-itertools~=5.0.0
else
$PYTHON -m pip install --user --quiet --upgrade tox tox-pyenv virtualenv
fi
-
+# installs are silent, show version details in log
+$PYTHON --version
+$PYTHON -m pip --version
$PYTHON -m pip freeze
echo "---> tox-run.sh"
ARCHIVE_TOX_DIR="$WORKSPACE/archives/tox"
+ARCHIVE_DOC_DIR="$WORKSPACE/archives/docs"
mkdir -p "$ARCHIVE_TOX_DIR"
cd "$WORKSPACE/$TOX_DIR" || exit 1
DOC_DIR="${DOC_DIR:-docs/_build/html}"
if [[ -d "$DOC_DIR" ]]; then
echo "---> Archiving generated docs"
- mv "$DOC_DIR" archives/
+ mv "$DOC_DIR" "$ARCHIVE_DOC_DIR"
fi
test "$tox_status" -eq 0 || exit "$tox_status"
pre-commit
commands =
pre-commit run --all-files
+ pre-commit run gitlint --hook-stage commit-msg --commit-msg-filename .git/COMMIT_EDITMSG
#########
# Tools #