From: Guillaume Lambert Date: Tue, 4 May 2021 12:13:45 +0000 (+0200) Subject: Fix: bashate E003 warnings X-Git-Tag: v0.63.1^0 X-Git-Url: https://gerrit.linuxfoundation.org/infra/gitweb?a=commitdiff_plain;h=5beb45586451cb772982de881cd09fd97360be87;p=releng%2Fglobal-jjb.git Fix: bashate E003 warnings Indentations must be aligned to multiple of 4 columns. Signed-off-by: Guillaume Lambert Change-Id: I0bdb8b2fafd1811e55fbbacaa4c4cb6a2b59f822 --- diff --git a/ensure-documented.sh b/ensure-documented.sh index bc1077d5..530c2166 100755 --- a/ensure-documented.sh +++ b/ensure-documented.sh @@ -16,16 +16,16 @@ mapfile -t jjb_files < <(find jjb -name "*.yaml") undocumented_count=0 for file in "${jjb_files[@]}"; do mapfile -t docs_interests < <(grep -e '\- builder:' \ - -e '\- job-template:' \ - -e '\- parameter:' \ - -e '\- property:' \ - -e '\- publisher:' \ - -e '\- scm:' \ - -e '\- trigger:' \ - -e '\- wrapper:' \ - -A1 "$file" \ - | grep 'name: ' | awk -F': ' '{print $2}' | sort | uniq \ - | tr -d "'" | tr -d '"') + -e '\- job-template:' \ + -e '\- parameter:' \ + -e '\- property:' \ + -e '\- publisher:' \ + -e '\- scm:' \ + -e '\- trigger:' \ + -e '\- wrapper:' \ + -A1 "$file" \ + | grep 'name: ' | awk -F': ' '{print $2}' | sort | uniq \ + | tr -d "'" | tr -d '"') for item in "${docs_interests[@]}"; do if ! grep -q "$item" "docs/${file//.yaml/.rst}"; then diff --git a/jenkins-init-scripts/create-jenkins-user.sh b/jenkins-init-scripts/create-jenkins-user.sh index bde227cb..980eacf1 100755 --- a/jenkins-init-scripts/create-jenkins-user.sh +++ b/jenkins-init-scripts/create-jenkins-user.sh @@ -13,9 +13,9 @@ OS=$(facter operatingsystem | tr '[:upper:]' '[:lower:]') OS_RELEASE=$(facter lsbdistrelease | tr '[:upper:]' '[:lower:]') if [[ "$OS_RELEASE" == "18.04" && "$OS" == 'ubuntu' ]]; then - # We do not want var expansion here as profile script expands at runtime. - # shellcheck disable=SC2016 - echo 'export PATH=$HOME/.local/bin:$PATH' >> /etc/profile + # We do not want var expansion here as profile script expands at runtime. 
+ # shellcheck disable=SC2016 + echo 'export PATH=$HOME/.local/bin:$PATH' >> /etc/profile fi useradd -m -s /bin/bash jenkins diff --git a/jenkins-init-scripts/lf-env.sh b/jenkins-init-scripts/lf-env.sh index 01ecda32..24a7637b 100644 --- a/jenkins-init-scripts/lf-env.sh +++ b/jenkins-init-scripts/lf-env.sh @@ -79,9 +79,9 @@ lf-boolean () { true) return 0 ;; false) return 1 ;; '') - lf-echo-stderr "ERROR: ${FUNCNAME[0]}() line:{BASH_LINENO[0]} : A boolean cannot be a empty string" >&2 - return 2 - ;; + lf-echo-stderr "ERROR: ${FUNCNAME[0]}() line:{BASH_LINENO[0]} : A boolean cannot be a empty string" >&2 + return 2 + ;; *) lf-echo-stderr "ERROR: ${FUNCNAME[0]}() line: ${BASH_LINENO[0]} : Invalid value for a boolean: '$bool'" return 2 @@ -201,7 +201,7 @@ lf-activate-venv () { # $pkg_list is expected to be unquoted # shellcheck disable=SC2086 "$lf_venv/bin/pip" install --upgrade --quiet --upgrade-strategy eager \ - $pkg_list || return 1 + $pkg_list || return 1 ;; *) lf-echo-stderr "${FUNCNAME[0]}(): ERROR: No support for: $python" diff --git a/shell/capture-instance-metadata.sh b/shell/capture-instance-metadata.sh index c102d53c..0998dd1a 100644 --- a/shell/capture-instance-metadata.sh +++ b/shell/capture-instance-metadata.sh @@ -25,8 +25,8 @@ fi # AWS not supported, exit cloudtype="$(jq -r .v1.datasource /run/cloud-init/result.json)" if [[ $cloudtype == "DataSourceEc2Local" ]]; then - echo "INFO: Running in AWS, not capturing instance metadata" - exit 0 + echo "INFO: Running in AWS, not capturing instance metadata" + exit 0 fi # Retrive OpenStack instace metadata APIs at this IP. diff --git a/shell/check-info-votes.sh b/shell/check-info-votes.sh index 32384be4..da6b608e 100644 --- a/shell/check-info-votes.sh +++ b/shell/check-info-votes.sh @@ -27,10 +27,10 @@ if [ -d "/opt/pyenv" ]; then export PYTHONPATH latest_version=$(pyenv versions \ - | sed s,*,,g \ - | awk '/[0-9]+/{ print $1 }' \ - | sort --version-sort \ - | awk '/./{line=$0} END{print line}') + | sed s,*,,g \ + | awk '/[0-9]+/{ print $1 }' \ + | sort --version-sort \ + | awk '/./{line=$0} END{print line}') pyenv local "$latest_version" export PYENV_VERSION="3.6.4" diff --git a/shell/comment-to-gerrit.sh b/shell/comment-to-gerrit.sh index 90eadcef..c37647b7 100644 --- a/shell/comment-to-gerrit.sh +++ b/shell/comment-to-gerrit.sh @@ -12,14 +12,14 @@ echo "---> comment-to-gerrit.sh" set -xe -o pipefail if [[ -e gerrit_comment.txt ]] ; then - echo - echo "posting review comment to gerrit..." - echo - cat gerrit_comment.txt - echo - ssh -p 29418 "$GERRIT_HOST" \ - "gerrit review -p $GERRIT_PROJECT \ - -m '$(cat gerrit_comment.txt)' \ - $GERRIT_PATCHSET_REVISION \ - --notify NONE" + echo + echo "posting review comment to gerrit..." + echo + cat gerrit_comment.txt + echo + ssh -p 29418 "$GERRIT_HOST" \ + "gerrit review -p $GERRIT_PROJECT \ + -m '$(cat gerrit_comment.txt)' \ + $GERRIT_PATCHSET_REVISION \ + --notify NONE" fi diff --git a/shell/deploy-maven-file.sh b/shell/deploy-maven-file.sh index 9721ee22..e4559abb 100644 --- a/shell/deploy-maven-file.sh +++ b/shell/deploy-maven-file.sh @@ -41,10 +41,10 @@ while IFS="" read -r file; do file_size=$(stat --printf="%s" "${file}") echo "Deploy ${file##*/} with ${file_size} bytes." 
lftools deploy maven-file "$MAVEN_REPO_URL" \ - "$REPO_ID" \ - "$file" \ - -b "$MVN" \ - -g "$GROUP_ID" \ - -p "$MAVEN_OPTIONS $MAVEN_PARAMS" \ - |& tee "$DEPLOY_LOG" + "$REPO_ID" \ + "$file" \ + -b "$MVN" \ + -g "$GROUP_ID" \ + -p "$MAVEN_OPTIONS $MAVEN_PARAMS" \ + |& tee "$DEPLOY_LOG" done < <(find "$UPLOAD_FILES_PATH" -xtype f -name "*") diff --git a/shell/gerrit-push-patch.sh b/shell/gerrit-push-patch.sh index aae77f59..ab9d57bc 100644 --- a/shell/gerrit-push-patch.sh +++ b/shell/gerrit-push-patch.sh @@ -51,9 +51,9 @@ lf-activate-venv "git-review==1.28" # Query for a pre-existing gerrit review query_result=$(ssh -p 29418 "$GERRIT_USER@$GERRIT_HOST" gerrit query \ - limit:1 owner:self is:open project:"$PROJECT" \ - message: "$GERRIT_COMMIT_MESSAGE" \ - topic: "$GERRIT_TOPIC") + limit:1 owner:self is:open project:"$PROJECT" \ + message: "$GERRIT_COMMIT_MESSAGE" \ + topic: "$GERRIT_TOPIC") # Extract the change_id from the query_result job=$JOB_NAME/$BUILD_NUMBER diff --git a/shell/gpg-verify-git-signature.sh b/shell/gpg-verify-git-signature.sh index 9970fcc5..20bb7456 100644 --- a/shell/gpg-verify-git-signature.sh +++ b/shell/gpg-verify-git-signature.sh @@ -11,9 +11,9 @@ echo "---> gpg-verify-git-signature.sh" if git log --show-signature -1 | grep -E -q 'gpg: Signature made.*key ID'; then - echo "Git commit is GPG signed." + echo "Git commit is GPG signed." else - echo "WARNING: GPG signature missing for the commit." + echo "WARNING: GPG signature missing for the commit." fi # Do NOT fail the job for unsigned commits diff --git a/shell/info-file-validate.sh b/shell/info-file-validate.sh index 8a4bc6b3..40d96b6d 100755 --- a/shell/info-file-validate.sh +++ b/shell/info-file-validate.sh @@ -32,12 +32,12 @@ python yaml-verify-schema.py \ REPO_LIST="$(yq -r '.repositories[]' INFO.yaml)" while IFS= read -r project; do - if [[ "$project" == "$PROJECT" ]]; then - echo "$project is valid" - else - echo "ERROR: $project is invalid" - echo "INFO.yaml file may only list one repository" - echo "Repository must match $PROJECT" - exit 1 - fi + if [[ "$project" == "$PROJECT" ]]; then + echo "$project is valid" + else + echo "ERROR: $project is invalid" + echo "INFO.yaml file may only list one repository" + echo "Repository must match $PROJECT" + exit 1 + fi done <<< "$REPO_LIST" diff --git a/shell/jenkins-configure-clouds.sh b/shell/jenkins-configure-clouds.sh index cc6a4eec..18589c0b 100644 --- a/shell/jenkins-configure-clouds.sh +++ b/shell/jenkins-configure-clouds.sh @@ -49,13 +49,13 @@ set -eu -o pipefail testversion() { local current_val="$1" operator="$2" test_value="$3" awk -vv1="$current_val" -vv2="$test_value" 'BEGIN { - split(v1, a, /\:/); - if (a[2] == '"$test_value"') { - exit (a[2] == '"$test_value"') ? 0 : 1 - } - else { - exit (a[2] '"$operator"' '"$test_value"') ? 0 : 1 - } + split(v1, a, /\:/); + if (a[2] == '"$test_value"') { + exit (a[2] == '"$test_value"') ? 0 : 1 + } + else { + exit (a[2] '"$operator"' '"$test_value"') ? 
0 : 1 + } }' } @@ -237,11 +237,11 @@ get_minion_options() { # Handle specifying the minimum instance count across different versions if testversion "$os_plugin_version" '>=' '2.47' then - instance_min=$(get_cfg "$cfg_file" INSTANCE_MIN "null") - echo " $instance_min," + instance_min=$(get_cfg "$cfg_file" INSTANCE_MIN "null") + echo " $instance_min," else - instance_min=$(get_cfg "$cfg_file" INSTANCE_MIN_CAPMAX "null") - echo " $instance_min," + instance_min=$(get_cfg "$cfg_file" INSTANCE_MIN_CAPMAX "null") + echo " $instance_min," fi echo " \"$floating_ip_pool\"," @@ -256,11 +256,11 @@ get_minion_options() { if testversion "$os_plugin_version" '>=' '2.47' then - echo " $node_properties," - echo " $retention_time", - echo " $config_drive" + echo " $node_properties," + echo " $retention_time", + echo " $config_drive" else - echo " $retention_time" + echo " $retention_time" fi @@ -322,7 +322,7 @@ for silo in $silos; do # JENKINS_{URL,USER,PASSWORD} env vars are required for the "lftools jenkins # plugins list" call os_plugin_version="$(lftools jenkins plugins list \ - | grep -i 'OpenStack Cloud Plugin')" + | grep -i 'OpenStack Cloud Plugin')" echo "-----> Groovy script $script_file" for cloud in "${clouds[@]}"; do diff --git a/shell/jenkins-verify-images.sh b/shell/jenkins-verify-images.sh index 663dfb6e..483a51b7 100755 --- a/shell/jenkins-verify-images.sh +++ b/shell/jenkins-verify-images.sh @@ -18,43 +18,43 @@ error=false verify_images() { - echo "Verifying images on $1" - for file in "$1"/*; do - # Set the $IMAGE_NAME variable to the the file's IMAGE_NAME value - export "$(grep ^IMAGE_NAME= "$file")" - # The image should be listed as active + echo "Verifying images on $1" + for file in "$1"/*; do + # Set the $IMAGE_NAME variable to the the file's IMAGE_NAME value + export "$(grep ^IMAGE_NAME= "$file")" + # The image should be listed as active - if ! openstack image list --property name="$IMAGE_NAME" | grep "active"; then - echo "ERROR: No matching image found for $IMAGE_NAME" - error=true - fi - # Set the $HARDWARE_ID variable to the the file's HARDWARE_ID value - export "$(grep ^HARDWARE_ID= "$file")" - # The flavor should be listed. Spaces in grep string ensure complete match. + if ! openstack image list --property name="$IMAGE_NAME" | grep "active"; then + echo "ERROR: No matching image found for $IMAGE_NAME" + error=true + fi + # Set the $HARDWARE_ID variable to the the file's HARDWARE_ID value + export "$(grep ^HARDWARE_ID= "$file")" + # The flavor should be listed. Spaces in grep string ensure complete match. - if ! openstack flavor list | grep " $HARDWARE_ID "; then - echo "ERROR: No matching flavor found for $HARDWARE_ID" - error=true - fi - done + if ! openstack flavor list | grep " $HARDWARE_ID "; then + echo "ERROR: No matching flavor found for $HARDWARE_ID" + error=true + fi + done } echo "Verifying that cloud has a master configuration file" if [[ -d jenkins-config/clouds/openstack ]]; then - for cloud in jenkins-config/clouds/openstack/*; do - if [[ -f $cloud/cloud.cfg ]]; then - # Get the OS_CLOUD variable from cloud config - if ! os_cloud=$(grep -E "^OS_CLOUD=" "$cloud/cloud.cfg" | cut -d'=' -f2); then - os_cloud="vex" - fi - OS_CLOUD=$os_cloud verify_images "$cloud" - else - echo "ERROR: No cloud.cfg for $cloud" - error=true - fi - done + for cloud in jenkins-config/clouds/openstack/*; do + if [[ -f $cloud/cloud.cfg ]]; then + # Get the OS_CLOUD variable from cloud config + if ! 
os_cloud=$(grep -E "^OS_CLOUD=" "$cloud/cloud.cfg" | cut -d'=' -f2); then + os_cloud="vex" + fi + OS_CLOUD=$os_cloud verify_images "$cloud" + else + echo "ERROR: No cloud.cfg for $cloud" + error=true + fi + done fi if $error; then - exit 1 + exit 1 fi diff --git a/shell/jjb-verify-build-nodes.sh b/shell/jjb-verify-build-nodes.sh index 93d85be8..9c5bfecb 100644 --- a/shell/jjb-verify-build-nodes.sh +++ b/shell/jjb-verify-build-nodes.sh @@ -34,20 +34,20 @@ jjbdir="jjb" # function to test if the argument is empty, # is two double quotes, or has unwanted suffix isBadLabel () { - local label="$1" - [[ -z "$label" ]] || [[ $label = "\"\"" ]] || [[ $label = *"$suffix" ]] + local label="$1" + [[ -z "$label" ]] || [[ $label = "\"\"" ]] || [[ $label = *"$suffix" ]] } # function to search an array for a value # $1 is value # $2 is array, passed via ${array[@]} isValueInArray () { - local e match="$1" - shift - for e; do - [[ "$e" == "$match" ]] && return 0 - done - return 1 + local e match="$1" + shift + for e; do + [[ "$e" == "$match" ]] && return 0 + done + return 1 } # check prereqs diff --git a/shell/job-cost.sh b/shell/job-cost.sh index 4648a953..b856f6a5 100644 --- a/shell/job-cost.sh +++ b/shell/job-cost.sh @@ -25,8 +25,8 @@ fi # AWS job cost not supported, exit cloudtype="$(jq -r .v1.datasource /run/cloud-init/result.json)" if [[ $cloudtype == "DataSourceEc2Local" ]]; then - echo "INFO: Not able to calculate job cost on AWS" - exit 0 + echo "INFO: Not able to calculate job cost on AWS" + exit 0 fi lf-activate-venv zipp==1.1.0 python-openstackclient diff --git a/shell/maven-fetch-metadata.sh b/shell/maven-fetch-metadata.sh index 3c9b78f4..4f719e31 100644 --- a/shell/maven-fetch-metadata.sh +++ b/shell/maven-fetch-metadata.sh @@ -28,9 +28,9 @@ set -xeu -o pipefail project=$(xmlstarlet sel \ -N "x=http://maven.apache.org/POM/4.0.0" -t \ --if "/x:project/x:groupId" \ - -v "/x:project/x:groupId" \ + -v "/x:project/x:groupId" \ --elif "/x:project/x:parent/x:groupId" \ - -v "/x:project/x:parent/x:groupId" \ + -v "/x:project/x:parent/x:groupId" \ --else -o "" "$pom_path") project_path="${project//.//}" diff --git a/shell/maven-javadoc-generate.sh b/shell/maven-javadoc-generate.sh index 4a66eece..1d0682b9 100644 --- a/shell/maven-javadoc-generate.sh +++ b/shell/maven-javadoc-generate.sh @@ -29,15 +29,15 @@ maven_dir_abs=$(readlink -f "$MAVEN_DIR") # shellcheck disable=SC2086 # Use -x via subshell to show maven invocation details in the log (set -x - $MVN clean install javadoc:aggregate \ - -f "$maven_dir_abs" \ - -e -Pq -Dmaven.javadoc.skip=false \ - -DskipTests=true \ - -Dcheckstyle.skip=true \ - -Dfindbugs.skip=true \ - --global-settings "$GLOBAL_SETTINGS_FILE" \ - --settings "$SETTINGS_FILE" \ - $MAVEN_OPTIONS $MAVEN_PARAMS + $MVN clean install javadoc:aggregate \ + -f "$maven_dir_abs" \ + -e -Pq -Dmaven.javadoc.skip=false \ + -DskipTests=true \ + -Dcheckstyle.skip=true \ + -Dfindbugs.skip=true \ + --global-settings "$GLOBAL_SETTINGS_FILE" \ + --settings "$SETTINGS_FILE" \ + $MAVEN_OPTIONS $MAVEN_PARAMS ) mv "$WORKSPACE/$MAVEN_DIR/target/site/apidocs" "$JAVADOC_DIR" diff --git a/shell/openstack-cleanup-orphaned-ports.sh b/shell/openstack-cleanup-orphaned-ports.sh index 3030fdf2..080b5907 100644 --- a/shell/openstack-cleanup-orphaned-ports.sh +++ b/shell/openstack-cleanup-orphaned-ports.sh @@ -16,12 +16,12 @@ os_cloud="${OS_CLOUD:-vex}" set -eux -o pipefail mapfile -t os_ports_ts < <(openstack --os-cloud "$os_cloud" port list \ - -f value \ - -c ID \ - -c status \ - -c created_at \ - | grep -E DOWN 
\ - | awk -F' ' '{print $1 " " $3}') + -f value \ + -c ID \ + -c status \ + -c created_at \ + | grep -E DOWN \ + | awk -F' ' '{print $1 " " $3}') if [ ${#os_ports_ts[@]} -eq 0 ]; then echo "No orphaned ports found." diff --git a/shell/openstack-kubernetes-create.sh b/shell/openstack-kubernetes-create.sh index 3d9d00b1..2f1a2b7f 100755 --- a/shell/openstack-kubernetes-create.sh +++ b/shell/openstack-kubernetes-create.sh @@ -32,46 +32,52 @@ cluster_settle_time="${CLUSTER_SETTLE_TIME:-1m}" # Create the template for the cluster first. Returns the cluster ID as $template_uuid template_uuid=$(openstack coe cluster template create "$cluster_template_name" \ - --os-cloud "$os_cloud" \ - --image "$base_image" \ - --keypair "$keypair" \ - --external-network public \ - --fixed-network "$fixed_network" \ - --fixed-subnet "$fixed_subnet" \ - --floating-ip-disabled \ - --master-flavor "$master_flavor" \ - --flavor "$node_flavor" \ - --docker-volume-size "$docker_volume_size" \ - --network-driver flannel \ - --master-lb-enabled \ - --volume-driver cinder \ - --labels boot_volume_type=ssd,boot_volume_size="${boot_volume_size}",kube_version="${k8s_version}",kube_tag="${k8s_version}" \ - --coe kubernetes \ - -f value -c uuid | tail -1) + --os-cloud "$os_cloud" \ + --image "$base_image" \ + --keypair "$keypair" \ + --external-network public \ + --fixed-network "$fixed_network" \ + --fixed-subnet "$fixed_subnet" \ + --floating-ip-disabled \ + --master-flavor "$master_flavor" \ + --flavor "$node_flavor" \ + --docker-volume-size "$docker_volume_size" \ + --network-driver flannel \ + --master-lb-enabled \ + --volume-driver cinder \ + --labels \ +boot_volume_type=ssd,boot_volume_size="${boot_volume_size}",\ +kube_version="${k8s_version}",kube_tag="${k8s_version}" \ + --coe kubernetes \ + -f value -c uuid | tail -1) # Create the kubernetes cluster cluster_uuid=$(openstack coe cluster create "$cluster_name" \ - --os-cloud "$os_cloud" \ - --master-count "$master_count" \ - --node-count "$node_count" \ - --cluster-template "$template_uuid" | awk -F ' ' '{print $5}') + --os-cloud "$os_cloud" \ + --master-count "$master_count" \ + --node-count "$node_count" \ + --cluster-template "$template_uuid" | awk -F ' ' '{print $5}') # Sleep for a little, because sometimes OpenStack has to catch up with itself sleep 15 -while [ "$(openstack --os-cloud "$os_cloud" coe cluster show "$cluster_uuid" -c status -f value)" == "CREATE_IN_PROGRESS" ]; do - # echo "sleeping $(date)" - sleep 2m +while \ +[ "$(openstack --os-cloud "$os_cloud" coe cluster show "$cluster_uuid" \ +-c status -f value)" == "CREATE_IN_PROGRESS" ]; do + # echo "sleeping $(date)" + sleep 2m done -if [ "$(openstack --os-cloud "$os_cloud" coe cluster show "$cluster_uuid" -c status -f value)" == "CREATE_FAILED" ]; then - echo "Failed to create cluster: $cluster_uuid $(date)" - openstack --os-cloud "$os_cloud" coe cluster delete "$cluster_uuid" - sleep 5m - openstack --os-cloud "$os_cloud" coe cluster template delete "$template_uuid" - exit 1 +if [ "$(openstack --os-cloud "$os_cloud" coe cluster show "$cluster_uuid" \ +-c status -f value)" == "CREATE_FAILED" ]; then + echo "Failed to create cluster: $cluster_uuid $(date)" + openstack --os-cloud "$os_cloud" coe cluster delete "$cluster_uuid" + sleep 5m + openstack --os-cloud "$os_cloud" coe cluster template delete "$template_uuid" + exit 1 fi -if [ "$(openstack --os-cloud "$os_cloud" coe cluster show "$cluster_uuid" -c status -f value)" == "CREATE_COMPLETE" ]; then - echo "Successfully created cluster: $cluster_uuid." 
+if [ "$(openstack --os-cloud "$os_cloud" coe cluster show "$cluster_uuid" \ +-c status -f value)" == "CREATE_COMPLETE" ]; then + echo "Successfully created cluster: $cluster_uuid." fi diff --git a/shell/openstack-protect-in-use-images.sh b/shell/openstack-protect-in-use-images.sh index e2299060..468ea0d8 100644 --- a/shell/openstack-protect-in-use-images.sh +++ b/shell/openstack-protect-in-use-images.sh @@ -23,16 +23,16 @@ images=() while read -r -d $'\n' ; do images+=("$REPLY") done < <(grep -r IMAGE_NAME --include \*.cfg jenkins-config \ - | awk -F'=' '{print $2}' \ - | sort -u) + | awk -F'=' '{print $2}' \ + | sort -u) jjbimages=() while read -r -d $'\n' ; do jjbimages+=("$REPLY") done < <(grep -r 'ZZCI - ' --include \*.yaml jjb \ - | awk -F": " '{print $3}' \ - | sed -e "s:'::;s:'$::;/^$/d" -e 's/^"//' -e 's/"$//' \ - | sort -u) + | awk -F": " '{print $3}' \ + | sed -e "s:'::;s:'$::;/^$/d" -e 's/^"//' -e 's/"$//' \ + | sort -u) if ! [[ ${#images[@]} -eq 0 ]]; then echo "INFO: There are images to protect defined in jenkins-config." diff --git a/shell/openstack-stack-copy-ssh-keys.sh b/shell/openstack-stack-copy-ssh-keys.sh index a1a01cb1..79d75d91 100644 --- a/shell/openstack-stack-copy-ssh-keys.sh +++ b/shell/openstack-stack-copy-ssh-keys.sh @@ -53,9 +53,9 @@ copy_ssh_keys () { # IP Addresses are returned as a space separated list so word splitting is ok # shellcheck disable=SC2207 ip_addresses=($(openstack --os-cloud "$os_cloud" stack show -f json -c outputs "$stack_name" | - jq -r '.outputs[] | - select(.output_key | match("^vm_[0-9]+_ips$")) | - .output_value | .[]')) + jq -r '.outputs[] | + select(.output_key | match("^vm_[0-9]+_ips$")) | + .output_value | .[]')) pids="" for ip in "${ip_addresses[@]}"; do ( copy_ssh_keys "$ip" ) & diff --git a/shell/packer-install.sh b/shell/packer-install.sh index fea704a9..49077a10 100644 --- a/shell/packer-install.sh +++ b/shell/packer-install.sh @@ -39,10 +39,10 @@ version_ge() { test "$(echo "$@" | tr " " "\n" | sort -rV | head -n 1)" == "$1"; if hash packer.io 2>/dev/null; then CURRENT_VERSION="$(packer.io --version)" if version_lt "$CURRENT_VERSION" "$PACKER_VERSION"; then - echo "Packer version $CURRENT_VERSION installed is less than $PACKER_VERSION available, updating Packer." - packer_install + echo "Packer version $CURRENT_VERSION installed is less than $PACKER_VERSION available, updating Packer." + packer_install else - echo "Packer version installed $CURRENT_VERSION is greater than or equal to the required minimum version $PACKER_VERSION." + echo "Packer version installed $CURRENT_VERSION is greater than or equal to the required minimum version $PACKER_VERSION." fi else echo "Packer binary not available, installing Packer version $PACKER_VERSION." 
diff --git a/shell/python-tools-install.sh b/shell/python-tools-install.sh index e0e4eec1..72baf8a3 100644 --- a/shell/python-tools-install.sh +++ b/shell/python-tools-install.sh @@ -43,12 +43,12 @@ EOF #Python 3.5 in Ubuntu 16.04 workaround if [[ -f /etc/lsb-release ]]; then - # shellcheck disable=SC1091 - source /etc/lsb-release - if [[ $DISTRIB_RELEASE == "16.04" && $DISTRIB_ID == "Ubuntu" ]]; then - echo "WARNING: Python projects should move to Ubuntu 18.04 to continue receiving support" - echo "zipp==1.1.0" >> "$requirements_file" - fi + # shellcheck disable=SC1091 + source /etc/lsb-release + if [[ $DISTRIB_RELEASE == "16.04" && $DISTRIB_ID == "Ubuntu" ]]; then + echo "WARNING: Python projects should move to Ubuntu 18.04 to continue receiving support" + echo "zipp==1.1.0" >> "$requirements_file" + fi fi python3 -m pip install --user --quiet --upgrade pip diff --git a/shell/release-job.sh b/shell/release-job.sh index 80521924..3a732e0b 100644 --- a/shell/release-job.sh +++ b/shell/release-job.sh @@ -26,25 +26,25 @@ python -m pip freeze ########################## set_variables_artifact(){ - echo "INFO: Setting artifact variables" - if [[ -z ${VERSION:-} ]]; then - VERSION=$(yq -r ".version" "$release_file") - fi - if [[ -z ${GIT_TAG:-} ]]; then - if grep -q "git_tag" "$release_file" ; then - GIT_TAG=$(yq -r ".git_tag" "$release_file") - else - GIT_TAG="$VERSION" - fi - fi - if [[ -z ${REF:-} ]]; then - REF=$(yq -r ".ref" "$release_file") - fi - - printf "\t%-30s\n" RELEASE_ARTIFACT_INFO: - printf "\t%-30s %s\n" GERRIT_REF_TO_TAG: "$REF" - printf "\t%-30s %s\n" VERSION: "$VERSION" - printf "\t%-30s %s\n" GIT_TAG: "$GIT_TAG" + echo "INFO: Setting artifact variables" + if [[ -z ${VERSION:-} ]]; then + VERSION=$(yq -r ".version" "$release_file") + fi + if [[ -z ${GIT_TAG:-} ]]; then + if grep -q "git_tag" "$release_file" ; then + GIT_TAG=$(yq -r ".git_tag" "$release_file") + else + GIT_TAG="$VERSION" + fi + fi + if [[ -z ${REF:-} ]]; then + REF=$(yq -r ".ref" "$release_file") + fi + + printf "\t%-30s\n" RELEASE_ARTIFACT_INFO: + printf "\t%-30s %s\n" GERRIT_REF_TO_TAG: "$REF" + printf "\t%-30s %s\n" VERSION: "$VERSION" + printf "\t%-30s %s\n" GIT_TAG: "$GIT_TAG" } set_variables_common(){ @@ -59,13 +59,13 @@ set_variables_common(){ release_files=$(git diff-tree -m --no-commit-id -r "$GIT_COMMIT" "$GIT_COMMIT^1" \ --name-only -- "releases/" ".releases/") if (( $(grep -c . <<<"$release_files") > 1 )); then - echo "INFO: RELEASE FILES ARE AS FOLLOWS: $release_files" - echo "ERROR: Adding multiple release files in the same commit" - echo "ERROR: OR rename/amend/delete of existing files is not supported." - exit 1 + echo "INFO: RELEASE FILES ARE AS FOLLOWS: $release_files" + echo "ERROR: Adding multiple release files in the same commit" + echo "ERROR: OR rename/amend/delete of existing files is not supported." + exit 1 else - release_file="$release_files" - echo "INFO: RELEASE FILE: $release_files" + release_file="$release_files" + echo "INFO: RELEASE FILE: $release_files" fi else echo "INFO: This job is built with parameters, no release file needed." 
@@ -130,7 +130,7 @@ set_variables_container(){ else GIT_TAG="$VERSION" fi - fi + fi if grep -q "container_pull_registry" "$release_file" ; then CONTAINER_PULL_REGISTRY=$(yq -r ".container_pull_registry" "$release_file") fi @@ -178,33 +178,33 @@ set_variables_maven(){ } set_variables_packagecloud(){ - echo "INFO: Setting packagecloud variables" - if [[ -z ${VERSION:-} ]]; then - VERSION=$(yq -r ".version" "$release_file") - fi - if [[ -z ${GIT_TAG:-} ]]; then - if grep -q "git_tag" $release_file ; then - GIT_TAG=$(yq -r ".git_tag" "$release_file") - else - GIT_TAG="$VERSION" - fi - fi - if [[ -z ${LOG_DIR:-} ]]; then - LOG_DIR=$(yq -r ".log_dir" "$release_file") - fi - if [[ -z ${REF:-} ]]; then - REF=$(yq -r ".ref" "$release_file") - fi - if [[ -z ${PACKAGE_NAME:-} ]]; then - PACKAGE_NAME=$(yq -r ".package_name" "$release_file") - fi - - printf "\t%-30s %s\n" PACKAGE_NAME: "$PACKAGE_NAME" - printf "\t%-30s %s\n" LOG_DIR: "$LOG_DIR" - printf "\t%-30s %s\n" LOGS_URL: "$logs_url" - printf "\t%-30s %s\n" GIT_REF_TO_TAG: "$REF" - printf "\t%-30s %s\n" VERSION: "$VERSION" - printf "\t%-30s %s\n" GIT_TAG: "$GIT_TAG" + echo "INFO: Setting packagecloud variables" + if [[ -z ${VERSION:-} ]]; then + VERSION=$(yq -r ".version" "$release_file") + fi + if [[ -z ${GIT_TAG:-} ]]; then + if grep -q "git_tag" $release_file ; then + GIT_TAG=$(yq -r ".git_tag" "$release_file") + else + GIT_TAG="$VERSION" + fi + fi + if [[ -z ${LOG_DIR:-} ]]; then + LOG_DIR=$(yq -r ".log_dir" "$release_file") + fi + if [[ -z ${REF:-} ]]; then + REF=$(yq -r ".ref" "$release_file") + fi + if [[ -z ${PACKAGE_NAME:-} ]]; then + PACKAGE_NAME=$(yq -r ".package_name" "$release_file") + fi + + printf "\t%-30s %s\n" PACKAGE_NAME: "$PACKAGE_NAME" + printf "\t%-30s %s\n" LOG_DIR: "$LOG_DIR" + printf "\t%-30s %s\n" LOGS_URL: "$logs_url" + printf "\t%-30s %s\n" GIT_REF_TO_TAG: "$REF" + printf "\t%-30s %s\n" VERSION: "$VERSION" + printf "\t%-30s %s\n" GIT_TAG: "$GIT_TAG" } set_variables_pypi(){ @@ -228,7 +228,7 @@ set_variables_pypi(){ else GIT_TAG="$VERSION" fi - fi + fi # Continuing displaying Release Information (pypi) printf "\t%-30s\n" RELEASE_PYPI_INFO: @@ -321,8 +321,8 @@ verify_version_match_release(){ # TODO: write tag-github-repo function tag-git-repo(){ if [[ $TAG_RELEASE == false ]]; then - echo "INFO: Skipping code repo tag" - return + echo "INFO: Skipping code repo tag" + return fi if [[ -z ${GERRIT_URL:-} ]]; then @@ -478,10 +478,10 @@ nexus_release(){ #Run the loop twice, to catch errors on either nexus repo if [[ "$JOB_NAME" =~ "merge" ]] && [[ "$DRY_RUN" = false ]]; then for staging_url in $(zcat "$PATCH_DIR"/staging-repo.txt.gz | awk -e '{print $2}'); do - NEXUS_URL=$(echo "$staging_url" | sed -e 's|^[^/]*//||' -e 's|/.*$||') - STAGING_REPO=${staging_url#*repositories/} - echo "INFO: Promoting $STAGING_REPO on $NEXUS_URL." - lftools nexus release --server https://"$NEXUS_URL" "$STAGING_REPO" + NEXUS_URL=$(echo "$staging_url" | sed -e 's|^[^/]*//||' -e 's|/.*$||') + STAGING_REPO=${staging_url#*repositories/} + echo "INFO: Promoting $STAGING_REPO on $NEXUS_URL." 
+ lftools nexus release --server https://"$NEXUS_URL" "$STAGING_REPO" done fi } diff --git a/shell/rtdv3.sh b/shell/rtdv3.sh index eb229b33..e4e0b3c8 100644 --- a/shell/rtdv3.sh +++ b/shell/rtdv3.sh @@ -12,30 +12,30 @@ echo "---> rtdv3.sh" set -euo pipefail watchbuild(){ - echo "INFO: Running build against branch $1" - local buildid - local result - buildid=$(lftools rtd project-build-trigger "$rtdproject" "$1" | jq '.build.id') - - result=null - while [[ "$result" == null ]]; do - sleep 10 - result=$(lftools rtd project-build-details "$rtdproject" "$buildid" | jq '.success') - echo "INFO Current result of running build $result" - if [[ $result == failed ]]; then - echo "INFO: read the docs build completed with status: $result" - exit 1 - fi - done - echo "INFO: read the docs build completed with status: $result" + echo "INFO: Running build against branch $1" + local buildid + local result + buildid=$(lftools rtd project-build-trigger "$rtdproject" "$1" | jq '.build.id') + + result=null + while [[ "$result" == null ]]; do + sleep 10 + result=$(lftools rtd project-build-details "$rtdproject" "$buildid" | jq '.success') + echo "INFO Current result of running build $result" + if [[ $result == failed ]]; then + echo "INFO: read the docs build completed with status: $result" + exit 1 + fi + done + echo "INFO: read the docs build completed with status: $result" } project_dashed="${PROJECT////-}" umbrella=$(echo "$GERRIT_URL" | awk -F'.' '{print $2}') if [[ "$SILO" == "sandbox" ]]; then - rtdproject="$umbrella-$project_dashed-test" + rtdproject="$umbrella-$project_dashed-test" else - rtdproject="$umbrella-$project_dashed" + rtdproject="$umbrella-$project_dashed" fi #MASTER_RTD_PROJECT as a global jenkins cnt @@ -43,10 +43,10 @@ masterproject="$umbrella-$MASTER_RTD_PROJECT" #Exceptions needed for onap, due to importing their old docs. if [[ $masterproject == "onap-doc" ]]; then - masterproject="onap" + masterproject="onap" fi if [[ $rtdproject == "onap-doc" ]]; then - rtdproject="onap" + rtdproject="onap" fi @@ -57,104 +57,103 @@ echo "INFO: Read the Docs Master Project: https://$masterproject.readthedocs.io" if [[ "$JOB_NAME" =~ "verify" ]]; then - if [[ "$(lftools rtd project-details "$rtdproject" | yq -r '.detail')" == "Not found." ]]; then - echo "INFO: Project not found, merge will create project https://$rtdproject.readthedocs.io" - fi - -echo "INFO: Verify job completed" - + if [[ "$(lftools rtd project-details "$rtdproject" | yq -r '.detail')" == "Not found." ]]; then + echo "INFO: Project not found, merge will create project https://$rtdproject.readthedocs.io" + fi + echo "INFO: Verify job completed" fi if [[ "$JOB_NAME" =~ "merge" ]]; then -echo "INFO: Performing merge action" + echo "INFO: Performing merge action" + + # This retuns null if project exists. + project_exists=false + project_created=false + + declare -i cnt=0 + while [[ $project_exists == "false" ]]; do + if [[ "$(lftools rtd project-details "$rtdproject" | yq -r '.detail')" == "Not found." 
]]; then + echo "INFO: Project not found" + if [[ $project_created == "false" ]]; then + echo "INFO: Creating project https://$rtdproject.readthedocs.io" + lftools rtd project-create "$rtdproject" "$GERRIT_URL/$PROJECT" \ + git "https://$rtdproject.readthedocs.io" py en + project_created="true" + fi + echo "INFO sleeping for 30 seconds $cnt times" + sleep 30 + cnt=$((cnt+1)) + if (( cnt >= 20 )); then + echo "INFO: Job has timed out" + exit 1 + fi + else + echo "INFO: Project exists in read the docs as https://$rtdproject.readthedocs.io" + project_exists="true" + fi + done + + if [[ "$rtdproject" != "$masterproject" ]]; then + subproject_exists=false + while read -r subproject; do + if [[ "$subproject" == "$rtdproject" ]]; then + subproject_exists=true + break + fi + done < <(lftools rtd subproject-list "$masterproject") + + if $subproject_exists; then + echo "INFO: subproject $rtdproject relationship already created" + else + echo "INFO: Creating subproject relationship" + lftools rtd subproject-create "$masterproject" "$rtdproject" + echo "INFO sleeping for 10 seconds" + sleep 10 + fi + fi - # This retuns null if project exists. - project_exists=false - project_created=false + # api v3 method does not update /latest/ when master is triggered. + # Also, when we build anything other than master we want to trigger /stable/ as well. + # allow projects to change their landing page from latest to branch_name - declare -i cnt=0 - while [[ $project_exists == "false" ]]; do - if [[ "$(lftools rtd project-details "$rtdproject" | yq -r '.detail')" == "Not found." ]]; then - echo "INFO: Project not found" - if [[ $project_created == "false" ]]; then - echo "INFO: Creating project https://$rtdproject.readthedocs.io" - lftools rtd project-create "$rtdproject" "$GERRIT_URL/$PROJECT" git "https://$rtdproject.readthedocs.io" py en - project_created="true" - fi - echo "INFO sleeping for 30 seconds $cnt times" - sleep 30 - cnt=$((cnt+1)) - if (( cnt >= 20 )); then - echo "INFO: Job has timed out" + current_version="$(lftools rtd project-details "$rtdproject" | yq -r .default_version)" + if [[ -z ${DEFAULT_VERSION:-} ]]; then + echo "DEFAULT_VERSION (default-version) value cannot be empty" exit 1 - fi - else - echo "INFO: Project exists in read the docs as https://$rtdproject.readthedocs.io" - project_exists="true" fi - done - - if [[ "$rtdproject" != "$masterproject" ]]; then - subproject_exists=false - while read -r subproject; do - if [[ "$subproject" == "$rtdproject" ]]; then - subproject_exists=true - break - fi - done < <(lftools rtd subproject-list "$masterproject") - - if $subproject_exists; then - echo "INFO: subproject $rtdproject relationship already created" - else - echo "INFO: Creating subproject relationship" - lftools rtd subproject-create "$masterproject" "$rtdproject" - echo "INFO sleeping for 10 seconds" - sleep 10 - fi - fi - - # api v3 method does not update /latest/ when master is triggered. - # Also, when we build anything other than master we want to trigger /stable/ as well. 
- # allow projects to change their landing page from latest to branch_name - - current_version="$(lftools rtd project-details "$rtdproject" | yq -r .default_version)" - if [[ -z ${DEFAULT_VERSION:-} ]]; then - echo "DEFAULT_VERSION (default-version) value cannot be empty" - exit 1 - fi - default_version="${DEFAULT_VERSION}" - - echo "INFO: current default version $current_version" - if [[ $current_version != "$default_version" ]]; then - echo "INFO: Setting rtd landing page to $default_version" - lftools rtd project-update "$rtdproject" default_version="$default_version" - fi - - if [[ $GERRIT_BRANCH == "master" ]]; then - echo "INFO: triggering $rtdproject latest" - watchbuild latest - else - - #read the docs only understands lower case branch names - branch=$(echo "$GERRIT_BRANCH" | tr '[:upper:]' '[:lower:]') - echo "INFO: Checking if read the docs has seen branch $branch" - - #if this is 200 "null" Then run discover branch - if [[ $(lftools rtd project-version-details "$rtdproject" "$branch" | jq '.active') == "null" ]]; then - echo "INFO: read the docs has not seen branch $branch for project $rtdproject" - echo "INFO: triggering $rtdproject latest to instantiate new branch discovery" - watchbuild latest + default_version="${DEFAULT_VERSION}" + + echo "INFO: current default version $current_version" + if [[ $current_version != "$default_version" ]]; then + echo "INFO: Setting rtd landing page to $default_version" + lftools rtd project-update "$rtdproject" default_version="$default_version" fi - echo "INFO: triggering $rtdproject $branch" - watchbuild "$branch" + if [[ $GERRIT_BRANCH == "master" ]]; then + echo "INFO: triggering $rtdproject latest" + watchbuild latest + else - #Make newly discovered branches visible in the u/i - isactive=$(lftools rtd project-version-details "$rtdproject" "$branch" | jq '.active') - if [[ "$isactive" == false ]]; then - echo "INFO: Marking $branch as active for project $rtdproject" - lftools rtd project-version-update "$rtdproject" "$branch" true - fi + #read the docs only understands lower case branch names + branch=$(echo "$GERRIT_BRANCH" | tr '[:upper:]' '[:lower:]') + echo "INFO: Checking if read the docs has seen branch $branch" - fi + #if this is 200 "null" Then run discover branch + if [[ $(lftools rtd project-version-details "$rtdproject" "$branch" | jq '.active') == "null" ]]; then + echo "INFO: read the docs has not seen branch $branch for project $rtdproject" + echo "INFO: triggering $rtdproject latest to instantiate new branch discovery" + watchbuild latest + fi + + echo "INFO: triggering $rtdproject $branch" + watchbuild "$branch" + + #Make newly discovered branches visible in the u/i + isactive=$(lftools rtd project-version-details "$rtdproject" "$branch" | jq '.active') + if [[ "$isactive" == false ]]; then + echo "INFO: Marking $branch as active for project $rtdproject" + lftools rtd project-version-update "$rtdproject" "$branch" true + fi + + fi fi diff --git a/shell/scrape-job-cost.sh b/shell/scrape-job-cost.sh index e51474b3..e940bc6a 100755 --- a/shell/scrape-job-cost.sh +++ b/shell/scrape-job-cost.sh @@ -74,7 +74,7 @@ cost_dir=~/cost # The Silo Directory for sandbox will get deleted periodically, so # gracefully handle that if [[ -d $silo_dir ]]; then - cd "$silo_dir" + cd "$silo_dir" else echo "$(date +'%Y-%m-%d %H:%M') No Silo Directory, nothing to do" exit 0 diff --git a/shell/sysstat.sh b/shell/sysstat.sh index c72e23ae..bbc502c6 100644 --- a/shell/sysstat.sh +++ b/shell/sysstat.sh @@ -23,7 +23,7 @@ case "$OS" in ;; 14.04) if [[ 
! -f /etc/default/sysstat ]] || \ - ! grep --quiet 'ENABLED="true"' /etc/default/sysstat; then + ! grep --quiet 'ENABLED="true"' /etc/default/sysstat; then exit 0 fi ;;
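
For reference, bashate's E003 check flags any line whose indentation is not a multiple of four columns, which is what drives every re-indent in the hunks above. Below is a minimal sketch of how such warnings can be reproduced locally, assuming bashate is installed (for example via "pip install bashate"); the snippet and the find invocation are illustrative only and are not taken from this repository's actual CI or tox configuration.

    # E003 ("Indent not multiple of 4"): a continuation line indented to
    # column 6 is flagged ...
    #     grep -e 'foo' \
    #           -e 'bar' "$file"
    # ... and passes once re-indented to a multiple of four columns:
    #     grep -e 'foo' \
    #         -e 'bar' "$file"

    # Lint every shell script in the working tree and report style violations:
    find . -name '*.sh' -print0 | xargs -0 bashate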