From dbd05cd4100a27a545cc2c7f0388018449d982c3 Mon Sep 17 00:00:00 2001 From: Thanh Ha Date: Wed, 7 Aug 2019 22:32:05 -0400 Subject: [PATCH] Resolve ShellCheck errors in shell scripts * SC2086: Double quote to prevent globbing and word splitting. * SC2027: The surrounding quotes actually unquote this. Remove or escape them. * SC2016: Expressions don't expand in single quotes, use double quotes for that. * SC2236: Use -n instead of ! -z. * SC2233: Remove superfluous (..) around condition. Signed-off-by: Thanh Ha Change-Id: I73207aa87646472bdb1c7848ba9608ac702bda53 --- jenkins-init-scripts/create-jenkins-user.sh | 2 ++ shell/cmake-sonar.sh | 10 +++++----- shell/docker-build.sh | 4 ++-- shell/docker-login.sh | 6 +++--- shell/docker-push.sh | 4 ++-- shell/gerrit-branch-lock.sh | 2 +- shell/gerrit-push-patch.sh | 12 ++++++------ shell/git-validate-jira-urls.sh | 4 ++-- shell/jenkins-configure-clouds.sh | 30 ++++++++++++++--------------- shell/jenkins-configure-global-vars.sh | 6 +++--- shell/jenkins-verify-images.sh | 4 ++-- shell/jjb-verify-job.sh | 2 +- shell/maven-fetch-metadata.sh | 8 ++++---- shell/nexus-iq-cli.sh | 6 +++--- shell/openstack-cleanup-orphaned-ports.sh | 2 +- shell/package-listing.sh | 4 ++-- shell/packer-install.sh | 2 +- shell/puppet-lint.sh | 2 +- shell/python-tools-install.sh | 12 ++++++------ shell/release-job.sh | 14 +++++++------- shell/rtd-trigger-build.sh | 2 +- shell/tox-run.sh | 6 +++--- shell/whitesource-unified-agent-cli.sh | 10 +++++----- 23 files changed, 78 insertions(+), 76 deletions(-) diff --git a/jenkins-init-scripts/create-jenkins-user.sh b/jenkins-init-scripts/create-jenkins-user.sh index e1e0b1ec..bde227cb 100755 --- a/jenkins-init-scripts/create-jenkins-user.sh +++ b/jenkins-init-scripts/create-jenkins-user.sh @@ -13,6 +13,8 @@ OS=$(facter operatingsystem | tr '[:upper:]' '[:lower:]') OS_RELEASE=$(facter lsbdistrelease | tr '[:upper:]' '[:lower:]') if [[ "$OS_RELEASE" == "18.04" && "$OS" == 'ubuntu' ]]; then + # We do not want var expansion here as profile script expands at runtime. + # shellcheck disable=SC2016 echo 'export PATH=$HOME/.local/bin:$PATH' >> /etc/profile fi diff --git a/shell/cmake-sonar.sh b/shell/cmake-sonar.sh index cd7ee715..0a41a95a 100644 --- a/shell/cmake-sonar.sh +++ b/shell/cmake-sonar.sh @@ -38,12 +38,12 @@ cd "$build_dir" || exit 1 eval cmake -DCMAKE_INSTALL_PREFIX="$INSTALL_PREFIX" $cmake_opts .. /opt/build-wrapper/build-wrapper-linux-x86-64 --out-dir "$WORKSPACE/bw-output" \ - make $make_opts + make "$make_opts" /opt/sonar-scanner/bin/sonar-scanner \ - -Dsonar.projectKey=${PROJECT_KEY} \ - -Dsonar.organization=${PROJECT_ORGANIZATION} \ + -Dsonar.projectKey="${PROJECT_KEY}" \ + -Dsonar.organization="${PROJECT_ORGANIZATION}" \ -Dsonar.sources=. \ -Dsonar.cfamily.build-wrapper-output="$WORKSPACE/bw-output" \ - -Dsonar.host.url=${SONAR_HOST_URL} \ - -Dsonar.login=${API_TOKEN} + -Dsonar.host.url="${SONAR_HOST_URL}" \ + -Dsonar.login="${API_TOKEN}" diff --git a/shell/docker-build.sh b/shell/docker-build.sh index e7730586..8453dcd1 100644 --- a/shell/docker-build.sh +++ b/shell/docker-build.sh @@ -17,5 +17,5 @@ cd "$DOCKER_ROOT" # DOCKER_IMAGE_TAG variable gets constructed after lf-docker-get-container-tag builder step # is executed. It constructs the image name and the appropriate tag in the same varaiable. docker_build_command="docker build ${DOCKER_ARGS:-} -t "$CONTAINER_PUSH_REGISTRY/$DOCKER_NAME:$DOCKER_IMAGE_TAG" ." 
-echo $docker_build_command -eval $docker_build_command | tee "$WORKSPACE/docker_build_log.txt" +echo "$docker_build_command" +eval "$docker_build_command" | tee "$WORKSPACE/docker_build_log.txt" diff --git a/shell/docker-login.sh b/shell/docker-login.sh index 08ec941a..ca4758f4 100644 --- a/shell/docker-login.sh +++ b/shell/docker-login.sh @@ -79,7 +79,7 @@ set_creds() { # Login to the registry do_login() { docker_version=$( docker -v | awk '{print $3}') - if version_lt $docker_version "17.06.0" && \ + if version_lt "$docker_version" "17.06.0" && \ "$DOCKERHUB_REGISTRY" == "docker.io" && \ "$DOCKERHUB_EMAIL:-none" != 'none' then @@ -101,7 +101,7 @@ then # docker login requests an email address if nothing is passed to it # Nexus, however, does not need this and ignores the value - set_creds $REGISTRY + set_creds "$REGISTRY" do_login "$REGISTRY" none done fi @@ -109,7 +109,7 @@ fi # Login to docker.io after determining if email is needed. if [ "${DOCKERHUB_REGISTRY:-none}" != 'none' ] then - set_creds $DOCKERHUB_REGISTRY + set_creds "$DOCKERHUB_REGISTRY" if [ "${DOCKERHUB_EMAIL:-none}" != 'none' ] then do_login "$DOCKERHUB_REGISTRY" "$DOCKERHUB_EMAIL" diff --git a/shell/docker-push.sh b/shell/docker-push.sh index b04f71d4..fa30ea03 100644 --- a/shell/docker-push.sh +++ b/shell/docker-push.sh @@ -15,5 +15,5 @@ echo "---> docker-push.sh" set -ue -o pipefail echo "---> Pushing image: $CONTAINER_PUSH_REGISTRY/$DOCKER_NAME:$DOCKER_IMAGE_TAG" docker_push_command="docker push "$CONTAINER_PUSH_REGISTRY/$DOCKER_NAME:$DOCKER_IMAGE_TAG"" -echo $docker_push_command -eval $docker_push_command +echo "$docker_push_command" +eval "$docker_push_command" diff --git a/shell/gerrit-branch-lock.sh b/shell/gerrit-branch-lock.sh index f884c1d5..8cfbe4a1 100644 --- a/shell/gerrit-branch-lock.sh +++ b/shell/gerrit-branch-lock.sh @@ -26,7 +26,7 @@ install_gerrit_hook() { ssh_port=$(git remote show origin | grep Fetch | grep 'ssh://' \ | awk -F'/' '{print $3}' | awk -F':' '{print $2}') - if [ -z $ssh_url ]; then + if [ -z "$ssh_url" ]; then echo "ERROR: Gerrit SSH URL not found." 
exit 1 fi diff --git a/shell/gerrit-push-patch.sh b/shell/gerrit-push-patch.sh index e732b220..b7065130 100644 --- a/shell/gerrit-push-patch.sh +++ b/shell/gerrit-push-patch.sh @@ -45,12 +45,12 @@ set -u # Remove any leading or trailing quotes surrounding the strings # which can cause parse errors when passed as CLI options to commands -PROJECT="$(echo $PROJECT | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" -GERRIT_COMMIT_MESSAGE="$(echo $GERRIT_COMMIT_MESSAGE | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" -GERRIT_HOST="$(echo $GERRIT_HOST | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" -GERRIT_TOPIC="$(echo $GERRIT_TOPIC | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" -GERRIT_USER="$(echo $GERRIT_USER | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" -REVIEWERS_EMAIL="$(echo $REVIEWERS_EMAIL | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" +PROJECT="$(echo "$PROJECT" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" +GERRIT_COMMIT_MESSAGE="$(echo "$GERRIT_COMMIT_MESSAGE" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" +GERRIT_HOST="$(echo "$GERRIT_HOST" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" +GERRIT_TOPIC="$(echo "$GERRIT_TOPIC" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" +GERRIT_USER="$(echo "$GERRIT_USER" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" +REVIEWERS_EMAIL="$(echo "$REVIEWERS_EMAIL" | sed "s/^\([\"']\)\(.*\)\1\$/\2/g")" CHANGE_ID=$(ssh -p 29418 "$GERRIT_USER@$GERRIT_HOST" gerrit query \ limit:1 owner:self is:open project:"$PROJECT" \ diff --git a/shell/git-validate-jira-urls.sh b/shell/git-validate-jira-urls.sh index 1a8f1ca1..4a107722 100644 --- a/shell/git-validate-jira-urls.sh +++ b/shell/git-validate-jira-urls.sh @@ -20,9 +20,9 @@ set +u if [ -n "${JIRA_URL}" ]; then - BASE_URL=$(echo $JIRA_URL | awk -F'/' '{print $3}') + BASE_URL=$(echo "$JIRA_URL" | awk -F'/' '{print $3}') JIRA_LINK=$(git rev-list --format=%B --max-count=1 HEAD | grep -io "http[s]*://$BASE_URL/" || true) - if [[ ! -z "$JIRA_LINK" ]] + if [[ -n "$JIRA_LINK" ]] then echo 'Remove JIRA URLs from commit message' echo 'Add jira references as: Issue: -, instead of URLs' diff --git a/shell/jenkins-configure-clouds.sh b/shell/jenkins-configure-clouds.sh index 18c3d896..5a4e7a06 100644 --- a/shell/jenkins-configure-clouds.sh +++ b/shell/jenkins-configure-clouds.sh @@ -69,7 +69,7 @@ get_cfg() { export get_cfg get_cloud_cfg() { - if [ -z $1 ]; then + if [ -z "$1" ]; then >&2 echo "Usage: get_cloud_cfg CFG_DIR" exit 1 fi @@ -100,7 +100,7 @@ get_cloud_cfg() { } get_launcher_factory() { - if [ -z $1 ]; then + if [ -z "$1" ]; then >&2 echo "Usage: get_launcher_factory JNLP|SSH" exit 1 fi @@ -118,7 +118,7 @@ get_launcher_factory() { } get_minion_options() { - if [ -z $1 ]; then + if [ -z "$1" ]; then >&2 echo "Usage: get_minion_options CFG_FILE" exit 1 fi @@ -179,7 +179,7 @@ get_minion_options() { hardware_id=$(get_cfg "$cfg_file" HARDWARE_ID "") network_id=$(get_cfg "$cfg_file" NETWORK_ID "") - udi_default="$(get_cfg "$(dirname $cfg_file)/cloud.cfg" USER_DATA_ID "jenkins-init-script")" + udi_default="$(get_cfg "$(dirname "$cfg_file")/cloud.cfg" USER_DATA_ID "jenkins-init-script")" user_data_id=$(get_cfg "$cfg_file" USER_DATA_ID "$udi_default") # Handle Sandbox systems that might have a different cap. 
@@ -194,7 +194,7 @@ get_minion_options() { availability_zone=$(get_cfg "$cfg_file" AVAILABILITY_ZONE "") start_timeout=$(get_cfg "$cfg_file" START_TIMEOUT "600000") - kpn_default="$(get_cfg "$(dirname $cfg_file)/cloud.cfg" KEY_PAIR_NAME "jenkins-ssh")" + kpn_default="$(get_cfg "$(dirname "$cfg_file")/cloud.cfg" KEY_PAIR_NAME "jenkins-ssh")" key_pair_name=$(get_cfg "$cfg_file" KEY_PAIR_NAME "$kpn_default") num_executors=$(get_cfg "$cfg_file" NUM_EXECUTORS "1") @@ -208,7 +208,7 @@ get_minion_options() { | grep -i 'OpenStack Cloud Plugin' \ | awk -F':' '{print $2}' | awk -F' ' '{print $1}')" if version_ge "$OS_PLUGIN_VER" "2.35"; then - if [ ! -z "$volume_size" ]; then + if [ -n "$volume_size" ]; then echo " new BootSource.VolumeFromImage(\"$image_name\", $volume_size)," else echo " new BootSource.Image(\"$image_name\")," @@ -230,7 +230,7 @@ get_minion_options() { echo " $retention_time" else # SlaveOptions() structure for versions <= 2.34 - if [ ! -z "$volume_size" ]; then + if [ -n "$volume_size" ]; then echo " new BootSource.VolumeFromImage(\"$image_name\", $volume_size)," else echo " new BootSource.Image(\"$image_name\")," @@ -253,7 +253,7 @@ get_minion_options() { } get_template_cfg() { - if [ -z $2 ]; then + if [ -z "$2" ]; then >&2 echo "Usage: get_template_cfg CFG_FILE SILO [MINION_PREFIX]" exit 1 fi @@ -263,7 +263,7 @@ get_template_cfg() { local minion_prefix="${3:-}" - template_name=$(basename $cfg_file .cfg) + template_name=$(basename "$cfg_file" .cfg) labels=$(get_cfg "$cfg_file" LABELS "") echo "minion_options = new SlaveOptions(" @@ -278,7 +278,7 @@ get_template_cfg() { echo ")" } -mapfile -t clouds < <(ls -d1 $OS_CLOUD_DIR/*/) +mapfile -t clouds < <(ls -d1 "$OS_CLOUD_DIR"/*/) for silo in $silos; do @@ -308,18 +308,18 @@ for silo in $silos; do for cloud in "${clouds[@]}"; do cfg_dir="${cloud}" echo "Processing $cfg_dir" - insert_file="$SCRIPT_DIR/$silo/$(basename $cloud)/cloud-cfg.txt" - mkdir -p "$(dirname $insert_file)" + insert_file="$SCRIPT_DIR/$silo/$(basename "$cloud")/cloud-cfg.txt" + mkdir -p "$(dirname "$insert_file")" rm -f "$insert_file" echo "" >> "$insert_file" echo "//////////////////////////////////////////////////" >> "$insert_file" - echo "// Cloud config for $(basename $cloud)" >> "$insert_file" + echo "// Cloud config for $(basename "$cloud")" >> "$insert_file" echo "//////////////////////////////////////////////////" >> "$insert_file" echo "" >> "$insert_file" - echo "templates = []" >> $insert_file - mapfile -t templates < <(find $cfg_dir -maxdepth 1 -not -type d -not -name "cloud.cfg") + echo "templates = []" >> "$insert_file" + mapfile -t templates < <(find "$cfg_dir" -maxdepth 1 -not -type d -not -name "cloud.cfg") for template in "${templates[@]}"; do get_template_cfg "$template" "$silo" "$node_prefix" >> "$insert_file" echo "templates.add(template)" >> "$insert_file" diff --git a/shell/jenkins-configure-global-vars.sh b/shell/jenkins-configure-global-vars.sh index eb25de0a..5e988082 100644 --- a/shell/jenkins-configure-global-vars.sh +++ b/shell/jenkins-configure-global-vars.sh @@ -49,7 +49,7 @@ for silo in $silos; do exit 1 fi - mapfile -t vars < <(cat $global_vars) + mapfile -t vars < <(cat "$global_vars") rm -f insert.txt for var in "${vars[@]}"; do @@ -58,8 +58,8 @@ for silo in $silos; do continue fi - key=$(echo $var | cut -d\= -f1) - value=$(echo $var | cut -d\= -f2) + key=$(echo "$var" | cut -d\= -f1) + value=$(echo "$var" | cut -d\= -f2) echo " '$key': '$value'," >> insert.txt done diff --git a/shell/jenkins-verify-images.sh 
b/shell/jenkins-verify-images.sh index c97f0630..75453232 100755 --- a/shell/jenkins-verify-images.sh +++ b/shell/jenkins-verify-images.sh @@ -18,7 +18,7 @@ error=false for file in jenkins-config/clouds/openstack/*/*; do # Set the $IMAGE_NAME variable to the the file's IMAGE_NAME value - export "$(grep ^IMAGE_NAME= $file)" + export "$(grep ^IMAGE_NAME= "$file")" # The image should be listed as active if ! openstack image list --property name="$IMAGE_NAME" | grep "active"; then @@ -26,7 +26,7 @@ for file in jenkins-config/clouds/openstack/*/*; do error=true fi # Set the $HARDWARE_ID variable to the the file's HARDWARE_ID value - export "$(grep ^HARDWARE_ID= $file)" + export "$(grep ^HARDWARE_ID= "$file")" # The flavor should be listed. Spaces in grep string ensure complete match. if ! openstack flavor list | grep " $HARDWARE_ID "; then diff --git a/shell/jjb-verify-job.sh b/shell/jjb-verify-job.sh index a1e76c9f..5bee095a 100644 --- a/shell/jjb-verify-job.sh +++ b/shell/jjb-verify-job.sh @@ -29,7 +29,7 @@ do done popd -if [ ! -z "$(ls -A archives/job-configs)" ]; then +if [ -n "$(ls -A archives/job-configs)" ]; then tar cJvf archives/job-configs.tar.xz archives/job-configs rm -rf archives/job-configs fi diff --git a/shell/maven-fetch-metadata.sh b/shell/maven-fetch-metadata.sh index 270056f4..45f1a9b9 100644 --- a/shell/maven-fetch-metadata.sh +++ b/shell/maven-fetch-metadata.sh @@ -13,9 +13,9 @@ echo "---> maven-fetch-metadata.sh" # Check for "-f" maven param, indicating a change in pom location. pom_path="pom.xml" -file_path=$(echo $MAVEN_PARAMS | grep -E "\-f \S+" | awk '{ print $2 }') -if [ ! -z $file_path ]; then - if [ -d $file_path ]; then +file_path=$(echo "$MAVEN_PARAMS" | grep -E "\-f \S+" | awk '{ print $2 }') +if [ -n "$file_path" ]; then + if [ -d "$file_path" ]; then pom_path="$file_path/pom.xml" else pom_path="$file_path" @@ -31,7 +31,7 @@ project=$(xmlstarlet sel \ -v "/x:project/x:groupId" \ --elif "/x:project/x:parent/x:groupId" \ -v "/x:project/x:parent/x:groupId" \ - --else -o "" $pom_path) + --else -o "" "$pom_path") project_path="${project//.//}" mkdir -p "$WORKSPACE/m2repo/$project_path" diff --git a/shell/nexus-iq-cli.sh b/shell/nexus-iq-cli.sh index dd621ae5..6dd44309 100644 --- a/shell/nexus-iq-cli.sh +++ b/shell/nexus-iq-cli.sh @@ -15,9 +15,9 @@ echo "---> nexus-iq-cli.sh" set +x CLI_LOCATION="/tmp/nexus-iq-cli-${NEXUS_IQ_CLI_VERSION}.jar" -wget -nv https://download.sonatype.com/clm/scanner/nexus-iq-cli-${NEXUS_IQ_CLI_VERSION}.jar -O ${CLI_LOCATION} +wget -nv "https://download.sonatype.com/clm/scanner/nexus-iq-cli-${NEXUS_IQ_CLI_VERSION}.jar" -O "${CLI_LOCATION}" echo "-a" > cli-auth.txt echo "${CLM_USER}:${CLM_PASSWORD}" >> cli-auth.txt -java -jar ${CLI_LOCATION} @cli-auth.txt -xc -i ${CLM_PROJECT_NAME} -s https://nexus-iq.wl.linuxfoundation.org -t build . +java -jar "${CLI_LOCATION}" @cli-auth.txt -xc -i "${CLM_PROJECT_NAME}" -s https://nexus-iq.wl.linuxfoundation.org -t build . 
rm cli-auth.txt -rm ${CLI_LOCATION} +rm "${CLI_LOCATION}" diff --git a/shell/openstack-cleanup-orphaned-ports.sh b/shell/openstack-cleanup-orphaned-ports.sh index fac8d202..c47228f8 100644 --- a/shell/openstack-cleanup-orphaned-ports.sh +++ b/shell/openstack-cleanup-orphaned-ports.sh @@ -15,7 +15,7 @@ os_cloud="${OS_CLOUD:-vex}" set -eux -o pipefail -mapfile -t os_ports < <(openstack --os-cloud "$os_cloud" port list -f value -c ID -c status | egrep DOWN | awk '{print $1}') +mapfile -t os_ports < <(openstack --os-cloud "$os_cloud" port list -f value -c ID -c status | grep -E DOWN | awk '{print $1}') if [ ${#os_ports[@]} -eq 0 ]; then echo "No orphaned ports found." diff --git a/shell/package-listing.sh b/shell/package-listing.sh index 82ee902b..37603259 100755 --- a/shell/package-listing.sh +++ b/shell/package-listing.sh @@ -28,7 +28,7 @@ DIFF_PACKAGES=/tmp/packages_diff.txt # Swap to creating END_PACKAGES if we are running in a CI job (determined by if # we have a workspace env) or if the starting packages listing already exists. PACKAGES="${START_PACKAGES}" -if ( [ "${workspace}" ] || [ -f "${START_PACKAGES}" ] ) +if [ "${workspace}" ] || [ -f "${START_PACKAGES}" ] then PACKAGES="${END_PACKAGES}" fi @@ -47,7 +47,7 @@ case "${OS_FAMILY}" in ;; esac -if ( [ -f "${START_PACKAGES}" ] && [ -f "${END_PACKAGES}" ] ) +if [ -f "${START_PACKAGES}" ] && [ -f "${END_PACKAGES}" ] then # ` || true` Ignore exit code because diff exits 1 when there is a diff diff "${START_PACKAGES}" "${END_PACKAGES}" > "${DIFF_PACKAGES}" || true diff --git a/shell/packer-install.sh b/shell/packer-install.sh index 86ddf82e..fea704a9 100644 --- a/shell/packer-install.sh +++ b/shell/packer-install.sh @@ -38,7 +38,7 @@ version_ge() { test "$(echo "$@" | tr " " "\n" | sort -rV | head -n 1)" == "$1"; if hash packer.io 2>/dev/null; then CURRENT_VERSION="$(packer.io --version)" - if version_lt $CURRENT_VERSION $PACKER_VERSION; then + if version_lt "$CURRENT_VERSION" "$PACKER_VERSION"; then echo "Packer version $CURRENT_VERSION installed is less than $PACKER_VERSION available, updating Packer." packer_install else diff --git a/shell/puppet-lint.sh b/shell/puppet-lint.sh index 0ceb5b0e..01333938 100644 --- a/shell/puppet-lint.sh +++ b/shell/puppet-lint.sh @@ -18,6 +18,6 @@ ARCHIVE_PUPPETLINT_DIR="$WORKSPACE/archives/puppet-lint" mkdir -p "$ARCHIVE_PUPPETLINT_DIR" cd "$WORKSPACE/$PUPPET_DIR" -gem install puppet-lint -v $PUPPET_LINT_VERSION +gem install puppet-lint -v "$PUPPET_LINT_VERSION" echo "---> Running puppet-lint" "$BINDIR/puppet-lint" . | tee -a "$ARCHIVE_PUPPETLINT_DIR/puppet-lint.log" diff --git a/shell/python-tools-install.sh b/shell/python-tools-install.sh index 5237df6d..74075377 100644 --- a/shell/python-tools-install.sh +++ b/shell/python-tools-install.sh @@ -25,15 +25,15 @@ if [[ -f $pip_list_pre ]]; then | tee $pip_list_diffs; then echo "No diffs" | tee $pip_list_diffs fi - mkdir -p $WORKSPACE/archives - cp $pip_list_pre $pip_list_post $pip_list_diffs $WORKSPACE/archives - rm -rf $pip_list_pre $pip_list_post $pip_list_diffs - ls $WORKSPACE/archives + mkdir -p "$WORKSPACE/archives" + cp "$pip_list_pre" "$pip_list_post" "$pip_list_diffs" "$WORKSPACE/archives" + rm -rf "$pip_list_pre" "$pip_list_post" "$pip_list_diffs" + ls "$WORKSPACE/archives" # Would just like to 'exit 0' here but we can't because the # log-deploy.sh script is 'appended' to this file and it would not # be executed. 
else - pip list > $pip_list_pre + pip list > "$pip_list_pre" # These 'pip installs' only need to be executed during pre-build requirements_file=$(mktemp /tmp/requirements-XXXX.txt) @@ -56,5 +56,5 @@ EOF python -m pip install --user --quiet --upgrade pip python -m pip install --user --quiet --upgrade setuptools python -m pip install --user --quiet --upgrade -r "$requirements_file" - rm -rf $requirements_file + rm -rf "$requirements_file" fi diff --git a/shell/release-job.sh b/shell/release-job.sh index dfc65db7..7c572cd7 100644 --- a/shell/release-job.sh +++ b/shell/release-job.sh @@ -40,7 +40,7 @@ NEXUS_URL="${NEXUSPROXY:-$NEXUS_URL}" # Fetch the release-schema.yaml wget -q https://raw.githubusercontent.com/lfit/releng-global-jjb/master/schema/release-schema.yaml -release_files=$(git diff-tree --no-commit-id -r $GERRIT_PATCHSET_REVISION --name-only -- "releases/") +release_files=$(git diff-tree --no-commit-id -r "$GERRIT_PATCHSET_REVISION" --name-only -- "releases/") echo "RELEASE FILES ARE AS FOLLOWS: $release_files" if (( $(grep -c . <<<"$release_files") > 1 )); then @@ -52,7 +52,7 @@ else fi echo "--> Verifying $release_file schema." -lftools schema verify $release_file release-schema.yaml +lftools schema verify "$release_file" release-schema.yaml VERSION="$(niet ".version" "$release_file")" LOG_DIR="$(niet ".log_dir" "$release_file")" @@ -61,13 +61,13 @@ LOGS_URL="${LOGS_SERVER}/${NEXUS_PATH}${LOG_DIR}" PATCH_DIR="$(mktemp -d)" LOGS_URL=${LOGS_URL%/} # strip any trailing '/' -echo "wget -P "$PATCH_DIR" "${LOGS_URL}/"staging-repo.txt.gz" +echo "wget -P $PATCH_DIR ${LOGS_URL}/staging-repo.txt.gz" wget -P "$PATCH_DIR" "${LOGS_URL}/"staging-repo.txt.gz nexus_release(){ for staging_url in $(zcat "$PATCH_DIR"/staging-repo.txt.gz | awk -e '{print $2}'); do # extract the domain name from URL - NEXUS_URL=$(echo $staging_url | sed -e 's|^[^/]*//||' -e 's|/.*$||') + NEXUS_URL=$(echo "$staging_url" | sed -e 's|^[^/]*//||' -e 's|/.*$||') # extract the staging repo from URL STAGING_REPO=${staging_url#*repositories/} echo "Merge will run" @@ -96,7 +96,7 @@ echo "VERSION: $VERSION" echo "LOG DIR: $LOG_DIR" pushd "$PATCH_DIR" - echo "wget "${LOGS_URL}"/patches/{"${PROJECT//\//-}".bundle,taglist.log.gz}" + echo "wget ${LOGS_URL}/patches/{${PROJECT//\//-}.bundle,taglist.log.gz}" wget "${LOGS_URL}"/patches/{"${PROJECT//\//-}".bundle,taglist.log.gz} gunzip taglist.log.gz cat "$PATCH_DIR"/taglist.log @@ -107,7 +107,7 @@ popd allowed_version_regex="^((v?)([0-9]+)\.([0-9]+)\.([0-9]+))$" if [[ ! 
$VERSION =~ $allowed_version_regex ]]; then echo "The version $VERSION is not a semantic valid version" - echo "Allowed versions are "v#.#.#" or "#.#.#" aka SemVer" + echo "Allowed versions are \"v#.#.#\" or \"#.#.#\" aka SemVer" echo "See https://semver.org/ for more details on SemVer" exit 1 fi @@ -126,7 +126,7 @@ sigul --batch -c "$SIGUL_CONFIG" sign-git-tag "$SIGUL_KEY" "$VERSION" < "$SIGUL_ echo "Showing latest signature for $PROJECT:" gpg --import "$SIGNING_PUBKEY" -echo "git tag -v "$VERSION"" +echo "git tag -v $VERSION" git tag -v "$VERSION" ########## Merge Part ############## diff --git a/shell/rtd-trigger-build.sh b/shell/rtd-trigger-build.sh index 673d0f9a..6af08d7d 100644 --- a/shell/rtd-trigger-build.sh +++ b/shell/rtd-trigger-build.sh @@ -28,7 +28,7 @@ last_char=${RTD_BUILD_URL:length-1:1} [[ $last_char != "/" ]] && RTD_BUILD_URL="$RTD_BUILD_URL/"; : json=$(curl -X POST -d "branches=${GERRIT_BRANCH}" -d "token=$RTD_TOKEN" "$RTD_BUILD_URL") -build_triggered=$(echo $json | jq -r .build_triggered) +build_triggered=$(echo "$json" | jq -r .build_triggered) if [ "$build_triggered" != "true" ]; then echo "ERROR: Build was not triggered." diff --git a/shell/tox-run.sh b/shell/tox-run.sh index 3f1f92a9..90bd5465 100644 --- a/shell/tox-run.sh +++ b/shell/tox-run.sh @@ -33,7 +33,7 @@ set +e # Allow detox to fail so that we can collect the logs in the next step PARALLEL="${PARALLEL:-true}" if [ "${PARALLEL}" = true ]; then - if [ ! -z "$TOX_ENVS" ]; then + if [ -n "$TOX_ENVS" ]; then detox -e "$TOX_ENVS" | tee -a "$ARCHIVE_TOX_DIR/detox.log" tox_status="${PIPESTATUS[0]}" else @@ -41,7 +41,7 @@ if [ "${PARALLEL}" = true ]; then tox_status="${PIPESTATUS[0]}" fi else - if [ ! -z "$TOX_ENVS" ]; then + if [ -n "$TOX_ENVS" ]; then tox -e "$TOX_ENVS" | tee -a "$ARCHIVE_TOX_DIR/tox.log" tox_status="${PIPESTATUS[0]}" else @@ -53,7 +53,7 @@ fi # Disable SC2116 as we want to echo a space separated list of TOX_ENVS # shellcheck disable=SC2116 for i in .tox/*/log; do - tox_env=$(echo $i | awk -F'/' '{print $2}') + tox_env=$(echo "$i" | awk -F'/' '{print $2}') cp -r "$i" "$ARCHIVE_TOX_DIR/$tox_env" done set -e # Logs collected so re-enable diff --git a/shell/whitesource-unified-agent-cli.sh b/shell/whitesource-unified-agent-cli.sh index c092ad97..ff6db6e0 100644 --- a/shell/whitesource-unified-agent-cli.sh +++ b/shell/whitesource-unified-agent-cli.sh @@ -19,9 +19,9 @@ set -u echo "---> whitesource-unified-agent-cli.sh" jar_location="/tmp/wss-unified-agent-${WSS_UNIFIED_AGENT_VERSION}.jar" wss_unified_agent_url="https://s3.amazonaws.com/unified-agent/wss-unified-agent-${WSS_UNIFIED_AGENT_VERSION}.jar" -wget -nv ${wss_unified_agent_url} -O ${jar_location} +wget -nv "${wss_unified_agent_url}" -O "${jar_location}" echo "---> Running WhiteSource Unified Agent CLI ..." -java -jar ${jar_location} -c wss-unified-agent.config \ - -product ${WSS_PRODUCT_NAME} -project ${WSS_PROJECT_NAME} \ - -projectVersion ${GERRIT_BRANCH} ${WSS_UNIFIED_AGENT_OPTIONS:-} -rm ${jar_location} +java -jar "${jar_location}" -c wss-unified-agent.config \ + -product "${WSS_PRODUCT_NAME}" -project "${WSS_PROJECT_NAME}" \ + -projectVersion "${GERRIT_BRANCH}" "${WSS_UNIFIED_AGENT_OPTIONS:-}" +rm "${jar_location}" -- 2.16.6
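For reference, the sketch below illustrates the quoting patterns these ShellCheck codes call for. It is a minimal standalone example using hypothetical variable names (file_path, msg, envs), not an excerpt from the scripts changed above.

#!/bin/bash
# Illustrative only; variable names here are made up.

file_path="/tmp/some file.txt"

# SC2086: double quote expansions to prevent globbing and word splitting.
echo "$file_path"                       # instead of: echo $file_path

# SC2027: quotes nested inside a quoted string unquote it; escape or drop them.
msg="path is \"$file_path\""            # instead of: msg="path is "$file_path""
echo "$msg"

# SC2016: single quotes do not expand variables; keep them (with a disable
# directive) only when the literal $VAR text should expand later, at runtime.
# shellcheck disable=SC2016
echo 'export PATH=$HOME/.local/bin:$PATH'

# SC2236: use -n instead of ! -z for non-empty tests.
envs="py37,lint"
if [ -n "$envs" ]; then                 # instead of: if [ ! -z "$envs" ]
    echo "tox envs: $envs"
fi

# SC2233: the (..) wrapped around a test condition is superfluous.
if [ -f "$file_path" ] || [ -n "$envs" ]; then
    echo "condition met"                # instead of: if ( [ -f ... ] || [ -n ... ] )
fi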