v0.4.7 introduces some new rules which now need to be handled.
Change-Id: If87fceb79e384e26f021a9c85a2e9d7413ce421e
Signed-off-by: Anil Belur <abelur@linuxfoundation.org>
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
# Remove metadata files that were not updated.
set +e # Temporarily disable to run diff command.
-metadata_files=($(diff -s -r "$m2repo_dir" "$WORKSPACE/m2repo-backup" \
- | grep 'Files .* and .* are identical' | awk '{print $2}'))
+mapfile -t metadata_files < <(diff -s -r "$m2repo_dir" "$WORKSPACE/m2repo-backup" \
+  | grep 'Files .* and .* are identical' \
+  | awk '{print $2}')
set -e # Re-enable.
set +u # $metadata_files could be unbound if project is new.
REPOS_DIR="$WORKSPACE/.repos"
-PATCHES=($(echo "$GERRIT_EVENT_COMMENT_TEXT" | grep 'recheck:' | awk -F: '{print $2}'))
+IFS=" " read -r -a PATCHES <<< "$(echo "$GERRIT_EVENT_COMMENT_TEXT" | grep 'recheck:' | awk -F: '{print $2}')"
projects=()
for patch in $(echo "${PATCHES[@]}"); do
set -e -o pipefail
set +u
-PROJECTS=($(echo "$DEPENDENCY_BUILD_ORDER"))
+IFS=" " read -r -a PROJECTS <<< "$DEPENDENCY_BUILD_ORDER"
REPOS_DIR="$WORKSPACE/.repos"
export MAVEN_OPTS
fi
}
-TOX_ENVS=(${TOX_ENVS//,/ })
+IFS=" " read -r -a TOX_ENVS <<< "${TOX_ENVS//,/ }"
if hash parallel 2>/dev/null; then
export -f run_tox
parallel --jobs 200% "run_tox $ARCHIVE_TOX_DIR {}" ::: ${TOX_ENVS[*]}
fi
if [ -f "$ARCHIVE_TOX_DIR/failed-envs.log" ]; then
- failed_envs=($(cat "$ARCHIVE_TOX_DIR/failed-envs.log"))
+ mapfile -t failed_envs < "$ARCHIVE_TOX_DIR/failed-envs.log"
for e in "${failed_envs[@]}"; do
echo "cat $ARCHIVE_TOX_DIR/tox-$e.log"
cat "$ARCHIVE_TOX_DIR/tox-$e.log"
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
# Remove metadata files that were not updated.
set +e # Temporarily disable to run diff command.
-metadata_files=($(diff -s -r "$m2repo_dir" "$WORKSPACE/m2repo-backup" \
- | grep 'Files .* and .* are identical' | awk '{print $2}'))
+mapfile -t metadata_files < <(diff -s -r "$m2repo_dir" "$WORKSPACE/m2repo-backup" \
+  | grep 'Files .* and .* are identical' \
+  | awk '{print $2}')
set -e # Re-enable.
set +u # $metadata_files could be unbound if project is new.
fi
}
-TOX_ENVS=(${TOX_ENVS//,/ })
+IFS=" " read -r -a TOX_ENVS <<< "${TOX_ENVS//,/ }"
if hash parallel 2>/dev/null; then
export -f run_tox
parallel --jobs 200% "run_tox $ARCHIVE_TOX_DIR {}" ::: ${TOX_ENVS[*]}
fi
if [ -f "$ARCHIVE_TOX_DIR/failed-envs.log" ]; then
- failed_envs=($(cat "$ARCHIVE_TOX_DIR/failed-envs.log"))
+ mapfile -t failed_envs < "$ARCHIVE_TOX_DIR/failed-envs.log"
for e in "${failed_envs[@]}"; do
echo "cat $ARCHIVE_TOX_DIR/tox-$e.log"
cat "$ARCHIVE_TOX_DIR/tox-$e.log"
# Checks for JJB documentation interest points and ensures they are documented.
-jjb_files=($(find jjb -name "*.yaml"))
+mapfile -t jjb_files < <(find jjb -name "*.yaml")
undocumented_count=0
for file in "${jjb_files[@]}"; do
REPOS_DIR="$WORKSPACE/.repos"
-PATCHES=($(echo "$GERRIT_EVENT_COMMENT_TEXT" | grep 'recheck:' | awk -F: '{print $2}'))
+IFS=" " read -r -a PATCHES <<< "$(echo "$GERRIT_EVENT_COMMENT_TEXT" | grep 'recheck:' | awk -F: '{print $2}')"
projects=()
for patch in $(echo "${PATCHES[@]}"); do
set -e -o pipefail
set +u
-PROJECTS=($(echo "$DEPENDENCY_BUILD_ORDER"))
+IFS=" " read -r -a PROJECTS <<< "$DEPENDENCY_BUILD_ORDER"
REPOS_DIR="$WORKSPACE/.repos"
export MAVEN_OPTS
# Remove metadata files that were not updated.
set +e # Temporarily disable to run diff command.
-metadata_files=($(diff -s -r "$m2repo_dir" "$WORKSPACE/m2repo-backup" \
- | grep 'Files .* and .* are identical' | awk '{print $2}'))
+mapfile -t metadata_files < <(diff -s -r "$m2repo_dir" "$WORKSPACE/m2repo-backup" \
+  | grep 'Files .* and .* are identical' \
+  | awk '{print $2}')
set -e # Re-enable.
set +u # $metadata_files could be unbound if project is new.
"../packer/templates/$PACKER_TEMPLATE.json"
# Retrieve the list of cloud providers
-clouds=($(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json"))
+mapfile -t clouds < <(jq -r '.builders[].name' "../packer/templates/$PACKER_TEMPLATE.json")
# Split public/private clouds logs
for cloud in "${clouds[@]}"; do
fi
}
-TOX_ENVS=(${TOX_ENVS//,/ })
+IFS=" " read -r -a TOX_ENVS <<< "${TOX_ENVS//,/ }"
if hash parallel 2>/dev/null; then
export -f run_tox
parallel --jobs 200% "run_tox $ARCHIVE_TOX_DIR {}" ::: ${TOX_ENVS[*]}
fi
if [ -f "$ARCHIVE_TOX_DIR/failed-envs.log" ]; then
- failed_envs=($(cat "$ARCHIVE_TOX_DIR/failed-envs.log"))
+ mapfile -t failed_envs < "$ARCHIVE_TOX_DIR/failed-envs.log"
for e in "${failed_envs[@]}"; do
echo "cat $ARCHIVE_TOX_DIR/tox-$e.log"
cat "$ARCHIVE_TOX_DIR/tox-$e.log"