Diffstat (limited to 'tools/update-verify/release/common/cached_download.sh')
-rw-r--r--  tools/update-verify/release/common/cached_download.sh | 16
1 file changed, 10 insertions(+), 6 deletions(-)
diff --git a/tools/update-verify/release/common/cached_download.sh b/tools/update-verify/release/common/cached_download.sh
index 7cb3c42f8d..4522b5504d 100644
--- a/tools/update-verify/release/common/cached_download.sh
+++ b/tools/update-verify/release/common/cached_download.sh
@@ -1,10 +1,12 @@
+#!/bin/bash
# this library works like a wrapper around wget, to allow downloads to be cached
# so that if later the same url is retrieved, the entry from the cache will be
# returned.
-pushd `dirname $0` &>/dev/null
+pushd "$(dirname "$0")" &>/dev/null || exit
cache_dir="$(pwd)/cache"
-popd &>/dev/null
+popd &>/dev/null || exit
+retry="$MY_DIR/../../../../mach python -m redo.cmd -s 1 -a 3"
# Deletes all files in the cache directory
# We don't support folders or .dot(hidden) files
@@ -12,7 +14,7 @@ popd &>/dev/null
# which are the only workaround to poor mount r/w performance on MacOS
# Reference: https://forums.docker.com/t/file-access-in-mounted-volumes-extremely-slow-cpu-bound/8076/288
clear_cache () {
- rm -rf "${cache_dir}/*"
+ rm -rf "${cache_dir:?}/*"
}
# download method - you pass a filename to save the file under, and the url to call
@@ -20,9 +22,10 @@ cached_download () {
local output_file="${1}"
local url="${2}"
- if fgrep -x "${url}" "${cache_dir}/urls.list" >/dev/null; then
+ if grep -Fx "${url}" "${cache_dir}/urls.list" >/dev/null; then
echo "Retrieving '${url}' from cache..."
- local line_number="$(fgrep -nx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
+ local line_number
+ line_number="$(grep -Fnx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
cp "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache" "${output_file}"
else
echo "Downloading '${url}' and placing in cache..."
@@ -31,7 +34,8 @@ cached_download () {
local exit_code=$?
if [ "${exit_code}" == 0 ]; then
echo "${url}" >> "${cache_dir}/urls.list"
- local line_number="$(fgrep -nx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
+ local line_number
+ line_number="$(grep -Fnx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
cp "${output_file}" "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache"
else
return "${exit_code}"
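For context, the caching scheme this script implements is straightforward: every URL that has been fetched is appended to ${cache_dir}/urls.list, and the downloaded payload is stored as obj_NNNNN.cache, where NNNNN is the zero-padded line number of that URL in the list. A later request for the same URL finds its line number with grep -Fnx and copies the matching object back out instead of hitting the network. The following is a minimal, self-contained sketch of that lookup/store logic, not the patched script itself: the function name cached_fetch, the curl call (standing in for the retry-wrapped wget used by the real library), and the example.invalid URLs are illustrative placeholders.

#!/bin/bash
# Minimal sketch of the urls.list / obj_NNNNN.cache scheme used by
# cached_download.sh. curl here is a hypothetical stand-in for the
# retry-wrapped wget invocation in the real script.

cache_dir="$(pwd)/cache"
mkdir -p "${cache_dir}"
touch "${cache_dir}/urls.list"

cached_fetch () {
    local output_file="${1}"
    local url="${2}"
    local line_number

    if grep -Fx "${url}" "${cache_dir}/urls.list" >/dev/null; then
        # Cache hit: the object index is the URL's line number in urls.list.
        line_number="$(grep -Fnx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
        cp "${cache_dir}/obj_$(printf "%05d" "${line_number}").cache" "${output_file}"
    else
        # Cache miss: download, record the URL, then store a copy of the payload.
        curl -fsSL -o "${output_file}" "${url}" || return $?
        echo "${url}" >> "${cache_dir}/urls.list"
        line_number="$(grep -Fnx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
        cp "${output_file}" "${cache_dir}/obj_$(printf "%05d" "${line_number}").cache"
    fi
}

# Illustrative usage (placeholder URL): the second call for the same URL is
# served from the cache without re-downloading.
cached_fetch firefox.checksums       "https://example.invalid/firefox.checksums"
cached_fetch firefox.checksums.again "https://example.invalid/firefox.checksums"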