author     Mohammad Akhlaghi <mohammad@akhlaghi.org>   2020-02-11 00:38:11 +0100
committer  Mohammad Akhlaghi <mohammad@akhlaghi.org>   2020-02-11 00:38:11 +0100
commit     6d68865a5d083b8892c3f4854284bb2036f1efdf (patch)
tree       1eb2874d93efc7ba0ec16d4528d241a6a340122a /reproduce/analysis/bash
parent     24be94568eeaeb5c51071687070a4c0ffa06a1ef (diff)
Using backup server when original download server fails
Until now, the main download script could only try the one server named in the given URL. However, the actual server that a file is downloaded from is irrelevant for this project: the integrity of every downloaded file is verified through its checksum. Especially for software tarballs (which are distributed over many servers), this was very annoying: when a server doesn't communicate properly with the running system, even the 10 download attempts won't be enough.

With this commit, the download script `reproduce/analysis/bash/download-multi-try' can take a new optional fifth argument: a space-separated list of server(s) to use as backup for the original URL. When downloading from the original URL fails, the script goes through this list and tries to download the same file from each given server.
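For illustration, a hypothetical invocation with the new fifth argument could look like the following (the downloader, URL and mirror addresses are placeholders, not part of this commit; 'nolock' skips the lock file, as the script allows):

    ./reproduce/analysis/bash/download-multi-try "wget -O" nolock \
        http://example.org/src/foo-1.0.tar.gz foo-1.0.tar.gz \
        "http://mirror-a.example.org/src http://mirror-b.example.org/src"

Note that the backup list is passed as a single quoted string, so it arrives in the script as one argument ($5) that is later split on spaces.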
Diffstat (limited to 'reproduce/analysis/bash')
-rwxr-xr-x  reproduce/analysis/bash/download-multi-try | 22 +++++++++++++++++++++-
1 file changed, 21 insertions(+), 1 deletion(-)
diff --git a/reproduce/analysis/bash/download-multi-try b/reproduce/analysis/bash/download-multi-try
index dec2539..7a5f8f2 100755
--- a/reproduce/analysis/bash/download-multi-try
+++ b/reproduce/analysis/bash/download-multi-try
@@ -51,11 +51,13 @@ set -e
-# Input arguments and necessary sanity checks.
+# Input arguments and necessary sanity checks. Note that the 5th argument
+# (backup servers) isn't mandatory.
 inurl="$3"
 outname="$4"
 lockfile="$2"
 downloader="$1"
+backupservers="$5"
 if [ "x$downloader" = x ]; then
     echo "$0: downloader (first argument) not given."; exit 1;
 fi
@@ -73,6 +75,13 @@ fi
+# Separate the actual filename, to possibly use backup server.
+urlfile=$(echo "$inurl" | awk -F "/" '{print $NF}')
+
+
+
+
+
 # Try downloading multiple times before crashing.
 counter=0
 maxcounter=10
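The awk command in the hunk above extracts everything after the last '/' in the URL; this filename is later appended to each backup server's address to build the fallback URL. For example (a hypothetical URL, only for illustration):

    $ echo "http://example.org/src/foo-1.0.tar.gz" | awk -F "/" '{print $NF}'
    foo-1.0.tar.gz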
@@ -107,8 +116,19 @@ while [ ! -f "$outname" ]; do
     if [ x"$lockfile" = xnolock ]; then
         if ! $downloader $outname $inurl; then rm -f $outname; fi
     else
+        # Try downloading from the requested URL.
         flock "$lockfile" bash -c \
               "if ! $downloader $outname $inurl; then rm -f $outname; fi"
+
+        # If it failed, try the backup server(s).
+        if [ ! -f "$outname" ]; then
+            if [ x"$backupservers" != x ]; then
+                for bs in $backupservers; do
+                    flock "$lockfile" bash -c \
+                          "if ! $downloader $outname $bs/$urlfile; then rm -f $outname; fi"
+                done
+            fi
+        fi
     fi
 done
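To see how the backup list is consumed, here is a minimal sketch of the fallback loop in isolation (the server and file names are placeholders; the real loop above wraps each attempt in flock and the user-given downloader):

    backupservers="http://mirror-a.example.org/src http://mirror-b.example.org/src"
    urlfile=foo-1.0.tar.gz
    for bs in $backupservers; do    # Unquoted, so the shell splits on spaces.
        echo "Would try: $bs/$urlfile"
    done

Leaving $backupservers unquoted is what splits the single fifth argument into the individual servers, so each iteration builds one candidate URL.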