From 1c508e636b90ae170213ccf71771711156dd8f52 Mon Sep 17 00:00:00 2001 From: Mohammad Akhlaghi Date: Wed, 6 Feb 2019 18:08:19 +0000 Subject: Wrapper script for multiple attempts at downloading inputs Until now, downloading was treated similarly to any other operation in the Makefile: if it crashes, the pipeline would crash. But network errors aren't like processing errors: attempting to download a second time will probably not crash (network relays are very complex and not reproducible and packages get lost all the time)! This is usually not felt in downloading one or two files, but when downloading many thousands of files, it will happen every once in a while and it's a real waste of time until you check to just press enter again! With this commit we have the `reproduce/src/bash/download-multi-try.sh' script in the pipeline which will repeat the download several times (with increasing time intervals) before crashing and thus fix the problem. --- reproduce/src/make/download.mk | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'reproduce/src/make') diff --git a/reproduce/src/make/download.mk b/reproduce/src/make/download.mk index f83ad6f..1fc51e6 100644 --- a/reproduce/src/make/download.mk +++ b/reproduce/src/make/download.mk @@ -52,6 +52,7 @@ # process with a file and make sure that only one downloading event is in # progress at every moment. $(indir):; mkdir $@ +downloadwrapper = $(srcdir)/bash/download-multi-try.sh inputdatasets = $(foreach i, wfpc2, $(indir)/$(i).fits) $(inputdatasets): $(indir)/%.fits: | $(indir) $(lockdir) @@ -68,8 +69,7 @@ $(inputdatasets): $(indir)/%.fits: | $(indir) $(lockdir) ln -s $(INDIR)/$$origname $@ else touch $(lockdir)/download - flock $(lockdir)/download bash -c \ - "if ! wget -O$@ $$url/$$origname; then rm -f $@; exit 1; fi" + $(downloadwrapper) wget $(lockdir)/download $$url/$$origname $@ fi # Check the md5 sum to see if this is the proper dataset. -- cgit v1.2.1