#! /bin/bash
#
# Necessary preparations/configurations for the reproduction pipeline.
#
# Original author:
#     Mohammad Akhlaghi
# Contributing author(s):
#     Your name
# Copyright (C) 2018-2019, Your Name.
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This script is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# A copy of the GNU General Public License is available at
# <http://www.gnu.org/licenses/>.




# Script settings
# ---------------
# Stop the script if there are any errors.
set -e




# Output of --help
# ----------------
me=$0                              # Executable file name.
help_print() {

    # Status reports to print in the output below.
    if [ x"$build_dir" = x ]; then
        bdir_status="NOT SET"
    else
        bdir_status="$build_dir"
    fi

    if [ x"$input_dir" = x ]; then
        indir_status="NOT SET"
    else
        indir_status="$input_dir"
    fi

    if [ x"$software_dir" = x ]; then
        software_status="NOT SET"
    else
        software_status="$software_dir"
    fi

    if [ $in_minmapsize = 0 ]; then
        mm_status="NOT SET"
    else
        mm_status="$in_minmapsize"
    fi

    if [ $jobs = "0" ]; then
        jobs_status="NUMBER OF THREADS ON SYSTEM"
    else
        jobs_status=$jobs
    fi

    if [ $existing_conf = 1 ]; then
        ec_status="ACTIVATED"
    else
        ec_status="NOT SET"
    fi

    # Print the output.
    cat <<EOF

Usage: $me [OPTION]...

Current settings:
    Build directory:          $bdir_status
    Input directory:          $indir_status
    Software directory:       $software_status
    Minimum mmap size:        $mm_status
    Number of jobs:           $jobs_status
    Existing configuration:   $ec_status

EOF
}




# Notice for files created by this configuration
# -----------------------------------------------
#
# The configuration files created by this script are not under version
# control, so each one starts with a short notice reminding the reader of
# that fact.
function create_file_with_notice() {
    if echo "# IMPORTANT: this file may be re-written by './configure'." > $1
    then
        echo "#"                                                       >> $1
        echo "# This file was created during the reproduction"         >> $1
        echo "# pipeline's configuration ('./configure'). Therefore,"  >> $1
        echo "# it is not under version control and any manual "       >> $1
        echo "# changes to it will be over-written if the pipeline "   >> $1
        echo "# is re-configured."                                     >> $1
        echo "#"                                                       >> $1
    else
        echo; echo "Can't write to $1"; echo; exit 1
    fi
}




# Get absolute address
# --------------------
#
# Since the build directory will go into a symbolic link, we want it to be
# an absolute address. With this function we can make sure of that.
function absolute_dir() {
    echo "$(cd "$(dirname "$1")" && pwd )/$(basename "$1")"
}




# Inform the user
# ---------------
#
# Print some basic information so the user gets a feeling of what is going
# on and is prepared for what will happen next.
cat <<EOF

---------------------------------------------
Configuring the reproduction pipeline ...
---------------------------------------------

EOF




# Check for a downloader
# ----------------------
#
# A downloader is necessary for fetching the software tarballs and input
# data that aren't already present on the system.
if type wget > /dev/null 2>/dev/null; then
    name=$(which wget)

    # By default Wget keeps the remote file's timestamp, so we'll have
    # to disable it manually.
    downloader="$name --no-use-server-timestamps -O";
elif type curl > /dev/null 2>/dev/null; then
    name=$(which curl)

    # - cURL doesn't keep the remote file's timestamp by default.
    # - With the `-L' option, we tell cURL to follow redirects.
    downloader="$name -L -o"
else
    cat <<EOF

!!!!!!! WARNING !!!!!!!
Couldn't find Wget or cURL on this system. Without a downloader, any
necessary file that isn't already present must be placed in the relevant
directory manually.

EOF
    downloader="no-downloader-found"
fi




# Build directory
# ---------------
#
# The top build directory is needed before anything else, so if it wasn't
# given as an option, keep asking until a usable directory is given.
if [ $rewritepconfig = yes ]; then
    bdir=
    junkname=pure-junk
    while [ x"$bdir" = x ]
    do
        # Ask the user (when the value wasn't given on the command line).
        if [ x"$build_dir" = x ]; then
            read -p"Please enter the top build directory: " build_dir
        fi

        # If the directory exists, check that we can write in it (by
        # making a junk directory inside it); otherwise, try creating it.
        if [ -d $build_dir ]; then
            if mkdir $build_dir/$junkname 2> /dev/null; then
                bdir=$(absolute_dir $build_dir)
                echo " -- Build directory: '$bdir'"
                rm -rf $build_dir/$junkname
            else
                echo " -- Can't write in '$build_dir'"
            fi
        else
            if mkdir $build_dir 2> /dev/null; then
                bdir=$(absolute_dir $build_dir)
                echo " -- Build directory set to (the newly created): '$bdir'"
            else
                echo " -- Can't create '$build_dir'"
            fi
        fi

        # Reset `build_dir' to blank, so it continues asking when the
        # previous value wasn't usable.
        build_dir=
    done
fi




# Input directory
# ---------------
if [ x"$input_dir" = x ]; then
    indir=$optionaldir
else
    indir=$input_dir
fi

# Properties of the necessary input dataset (read from 'INPUTS.mk').
wfpc2name=$(awk '!/^#/ && $1=="WFPC2IMAGE" {print $3}' $pdir/INPUTS.mk)
wfpc2md5=$(awk  '!/^#/ && $1=="WFPC2MD5"   {print $3}' $pdir/INPUTS.mk)
wfpc2size=$(awk '!/^#/ && $1=="WFPC2SIZE"  {print $3}' $pdir/INPUTS.mk)
wfpc2url=$(awk  '!/^#/ && $1=="WFPC2URL"   {print $3}' $pdir/INPUTS.mk)

# If no input directory was given, show the user what is needed and ask
# for an (optional) local directory that already contains it.
if [ $rewritepconfig = yes ] && [ x"$input_dir" = x ]; then
    cat <<EOF

The following input dataset is necessary. If it is not available in a
local directory, it will be downloaded during the build:

    $wfpc2name ($wfpc2size)
    MD5 checksum: $wfpc2md5
    Available at: $wfpc2url

EOF
    read -p"(OPTIONAL) Directory containing the input dataset: " input_dir
    if [ x"$input_dir" != x ]; then
        indir=$(absolute_dir $input_dir)
    fi
fi




# Local configuration file
# ------------------------
#
# Write the values found/asked above into the local configuration file
# (when requested), or read them from an existing one.
if [ $rewritepconfig = yes ]; then
    create_file_with_notice $pconf
    echo "BDIR       = $bdir"        >> $pconf
    echo "INDIR      = $indir"       >> $pconf
    echo "DOWNLOADER = $downloader"  >> $pconf
else
    # Read the values from existing configuration file.
    inbdir=$(awk '$1=="BDIR" {print $3}' $pconf)

    # The downloader command may contain multiple elements, so we'll just
    # change the (in memory) first and second tokens to empty space and
    # write the full line (the original file is unchanged).
    downloader=$(awk '$1=="DOWNLOADER" {$1=""; $2=""; print $0}' $pconf)

    # Make sure all necessary variables have a value.
    err=0
    verr=0
    novalue=""
    if [ x"$inbdir" = x ];     then novalue="BDIR, ";             fi
    if [ x"$downloader" = x ]; then novalue="$novalue"DOWNLOADER; fi
    if [ x"$novalue" != x ];   then verr=1; err=1;                fi

    # Make sure `bdir' is an absolute path and it exists.
    berr=0
    ierr=0
    bdir=$(absolute_dir $inbdir)
    if ! [ -d $bdir ]; then if ! mkdir $bdir; then berr=1; err=1; fi; fi

    # Report any problem with the existing configuration and abort.
    if [ $err = 1 ]; then
        cat <<EOF

!!!!!! ERROR in the existing configuration file ($pconf) !!!!!!

EOF
        if [ $verr = 1 ]; then
            echo " -- No value given for: $novalue"
        fi
        if [ $berr = 1 ]; then
            echo " -- Couldn't create the build directory '$inbdir'."
        fi
        echo
        echo "Please correct (or delete) '$pconf' and re-run './configure'."
        echo
        exit 1
    fi
fi




# --------- Delete for no Gnuastro ---------
# Gnuastro's local configuration
# ------------------------------
#
# Gnuastro's programs will read these common options (the minimum size to
# use memory-mapping, and the exact version to use) from this file.
if ! [ -f $glconf ]; then
    create_file_with_notice $glconf
    echo "# Minimum size (in bytes) to use memory-mapping (not RAM)." >> $glconf
    echo " minmapsize $minmapsize"                                    >> $glconf
    echo                                                              >> $glconf
    echo "# Version of Gnuastro that must be used."                   >> $glconf
    echo " onlyversion $gversion"                                     >> $glconf
else
    ingversion=$(awk '$1=="onlyversion" {print $NF}' $glconf)
    if [ x$ingversion != x$gversion ]; then
        echo "______________________________________________________"
        echo "!!!!!!!!!!!!!!!!!!CONFIGURATION ERROR!!!!!!!!!!!!!!!!!"
        echo
        echo "Gnuastro's version in '$glconf' ($ingversion) doesn't match the tarball version that this pipeline was designed to use in '$depverfile' ($gversion). Please re-run after removing the former file:"
        echo
        echo "    $ rm $glconf"
        echo "    $ ./configure"
        echo
        echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        echo
        exit 1
    fi
fi
# ------------------------------------------




# Setup the top-level directories
# -------------------------------
rm -f $lbdir
ln -s $bdir $lbdir

depdir=$bdir/dependencies
if ! [ -d $depdir ]; then mkdir $depdir; fi

tardir=$depdir/tarballs
if ! [ -d $tardir ]; then mkdir $tardir; fi

instdir=$depdir/installed
if ! [ -d $instdir ]; then mkdir $instdir; fi

rm -f $installedlink
ln -s $instdir $installedlink

# --------- Delete for no Gnuastro ---------
rm -f .gnuastro
ln -s $(pwd)/reproduce/config/gnuastro .gnuastro
# ------------------------------------------




# See if the C compiler can build static libraries
# ------------------------------------------------
#
# We are only working with shared libraries here: some high-level programs
# like Wget and cURL need dynamic linking, and if we build the libraries
# statically, our own builds will be ignored and these programs will go
# and find their necessary libraries on the host system.
#
# Another good advantage of shared libraries is that we can actually use
# the shared library tool of the system (`ldd' with the GNU C Library) and
# see exactly where each linked library comes from. But in a static build,
# unless you follow the build closely, it's not easy to see if the source
# of the library came from the system or our own build.
static_build=no
#oprog=$depdir/static-test
#cprog=$depdir/static-test.c
#echo "#include <stdio.h>"          > $cprog
#echo "int main(void) {return 0;}" >> $cprog
#if [ x$CC = x ]; then CC=gcc; fi;
#if $CC $cprog -o$oprog -static &> /dev/null; then
#    export static_build="yes"
#else
#    export static_build="no"
#fi
#rm -f $oprog $cprog
#if [ $printnotice = yes ] && [ $static_build = "no" ]; then
#    cat <<EOF
#EOF
#fi




# See if the linker accepts -Wl,-rpath-link
# -----------------------------------------
#
# `-rpath-link' tells the linker where to look for the libraries that our
# own installed libraries depend on at link time.
oprog=$depdir/rpath-test
cprog=$depdir/rpath-test.c
echo "#include <stdio.h>"          > $cprog
echo "int main(void) {return 0;}" >> $cprog
if [ x$CC = x ]; then CC=gcc; fi;
if $CC $cprog -o$oprog -Wl,-rpath-link &> /dev/null; then
    export rpath_command="-Wl,-rpath-link=$instdir/lib"
else
    export rpath_command=""
fi
rm -f $oprog $cprog




# See if we need the dynamic-linker (-ldl)
# ----------------------------------------
#
# Some programs (like Wget) need dynamic loading (using `libdl'). On
# GNU/Linux systems, we'll need the `-ldl' flag to link such programs. But
# Mac OS doesn't need any explicit calling. So we'll check here and use
# the result when building those programs.
oprog=$depdir/ldl-test
cprog=$depdir/ldl-test.c
cat > $cprog <<EOF
#include <stdio.h>
#include <dlfcn.h>
int
main(void) {
    void *handle=dlopen ("/lib/CEDD_LIB.so.6", RTLD_LAZY);
    return 0;
}
EOF
if $CC $cprog -o$oprog &> /dev/null; then needs_ldl=no; else needs_ldl=yes; fi
rm -f $oprog $cprog




# Inform the user that the build process is starting
# ---------------------------------------------------
if [ $printnotice = yes ]; then
    tsec=10
    cat <<EOF

-------------------------
Building dependencies ...
-------------------------

The necessary dependency programs and libraries will now be built. This
may take a while; the build will start in $tsec seconds.

EOF
    sleep $tsec
fi




# See if we are on a Mac OS system
# --------------------------------
#
# (`sw_vers' is only available on Mac OS, so its presence is a simple
# check for the host operating system.)
if type sw_vers > /dev/null 2>/dev/null; then
    on_mac_os=yes
else
    on_mac_os=no
fi




# Build `flock' as the first program
# ----------------------------------
#
# Flock (or file-lock) is a unique program in the pipeline: it is needed
# to serialize the (generally parallel) processing of Make when
# necessary. GNU/Linux machines have it as part of their `util-linux'
# programs. But to be consistent, we will be using our own build.
#
# The reason it's special is that we need it to serialize the download
# process of the dependency tarballs.
flockversion=$(awk '/flock-version/{print $3}' \
                   reproduce/config/pipeline/dependency-versions.mk)
flocktar=flock-$flockversion.tar.gz
flockurl=http://github.com/discoteq/flock/releases/download/v$flockversion/

# Prepare/download the tarball.
if ! [ -f $tardir/$flocktar ]; then
    if [ -f $ddir/$flocktar ]; then
        cp $ddir/$flocktar $tardir/$flocktar
    else
        if ! $downloader $tardir/$flocktar $flockurl/$flocktar; then
            rm -f $tardir/$flocktar;
            echo
            echo "DOWNLOAD ERROR: Couldn't download the 'flock' tarball:"
            echo "  $flockurl"
            echo
            echo "You can manually place it in '$ddir' to avoid downloading."
            exit 1
        fi
    fi
fi

# If the tarball is newer than the (possibly existing) program, then delete
# the program.
if [ -f .local/bin/flock ]; then
    if [ $tardir/$flocktar -nt .local/bin/flock ]; then
        rm .local/bin/flock
    fi
fi

# Build `flock' if necessary.
if ! [ -f .local/bin/flock ]; then
    cd $depdir
    tar xf $tardir/$flocktar
    cd flock-$flockversion
    ./configure --prefix=$instdir
    make; make install
    cd $topdir
    rm -rf $depdir/flock-$flockversion
fi




# Build basic dependencies
# ------------------------
#
# Since the system might not have GNU Make at this stage, and other Make
# implementations can't deal with parallel builds properly, we'll just
# default to one thread. This is because some versions of Make complain
# about not having enough 'pipe' (memory) on some systems; after some
# searching, I found out it's because of too many threads. GNU Make will
# be present on GNU systems (which have `nproc', part of GNU Coreutils),
# so to simplify the test for GNU Make, we'll just try running `nproc'.
if which nproc > /dev/null 2>/dev/null; then
    if [ $jobs = 0 ]; then
        numthreads=$(nproc --all);
    else
        numthreads=$jobs
    fi
else
    numthreads=1;
fi
make -f reproduce/src/make/dependencies-basic.mk \
     rpath_command=$rpath_command \
     static_build=$static_build \
     needs_ldl=$needs_ldl \
     on_mac_os=$on_mac_os \
     numthreads=$numthreads \
     -j$numthreads




# Rest of dependencies
# --------------------
#
# We will be making all the dependencies before running the top-level
# Makefile. To make the job easier, we'll do it in a Makefile, not a
# script. Bash and Make are the tools needed to run Makefiles, so we had
# to build them in this script; from here on, we can rely on Makefiles.
numthreads=$($instdir/bin/nproc)
./.local/bin/make -f reproduce/src/make/dependencies.mk \
                  rpath_command=$rpath_command \
                  static_build=$static_build \
                  on_mac_os=$on_mac_os \
                  numthreads=$numthreads \
                  -j$numthreads




# Python dependencies
# -------------------
#
# Python has its own installation program. To help in managing the
# dependencies, we install the Python packages through a separate
# Makefile.
./.local/bin/make -f reproduce/src/make/dependencies-python.mk \
                  rpath_command=$rpath_command \
                  static_build=$static_build \
                  on_mac_os=$on_mac_os \
                  numthreads=$numthreads \
                  -j$numthreads




# Make sure TeX Live was installed successfully
# ---------------------------------------------
#
# TeX Live is installed over the internet, so if there is no connection,
# or it is suddenly cut, TeX Live can't be built. However, when TeX Live
# isn't installed, the pipeline can still do all of its processing
# independent of it: it will just stop at the stage when all the
# processing is complete and only the PDF remains to be built. So we don't
# want to stop the pipeline if TeX Live isn't present.
texlive_result=$(cat $instdir/bin/texlive-ready-tlmgr)
if [ x"$texlive_result" = x"NOT!" ]; then
    cat <<EOF