Diffstat (limited to 'reproduce')
-rw-r--r--   reproduce/analysis/make/initialize.mk                 20
-rw-r--r--   reproduce/analysis/make/paper.mk                        6
-rw-r--r--   reproduce/analysis/make/top.mk                         12
-rwxr-xr-x   reproduce/software/bash/configure.sh                 1224
-rw-r--r--   reproduce/software/config/installation/LOCAL.mk.in      4
-rwxr-xr-x   reproduce/software/make/atlas-multiple.mk               4
-rwxr-xr-x   reproduce/software/make/atlas-single.mk                 4
-rw-r--r--   reproduce/software/make/basic.mk                       10
-rw-r--r--   reproduce/software/make/high-level.mk                  13
-rw-r--r--   reproduce/software/make/python.mk                       4
10 files changed, 1269 insertions, 32 deletions
diff --git a/reproduce/analysis/make/initialize.mk b/reproduce/analysis/make/initialize.mk
index d2b026e..f8a09e6 100644
--- a/reproduce/analysis/make/initialize.mk
+++ b/reproduce/analysis/make/initialize.mk
@@ -84,8 +84,6 @@ tikzdir = $(texbdir)/tikz
# Before defining the local sub-environment here, we'll need to save the
# system's environment for some scenarios (for example after `clean'ing the
# built programs).
-sys-path := $(PATH)
-sys-rm := $(shell which rm)
curdir := $(shell echo $$(pwd))
@@ -97,7 +95,7 @@ curdir := $(shell echo $$(pwd))
#
# We want the full recipe to be executed in one call to the shell. Also we
# want Make to run the specific version of Bash that we have installed
-# during `./configure' time.
+# during `./project configure' time.
#
# Regarding the directories, this project builds its major dependencies
# itself and doesn't use the local system's default tools. With these
@@ -116,7 +114,11 @@ export LDFLAGS := -L$(installdir)/lib
export SHELL := $(installdir)/bin/bash
export CPPFLAGS := -I$(installdir)/include
export LD_LIBRARY_PATH := $(installdir)/lib
-export DYLD_LIBRARY_PATH := $(installdir)/lib
+
+# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is
+# ultimately redundant. But on some systems, even having a single value
+# causes crashes (see bug #56682). So we'll just give it no value at all.
+export DYLD_LIBRARY_PATH :=
@@ -190,15 +192,17 @@ clean: clean-mmap
# features like ignoring the listing of a file with `!()' that we
# are using afterwards.
shopt -s extglob
- rm -rf $(BDIR)/!(software)
+ rm -rf $(BDIR)/tex/macros/!(dependencies.tex|dependencies-bib.tex)
+ rm -rf $(BDIR)/!(software|tex) $(BDIR)/tex/!(macros|build)
+ rm -rf $(BDIR)/tex/build/!(tikz) $(BDIR)/tex/build/tikz/*
distclean: clean
# We'll be deleting the built environment programs and just need the
# `rm' program. So for this recipe, we'll use the host system's
# `rm', not our own.
- $(sys-rm) -rf $(BDIR)
- $(sys-rm) -f Makefile .gnuastro .local .build
- $(sys-rm) -f $(pconfdir)/LOCAL.mk $(gconfdir)/gnuastro-local.conf
+ $$sys-rm -rf $(BDIR)
+ $$sys-rm -f Makefile .gnuastro .local .build
+ $$sys-rm -f $(pconfdir)/LOCAL.mk $(gconfdir)/gnuastro-local.conf
diff --git a/reproduce/analysis/make/paper.mk b/reproduce/analysis/make/paper.mk
index c17028a..436bac4 100644
--- a/reproduce/analysis/make/paper.mk
+++ b/reproduce/analysis/make/paper.mk
@@ -64,11 +64,9 @@ $(mtexdir)/project.tex: $(foreach s, $(subst paper,,$(makesrc)), $(mtexdir)/$(s)
echo "'reproduce/analysis/config/pdf-build.mk'."
echo
echo "If you don't have LaTeX within the project, please re-run"
- echo "'./configure' when you have internet access. To speed it up,"
- echo "you can keep the previous configuration files (answer 'n'"
- echo "when it asks about re-writing previous configuration files)."
+ echo "'./project configure -e' when you have internet access."
else
- echo "For more, run './.local/bin/make more-on-building-pdf=1'"
+ echo "For more, run './project make more-on-building-pdf=1'"
fi
echo
echo "" > $@
diff --git a/reproduce/analysis/make/top.mk b/reproduce/analysis/make/top.mk
index b753979..7d20800 100644
--- a/reproduce/analysis/make/top.mk
+++ b/reproduce/analysis/make/top.mk
@@ -19,7 +19,8 @@
-# Load the local configuration (created after running `./configure').
+# Load the local configuration (created after running
+# `./project configure').
include reproduce/software/config/installation/LOCAL.mk
@@ -49,9 +50,8 @@ include reproduce/software/config/installation/LOCAL.mk
#
# Controlling this requires two variables that are available at this stage:
#
-# - `GROUP-NAME': from `LOCAL.mk' (which was built by `./configure').
-# - `reproducible_paper_group_name': from the `./for-group' script (if it
-# was used to call Make).
+# - `GROUP-NAME': from `LOCAL.mk' (which was built by `./project configure').
+# - `reproducible_paper_group_name': the value given to the `--group' option.
#
# The analysis is only done when both have the same group name. Note that
# when the project isn't being built for a group, both variables will be an
@@ -70,10 +70,10 @@ else
all:
@if [ "x$(GROUP-NAME)" = x ]; then \
echo "Project is NOT configured for groups, please run"; \
- echo " $$ .local/bin/make"; \
+ echo " $$ ./project make"; \
else \
echo "Project is configured for groups, please run"; \
- echo " $$ ./for-group $(GROUP-NAME) make -j8"; \
+ echo " $$ ./project make --group=$(GROUP-NAME) -j8"; \
fi
endif
diff --git a/reproduce/software/bash/configure.sh b/reproduce/software/bash/configure.sh
new file mode 100755
index 0000000..65b728e
--- /dev/null
+++ b/reproduce/software/bash/configure.sh
@@ -0,0 +1,1224 @@
+#! /bin/bash
+#
+# Necessary preparations/configurations for the reproducible project.
+#
+# Copyright (C) 2018-2019 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+#
+# This script is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+#
+# This script is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# A copy of the GNU General Public License is available at
+# <http://www.gnu.org/licenses/>.
+
+
+# Script settings
+# ---------------
+# Stop the script if there are any errors.
+set -e
+
+
+
+
+
+# Internal directories
+# --------------------
+#
+# These are defined to help make this script more readable.
+topdir=$(pwd)
+optionaldir="/optional/path"
+adir=reproduce/analysis/config
+cdir=reproduce/software/config
+
+sbdir=$cdir/installation
+
+pconf=$sbdir/LOCAL.mk
+ptconf=$sbdir/LOCAL_tmp.mk
+poconf=$sbdir/LOCAL_old.mk
+depverfile=$cdir/installation/versions.mk
+# --------- Delete for no Gnuastro ---------
+glconf=$cdir/gnuastro/gnuastro-local.conf
+# ------------------------------------------
+
+
+
+
+
+# Notice for top of generated files
+# ---------------------------------
+#
+# In case someone opens the files output from the configuration scripts in
+# a text editor and wants to edit them, it is important to let them know
+# that their changes are not going to be permanent.
+function create_file_with_notice() {
+ if echo "# IMPORTANT: file can be RE-WRITTEN after './project configure'" > "$1"
+ then
+ echo "#" >> "$1"
+ echo "# This file was created during configuration" >> "$1"
+ echo "# ('./project configure'). Therefore, it is not under" >> "$1"
+ echo "# version control and any manual changes to it will be" >> "$1"
+ echo "# over-written if the project is re-configured." >> "$1"
+ echo "#" >> "$1"
+ else
+ echo; echo "Can't write to $1"; echo;
+ exit 1
+ fi
+}
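+
+# For illustration, this is how the function is used further below for the
+# project's local configuration file (the same call, with `$pconf'
+# expanded):
+#
+#     create_file_with_notice reproduce/software/config/installation/LOCAL.mk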
+
+
+
+
+
+# Get absolute address
+# --------------------
+#
+# Since the build directory will go into a symbolic link, we want it to be
+# an absolute address. With this function we can make sure of that.
+function absolute_dir() {
+ if stat "$1" 1> /dev/null; then
+ echo "$(cd "$(dirname "$1")" && pwd )/$(basename "$1")"
+ else
+ exit 1;
+ fi
+}
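+
+# A minimal usage sketch (the relative path here is hypothetical): the
+# function prints the absolute version of the given directory so it can be
+# stored back into a variable.
+#
+#     build_dir=../my-build-dir
+#     bdir=$(absolute_dir $build_dir)    # e.g. /home/user/my-build-dir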
+
+
+
+
+
+# Inform the user
+# ---------------
+#
+# Print some basic information so the user gets a feeling of what is going
+# on and is prepared on what will happen next.
+cat <<EOF
+
+-----------------------------
+Project's local configuration
+-----------------------------
+
+Local configuration includes things like top-level directories, or
+processing steps. It is STRONGLY recommended to read the comments, and set
+the best values for your system (where necessary).
+
+EOF
+
+
+
+
+
+# What to do with possibly existing configuration file
+# ----------------------------------------------------
+#
+# `LOCAL.mk' is the top-most local configuration for the project. If it
+# already exists when this script is run, we'll make a copy of it as a backup
+# (for example the user might have run `./project configure' by mistake).
+printnotice=yes
+rewritepconfig=yes
+rewritegconfig=yes
+if [ -f $pconf ] || [ -f $glconf ]; then
+ if [ $existing_conf = 1 ]; then
+ printnotice=no
+ if [ -f $pconf ]; then rewritepconfig=no; fi
+ if [ -f $glconf ]; then rewritegconfig=no; fi
+ fi
+fi
+
+
+
+
+# Make sure the group permissions satisfy the previous configuration (if it
+# exists and we don't want to re-write it).
+if [ $rewritepconfig = no ]; then
+ oldgroupname=$(awk '/GROUP-NAME/ {print $3; exit 0}' $pconf)
+ if [ "x$oldgroupname" = "x$reproducible_paper_group_name" ]; then
+ just_a_place_holder_to_avoid_not_equal_test=1;
+ else
+ echo "-----------------------------"
+ echo "!!!!!!!! ERROR !!!!!!!!"
+ echo "-----------------------------"
+ if [ "x$oldgroupname" = x ]; then
+ status="NOT configured for groups"
+ confcommand="./project configure"
+ else
+ status="configured for '$oldgroupname' group"
+ confcommand="./project configure --group=$oldgroupname"
+ fi
+ echo "Project was previously $status!"
+ echo "Either enable re-write of this configuration file,"
+ echo "or re-run this configuration like this:"
+ echo
+ echo " $confcommand"; echo
+ exit 1
+ fi
+fi
+
+
+
+
+
+# Identify the downloader tool
+# ----------------------------
+#
+# After this script finishes, we will have both Wget and cURL for
+# downloading any necessary dataset during the processing. However, to
+# complete the configuration, we may also need to download the source code
+# of some necessary software packages (including the downloaders). So we
+# need to check the host's available tool for downloading at this step.
+if [ $rewritepconfig = yes ]; then
+ if type wget > /dev/null 2>/dev/null; then
+ name=$(which wget)
+
+ # By default Wget keeps the remote file's timestamp, so we'll have
+ # to disable it manually.
+ downloader="$name --no-use-server-timestamps -O";
+ elif type curl > /dev/null 2>/dev/null; then
+ name=$(which curl)
+
+ # - cURL doesn't keep the remote file's timestamp by default.
+ # - With the `-L' option, we tell cURL to follow redirects.
+ downloader="$name -L -o"
+ else
+ cat <<EOF
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+Couldn't find GNU Wget or cURL on this system. These programs are used for
+downloading necessary programs and data if they aren't already present (in
+directories that you can specify with this configure script). Therefore, if
+the necessary files are not present, the project will crash.
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+ downloader="no-downloader-found"
+ fi;
+fi
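+
+# In either case, `$downloader' is later called with the output file first
+# and the URL second, for example (a hypothetical tarball):
+#
+#     $downloader /path/to/tarballs/pkg-1.0.tar.gz \
+#                 http://example.org/pkg-1.0.tar.gz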
+
+
+
+
+
+# Build directory
+# ---------------
+if [ $rewritepconfig = yes ]; then
+ cat <<EOF
+
+===============
+Build directory
+===============
+
+The project's "source" (this directory) and "build" directories are treated
+separately. This greatly helps in managing the many intermediate files that
+are created during the build. The intermediate build files don't need to be
+archived or backed up: you can always re-build them with the contents of
+the source directory. The build directory also needs a relatively large
+amount of free space (at least several gigabytes), while the source
+directory (all plain text) will usually be a megabyte or less.
+
+'.build' (a symbolic link to the build directory) will also be created
+during this configuration. It encourages you to set the actual build
+directory at a very different location from this one (one that can be
+deleted and has a large volume), while keeping easy access to it from here.
+
+EOF
+ bdir=
+ junkname=pure-junk-974adfkj38
+ while [ x$bdir == x ]
+ do
+ # Ask the user (if not already set on the command-line).
+ if [ x"$build_dir" = x ]; then
+ read -p"Please enter the top build directory: " build_dir
+ fi
+
+ # If it exists, see if we can write in it. If not, try making it.
+ if [ -d $build_dir ]; then
+ if mkdir $build_dir/$junkname 2> /dev/null; then
+ bdir=$(absolute_dir $build_dir)
+ echo " -- Build directory: '$bdir'"
+ rm -rf $build_dir/$junkname
+ else
+ echo " -- Can't write in '$build_dir'"
+ fi
+ else
+ if mkdir $build_dir 2> /dev/null; then
+ bdir=$(absolute_dir $build_dir)
+ echo " -- Build directory set to (the newly created): '$bdir'"
+ else
+ echo " -- Can't create '$build_dir'"
+ fi
+ fi
+
+ # Reset `build_dir' to blank, so it continues asking when the
+ # previous value wasn't usable.
+ build_dir=
+ done
+fi
+
+
+
+
+
+# Input directory
+# ---------------
+if [ x"$input_dir" = x ]; then
+ indir=$optionaldir
+else
+ indir=$input_dir
+fi
+wfpc2name=$(awk '!/^#/ && $1=="WFPC2IMAGE" {print $3}' $adir/INPUTS.mk)
+wfpc2md5=$(awk '!/^#/ && $1=="WFPC2MD5" {print $3}' $adir/INPUTS.mk)
+wfpc2size=$(awk '!/^#/ && $1=="WFPC2SIZE" {print $3}' $adir/INPUTS.mk)
+wfpc2url=$(awk '!/^#/ && $1=="WFPC2URL" {print $3}' $adir/INPUTS.mk)
+if [ $rewritepconfig = yes ] && [ x"$input_dir" = x ]; then
+ cat <<EOF
+
+----------------------------------
+(OPTIONAL) Input dataset directory
+----------------------------------
+
+This project needs the dataset(s) listed below. If you already have them,
+please specify the directory hosting them on this system. If you don't,
+they will be downloaded automatically. Each file is shown with its total
+volume and its 128-bit MD5 checksum in parentheses.
+
+ $wfpc2name ($wfpc2size, $wfpc2md5):
+ A 100x100 Hubble Space Telescope WFPC II image used in the FITS
+ standard webpage as a demonstration of this file format.
+ URL: $wfpc2url/$wfpc2name
+
+NOTE I: This directory and the datasets above are optional. If it doesn't
+exist, the files will be downloaded into the build directory and used.
+
+NOTE II: This directory (if given) will only be read, nothing will be
+written into it, so no writing permissions are necessary.
+
+TIP: If you have these files in multiple directories on your system and
+don't want to download them or make duplicates, you can create symbolic
+links to them and put those symbolic links in the given top-level
+directory.
+
+EOF
+ read -p"(OPTIONAL) Input datasets directory ($indir): " inindir
+ if [ x$inindir != x ]; then
+ indir=$inindir
+ echo " -- Using '$indir'"
+ fi
+fi
+
+
+
+
+
+# Dependency tarball directory
+# ----------------------------
+if [ x"$software_dir" = x ]; then
+ ddir=$optionaldir
+else
+ ddir=$software_dir
+fi
+if [ $rewritepconfig = yes ] && [ x"$software_dir" = x ]; then
+ cat <<EOF
+
+---------------------------------------
+(OPTIONAL) Software tarball directory
+---------------------------------------
+
+To ensure an identical build environment, the project will use its own
+build of the programs it needs. Therefore the tarballs of the relevant
+programs are necessary. If a tarball isn't present in the specified
+directory, *IT WILL BE DOWNLOADED* automatically.
+
+If you don't specify any directory here, or it doesn't contain the tarball
+of a dependency, it is necessary to have an internet connection. The
+project will download the tarballs it needs automatically.
+
+EOF
+ read -p"(OPTIONAL) Directory of dependency tarballs ($ddir): " tmpddir
+ if [ x"$tmpddir" != x ]; then
+ ddir=$tmpddir
+ echo " -- Using '$ddir'"
+ fi
+fi
+
+
+
+
+
+# Write the parameters into the local configuration file.
+if [ $rewritepconfig = yes ]; then
+
+ # Add commented notice.
+ create_file_with_notice $pconf
+
+ # Write the values.
+ sed -e's|@bdir[@]|'"$bdir"'|' \
+ -e's|@indir[@]|'"$indir"'|' \
+ -e's|@ddir[@]|'"$ddir"'|' \
+ -e's|@downloader[@]|'"$downloader"'|' \
+ -e's|@groupname[@]|'"$reproducible_paper_group_name"'|' \
+ $pconf.in >> $pconf
+else
+ # Read the values from existing configuration file.
+ inbdir=$(awk '$1=="BDIR" {print $3}' $pconf)
+
+ # Read the software directory.
+ ddir=$(awk '$1=="DEPENDENCIES-DIR" {print $3}' $pconf)
+
+ # The downloader command may contain multiple elements, so we'll just
+ # change the (in memory) first and second tokens to empty space and
+ # write the full line (the original file is unchanged).
+ downloader=$(awk '$1=="DOWNLOADER" {$1=""; $2=""; print $0}' $pconf)
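+
+# For reference, the lines read above follow a Make-style `NAME = value'
+# layout inside `LOCAL.mk', something like the following (illustrative,
+# hypothetical values):
+#
+#     BDIR             = /home/user/project-build
+#     DEPENDENCIES-DIR = /home/user/tarballs
+#     DOWNLOADER       = /usr/bin/wget --no-use-server-timestamps -O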
+
+ # Make sure all necessary variables have a value
+ err=0
+ verr=0
+ novalue=""
+ if [ x"$inbdir" = x ]; then novalue="BDIR, "; fi
+ if [ x"$downloader" = x ]; then novalue="$novalue"DOWNLOADER; fi
+ if [ x"$novalue" != x ]; then verr=1; err=1; fi
+
+ # Make sure `bdir' is an absolute path and it exists.
+ berr=0
+ ierr=0
+ bdir=$(absolute_dir $inbdir)
+
+ if ! [ -d $bdir ]; then if ! mkdir $bdir; then berr=1; err=1; fi; fi
+ if [ $err = 1 ]; then
+ cat <<EOF
+
+#################################################################
+######## ERROR reading existing configuration file ############
+#################################################################
+EOF
+ if [ $verr = 1 ]; then
+ cat <<EOF
+
+These variables have no value: $novalue.
+EOF
+ fi
+ if [ $berr = 1 ]; then
+ cat <<EOF
+
+Couldn't create the build directory '$bdir' (the value of 'BDIR') in
+'$pconf'.
+EOF
+ fi
+
+ cat <<EOF
+
+Please run the configure script again (accepting to re-write existing
+configuration file) so all the values can be filled and checked.
+#################################################################
+EOF
+ fi
+fi
+
+
+
+
+
+# --------- Delete for no Gnuastro ---------
+# Get the version of Gnuastro that must be used.
+gversion=$(awk '$1=="gnuastro-version" {print $NF}' $depverfile)
+
+# Gnuastro's local configuration settings
+if [ $rewritegconfig = yes ]; then
+ create_file_with_notice $glconf
+ echo "# Minimum number of bytes to use HDD/SSD instead of RAM." >> $glconf
+ echo " minmapsize $minmapsize" >> $glconf
+ echo >> $glconf
+ echo "# Version of Gnuastro that must be used." >> $glconf
+ echo " onlyversion $gversion" >> $glconf
+else
+ ingversion=$(awk '$1=="onlyversion" {print $NF}' $glconf)
+ if [ x$ingversion != x$gversion ]; then
+ cat <<EOF
+______________________________________________________
+!!!!!!!!!!!!!!!!!!CONFIGURATION ERROR!!!!!!!!!!!!!!!!!
+
+Gnuastro's version in '$glconf' ($ingversion) doesn't match the tarball
+version that this project was designed to use in '$depverfile'
+($gversion). Please re-run after removing the former file:
+
+ $ rm $glconf
+ $ ./project configure
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+ exit 1
+ fi
+fi
+# ------------------------------------------
+
+
+
+
+
+# Delete final configuration target
+# ---------------------------------
+#
+# We only want to start running the project later if this script has
+# completed successfully. To make sure it hasn't crashed in the middle
+# (without the user noticing), in the end of this script we make a file and
+# we'll delete it here (at the start). Therefore if the script crashed in
+# the middle that file won't exist.
+sdir=$bdir/software
+finaltarget=$sdir/configuration-done.txt
+if ! [ -d $sdir ]; then mkdir $sdir; fi
+rm -f $finaltarget
+
+
+
+
+
+# Project's top-level directories
+# -------------------------------
+#
+# These directories are possibly needed by many steps of the process, so to
+# avoid too many directory dependencies throughout the software and
+# analysis Makefiles (thus making them hard to read), we are just building
+# them here.
+# Software tarballs
+tardir=$sdir/tarballs
+if ! [ -d $tardir ]; then mkdir $tardir; fi
+
+# Installed software
+instdir=$sdir/installed
+if ! [ -d $instdir ]; then mkdir $instdir; fi
+
+# To record software versions and citation.
+verdir=$instdir/version-info
+if ! [ -d $verdir ]; then mkdir $verdir; fi
+
+# Program and library versions and citation.
+ibidir=$verdir/proglib
+if ! [ -d $ibidir ]; then mkdir $ibidir; fi
+
+# Python module versions and citation.
+ipydir=$verdir/python
+if ! [ -d $ipydir ]; then mkdir $ipydir; fi
+
+# Used software BibTeX entries.
+ictdir=$verdir/cite
+if ! [ -d $ictdir ]; then mkdir $ictdir; fi
+
+# TeXLive versions.
+itidir=$verdir/tex
+if ! [ -d $itidir ]; then mkdir $itidir; fi
+
+# Top-level LaTeX.
+texdir=$bdir/tex
+if ! [ -d $texdir ]; then mkdir $texdir; fi
+
+# LaTeX macros.
+mtexdir=$texdir/macros
+if ! [ -d $mtexdir ]; then mkdir $mtexdir; fi
+
+
+# TeX build directory. If built in a group scenario, the TeX build
+# directory must be separate for each member (so they can work on their
+# relevant parts of the paper without conflicting with each other).
+if [ "x$reproducible_paper_group_name" = x ]; then
+ texbdir=$texdir/build
+else
+ user=$(whoami)
+ texbdir=$texdir/build-$user
+fi
+if ! [ -d $texbdir ]; then mkdir $texbdir; fi
+
+# TiKZ (for building figures within LaTeX).
+tikzdir=$texbdir/tikz
+if ! [ -d $tikzdir ]; then mkdir $tikzdir; fi
+
+
+# Set the symbolic links for easy access to the top project build
+# directories. Note that these are put in each user's source/cloned
+# directory, not in the build directory (which can be shared between many
+# users and thus may already exist).
+#
+# Note: if we don't delete them first, it can happen that an extra link
+# will be created in each directory that points to its parent. So to be
+# safe, we are deleting all the links on each re-configure of the project.
+rm -f .build .local tex/build tex/tikz .gnuastro
+ln -s $bdir .build
+ln -s $instdir .local
+ln -s $texdir tex/build
+ln -s $tikzdir tex/tikz
+# --------- Delete for no Gnuastro ---------
+ln -s $topdir/reproduce/software/config/gnuastro .gnuastro
+# ------------------------------------------
+
+
+# Temporary software un-packing/build directory: if the host has the
+# standard `/dev/shm' mounting-point, we'll do it in shared memory (on the
+# RAM), to avoid harming/over-using the HDDs/SSDs. The RAM of most systems
+# today (>8GB) is large enough for the parallel building of the software.
+#
+# For the name of the directory under `/dev/shm' (for this project), we'll
+# use the names of the two parent directories to the current/running
+# directory, separated by a `-' instead of `/'. We'll then append that
+# with the user's name (in case multiple users may be working on similar
+# project names). Maybe later, we can use something like `mktemp' to add
+# random characters to this name and make it unique to every run (even for
+# a single user).
+tmpblddir=$sdir/build-tmp
+rm -rf $tmpblddir/* $tmpblddir # If it's a link, we need to empty its
+ # contents first, then itself.
+
+# Set the top-level shared memory location.
+if [ -d /dev/shm ]; then shmdir=/dev/shm
+else shmdir=""
+fi
+
+# If a shared memory mounted directory exists and there is enough space
+# there (in RAM), build a temporary directory for this project.
+needed_space=2000000
+if [ x"$shmdir" != x ]; then
+ available_space=$(df $shmdir | awk 'NR==2{print $4}')
+ if [ $available_space -gt $needed_space ]; then
+ dirname=$(pwd | sed -e's/\// /g' \
+ | awk '{l=NF-1; printf("%s-%s",$l, $NF)}')
+ tbshmdir=$shmdir/"$dirname"-$(whoami)
+ if ! [ -d $tbshmdir ]; then mkdir $tbshmdir; fi
+ fi
+else
+ tbshmdir=""
+fi
+
+# If a shared memory directory was created set `build-tmp' to be a
+# symbolic link to it. Otherwise, just build the temporary build
+# directory under the project build directory.
+if [ x$tbshmdir = x ]; then mkdir $tmpblddir;
+else ln -s $tbshmdir $tmpblddir;
+fi
+
+
+
+
+
+# Check for C/C++ compilers
+# -------------------------
+#
+# To build the software, we'll need some basic tools (the compilers in
+# particular) to be present.
+hascc=0;
+if type cc > /dev/null 2>/dev/null; then
+ if type c++ > /dev/null 2>/dev/null; then hascc=1; fi
+else
+ if type gcc > /dev/null 2>/dev/null; then
+ if type g++ > /dev/null 2>/dev/null; then hascc=1; fi
+ else
+ if type clang > /dev/null 2>/dev/null; then
+ if type clang++ > /dev/null 2>/dev/null; then hascc=1; fi
+ fi
+ fi
+fi
+if [ $hascc = 0 ]; then
+ cat <<EOF
+______________________________________________________
+!!!!!!! C/C++ Compiler NOT FOUND !!!!!!!
+
+To build the project's software, the host system needs to have basic C and
+C++ compilers. The executables that were checked are 'cc', 'gcc' and
+'clang' for a C compiler, and 'c++', 'g++' and 'clang++' for a C++
+compiler. If you have a relevant compiler that is not checked, please get
+in touch with us (with the form below) so we add it:
+
+ https://savannah.nongnu.org/support/?func=additem&group=reproduce
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+ exit 1
+fi
+
+
+
+
+
+# See if the C compiler can build static libraries
+# ------------------------------------------------
+
+# We are deliberately only working with shared libraries: some high-level
+# programs like Wget and cURL need dynamic linking, and if we build the
+# libraries statically, our own builds will be ignored and these programs
+# will go and find their necessary libraries on the host system.
+#
+# Another good advantage of shared libraries is that we can actually use
+# the shared library tool of the system (`ldd' with GNU C Library) and see
+# exactly where each linked library comes from. But in static building,
+# unless you follow the build closely, it's not easy to see if the source of
+# the library came from the system or our build.
+static_build=no
+
+#oprog=$sdir/static-test
+#cprog=$sdir/static-test.c
+#echo "#include <stdio.h>" > $cprog
+#echo "int main(void) {return 0;}" >> $cprog
+#if [ x$CC = x ]; then CC=gcc; fi;
+#if $CC $cprog -o$oprog -static &> /dev/null; then
+# export static_build="yes"
+#else
+# export static_build="no"
+#fi
+#rm -f $oprog $cprog
+#if [ $printnotice = yes ] && [ $static_build = "no" ]; then
+# cat <<EOF
+#_________________________________________________________________________
+#!!!!!!!!!!!!!!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!!!!!!!!!!
+#
+#Your system's C compiler ('$CC') doesn't support building static
+#libraries. Therefore the dependencies will be built dynamically. This means
+#that they will depend more strongly on changes/updates in the host
+#system. For high-level applications (like most research projects in natural
+#sciences), this shouldn't be a significant problem.
+#
+#But generally, for reproducibility, it's better to build static libraries
+#and programs. For more on their difference (and generally an introduction
+#on linking), please see the link below:
+#
+#https://www.gnu.org/software/gnuastro/manual/html_node/Linking.html
+#
+#If you have other compilers on your system, you can select a different
+#compiler by setting the 'CC' environment variable before running
+#'./project configure'.
+#
+#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+#
+#EOF
+# sleep 5
+#fi
+
+
+
+
+
+# See if the linker accepts -Wl,-rpath-link
+# -----------------------------------------
+#
+# `-rpath-link' is used to write the information of the linked shared
+# library into the shared object (library or program). But some versions of
+# LLVM's linker don't accept it, and this can cause problems.
+oprog=$sdir/rpath-test
+cprog=$sdir/rpath-test.c
+echo "#include <stdio.h>" > $cprog
+echo "int main(void) {return 0;}" >> $cprog
+if [ x$CC = x ]; then CC=gcc; fi;
+if $CC $cprog -o$oprog -Wl,-rpath-link &> /dev/null; then
+ export rpath_command="-Wl,-rpath-link=$instdir/lib"
+else
+ export rpath_command=""
+fi
+rm -f $oprog $cprog
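+
+# When the linker accepts the option, `rpath_command' will hold something
+# like the following (with the project's real installation directory) and
+# is passed to the software building Makefiles further below:
+#
+#     -Wl,-rpath-link=/path/to/build-dir/software/installed/lib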
+
+
+
+
+
+# See if we need the dynamic-linker (-ldl)
+# ----------------------------------------
+#
+# Some programs (like Wget) need dynamic loading (using `libdl'). On
+# GNU/Linux systems, we'll need the `-ldl' flag to link such programs. But
+# Mac OS doesn't need any explicit linking. So we'll check here to see if
+# the `-ldl' flag is needed on this system or not.
+oprog=$sdir/ldl-test
+cprog=$sdir/ldl-test.c
+cat > $cprog <<EOF
+#include <stdio.h>
+#include <dlfcn.h>
+int
+main(void) {
+ void *handle=dlopen ("/lib/CEDD_LIB.so.6", RTLD_LAZY);
+ return 0;
+}
+EOF
+if gcc $cprog -o$oprog &> /dev/null; then needs_ldl=no; else needs_ldl=yes; fi
+rm -f $oprog $cprog
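+
+# The resulting `needs_ldl' value (`yes' or `no') is only passed on to the
+# software building Makefiles below; when it is `yes', they can add `-ldl'
+# to the link flags of programs that need dynamic loading.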
+
+
+
+
+
+# Inform the user that the build process is starting
+# ---------------------------------------------------
+if [ $printnotice = yes ]; then
+ tsec=10
+ cat <<EOF
+
+-------------------------
+Building dependencies ...
+-------------------------
+
+Necessary dependency programs and libraries will be built in $tsec sec.
+
+NOTE: the built software will NOT BE INSTALLED on your system (no root
+access is required). They are only for local usage by this project. They
+will be installed in:
+
+ $sdir/installed
+
+EOF
+ sleep $tsec
+fi
+
+
+
+
+
+# If we are on a Mac OS system
+# ----------------------------
+#
+# For the time being, we'll use the existence of `otool' to see if we are
+# on a Mac OS system or not. Some tools (for example OpenSSL) need to know
+# this.
+#
+# On Mac OS, the building of GCC crashes sometimes while building libiberty
+# with Clang's `g++'. Until we find a solution, we'll just use the host's C
+# compiler.
+if type otool > /dev/null 2>/dev/null; then
+ host_cc=1
+ on_mac_os=yes
+else
+ on_mac_os=no
+fi
+
+
+
+
+
+# Build `flock'
+# -------------
+#
+# Flock (or file-lock) is a small program that we use to serialize the
+# (generally parallel) processing of Make when necessary. GNU/Linux
+# machines have it as part of their `util-linux' programs. But to be
+# consistent in non-GNU/Linux systems, we will be using our own build.
+#
+# The reason that `flock' is special is that we need it to serialize the
+# download process of the software tarballs.
+flockversion=$(awk '/flock-version/{print $3}' $depverfile)
+flocktar=flock-$flockversion.tar.gz
+flockurl=http://github.com/discoteq/flock/releases/download/v$flockversion/
+
+# Prepare/download the tarball.
+if ! [ -f $tardir/$flocktar ]; then
+ if [ -f $ddir/$flocktar ]; then
+ cp $ddir/$flocktar $tardir/$flocktar
+ else
+ if ! $downloader $tardir/$flocktar $flockurl/$flocktar; then
+ rm -f $tardir/$flocktar;
+ echo
+ echo "DOWNLOAD ERROR: Couldn't download the 'flock' tarball:"
+ echo " $flockurl"
+ echo
+ echo "You can manually place it in '$ddir' to avoid downloading."
+ exit 1
+ fi
+ fi
+fi
+
+# If the tarball is newer than the (possibly existing) program (the version
+# has changed), then delete the program.
+if [ -f .local/bin/flock ]; then
+ if [ $tardir/$flocktar -nt $ibidir/flock ]; then
+ rm $ibidir/flock
+ fi
+fi
+
+# Build `flock' if necessary.
+if ! [ -f $ibidir/flock ]; then
+ cd $tmpblddir
+ tar xf $tardir/$flocktar
+ cd flock-$flockversion
+ ./configure --prefix=$instdir
+ make
+ make install
+ cd $topdir
+ rm -rf $tmpblddir/flock-$flockversion
+ echo "Discoteq flock $flockversion" > $ibidir/flock
+fi
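+
+# As a rough sketch of how the serialization works (the lock file and URL
+# here are hypothetical): every download in the building Makefiles is
+# wrapped in the same lock, so only one download runs at a time.
+#
+#     .local/bin/flock /path/to/lock-file \
+#         $downloader pkg-1.0.tar.gz http://example.org/pkg-1.0.tar.gz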
+
+
+
+
+
+# See if GCC can be built
+# -----------------------
+#
+# On some GNU/Linux distros, the C compiler is broken into `multilib' (for
+# 32-bit and 64-bit support, with their own headers). On these systems,
+# `/usr/include/sys/cdefs.h' and `/usr/lib/libc.a' are not available by
+# default. So GCC will crash with different ugly errors! The only solution
+# is for the user to manually install the `multilib' part as root, before
+# running the configure script.
+#
+# Note that `sys/cdefs.h' may be available in other directories (for
+# example `/usr/include/x86_64-linux-gnu/') that are automatically included
+# in an installed GCC. HOWEVER during the build of GCC, all those other
+# directories are ignored. So even if they exist, they are useless.
+warningsleep=0
+if [ $host_cc = 0 ]; then
+ if ! [ -f /usr/include/sys/cdefs.h ]; then
+ host_cc=1
+ warningsleep=1
+ cat <<EOF
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+This system doesn't have '/usr/include/sys/cdefs.h'. Because of this, the
+project can't build its custom GCC to ensure better reproducibility. We
+strongly recommend installing the proper package (for your operating
+system) that installs this necessary file. For example on some Debian-based
+GNU/Linux distros, you need these two packages: 'gcc-multilib' and
+'g++-multilib'.
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+ fi
+
+ if [ -f /usr/lib/libc.a ] || [ -f /usr/lib64/libc.a ]; then
+ # This is just a place holder
+ host_cc=$host_cc
+ else
+ host_cc=1
+ warningsleep=1
+ cat <<EOF
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+This system doesn't have '/usr/lib/libc.a' or '/usr/lib64/libc.a'. Because
+of this, the project can't build its custom GCC to ensure better
+reproducibility. We strongly recommend installing the proper package (for
+your operating system) that installs this necessary file.
+
+Some possible solutions:
+ 1. On some Debian-based GNU/Linux distros, these two packages may fix the
+ problem: 'gcc-multilib' and 'g++-multilib'.
+ 2. (BE CAREFUL!) If you have '/usr/lib/x86_64-linux-gnu' but don't have
+ '/usr/lib64', then running the following command might fix this
+ particular problem by making a symbolic link.
+ $ sudo ln -s /usr/lib/x86_64-linux-gnu /usr/lib64
+ After the configure script is finished, delete the link with 'rm
+ /usr/lib64' (you won't need it any more as far as this project is
+ concerned).
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+ fi
+
+ if [ $warningsleep = 1 ]; then
+ cat <<EOF
+
+PLEASE SEE THE WARNINGS ABOVE.
+
+Since GCC is pretty low-level, this configuration script will continue in 5
+seconds and use your system's C compiler (it won't build a custom GCC). But
+please consider installing the necessary package(s) to complete your C
+compiler, then re-run './project configure'.
+
+EOF
+ sleep 5
+ fi
+fi
+
+
+
+
+
+# Fortran compiler
+# ----------------
+#
+# If GCC is ultimately built within the project, the user won't need to
+# have a Fortran compiler: we'll build it internally for high-level
+# programs. However, when the host C compiler is to be used, the user needs
+# to have a Fortran compiler available.
+if [ $host_cc = 1 ]; then
+ hasfc=0;
+ if type gfortran > /dev/null 2>/dev/null; then hasfc=1; fi
+
+ if [ $hasfc = 0 ]; then
+ cat <<EOF
+______________________________________________________
+!!!!!!! Fortran Compiler NOT FOUND !!!!!!!
+
+Because the project won't be building its own GCC (which includes a Fortran
+compiler), you need to have a Fortran compiler available. Fortran is
+commonly necessary for many lower-level scientific programs. Currently we
+search for 'gfortran'. If you have a Fortran compiler that is not checked,
+please get in touch with us (with the form below) so we add it:
+
+ https://savannah.nongnu.org/support/?func=additem&group=reproduce
+
+Note: GCC will not be built because you are either using the '--host-cc'
+option, or you are using an operating system that currently has bugs when
+building GCC.
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+ exit 1
+ fi
+fi
+
+
+
+
+
+# Number of threads for basic builds
+# ----------------------------------
+#
+# Since the system might not have GNU Make at this stage, and other Make
+# implementations can't deal with parallel builds properly, we'll just
+# default to 1 thread. This is because some versions of Make complain about
+# not having enough 'pipe' (memory) on some systems. After some searching,
+# I found out it's because of too many threads. GNU Make will be present on
+# GNU systems (that have `nproc', part of GNU Coreutils). So to simplify
+# the test for GNU Make, we'll just try running `nproc'.
+if which nproc &> /dev/null; then
+ if [ $jobs = 0 ]; then
+ numthreads=$(nproc --all);
+ else
+ numthreads=$jobs
+ fi
+else
+ numthreads=1;
+fi
+
+
+
+
+
+# Build basic software
+# --------------------
+#
+# When building this basic software, we don't yet have our own un-packing
+# software, Bash, Make, or AWK. In this step, we'll install such low-level
+# basic tools, but we have to be very portable (and use minimal features).
+make -f reproduce/software/make/basic.mk \
+ rpath_command=$rpath_command \
+ static_build=$static_build \
+ needs_ldl=$needs_ldl \
+ on_mac_os=$on_mac_os \
+ numthreads=$numthreads \
+ host_cc=$host_cc \
+ -j$numthreads
+
+
+
+
+
+# All other software
+# ------------------
+#
+# We will be making all the dependencies before running the top-level
+# Makefile. To make the job easier, we'll do it in a Makefile, not a
+# script. Bash and Make are the tools we need to run Makefiles, so we had
+# to build them in this script. But after this, we can rely on Makefiles.
+if [ $jobs = 0 ]; then
+ numthreads=$($instdir/bin/nproc --all)
+else
+ numthreads=$jobs
+fi
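+
+# Note that `env -i' clears the inherited environment, so the high-level
+# build only sees the variables explicitly given on this command line
+# (plus whatever the Makefile itself defines).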
+.local/bin/env -i HOME=$bdir \
+ .local/bin/make -f reproduce/software/make/high-level.mk \
+ rpath_command=$rpath_command \
+ static_build=$static_build \
+ numthreads=$numthreads \
+ on_mac_os=$on_mac_os \
+ host_cc=$host_cc \
+ -j$numthreads
+
+
+
+
+
+# Make sure TeX Live installed successfully
+# -----------------------------------------
+#
+# TeX Live is managed over the internet, so if there isn't any connection,
+# or it suddenly gets cut, TeX Live can't be built. However, when it isn't
+# installed, the project can do all its processing independent of it. It
+# will just stop at the stage when all the processing is complete and it is
+# only necessary to build the PDF. So we don't want to stop the project's
+# configuration and building if it's not present.
+if [ -f $itidir/texlive-ready-tlmgr ]; then
+ texlive_result=$(cat $itidir/texlive-ready-tlmgr)
+else
+ texlive_result="NOT!"
+fi
+if [ x"$texlive_result" = x"NOT!" ]; then
+ cat <<EOF
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+TeX Live couldn't be installed during the configuration (probably because
+there were downloading problems). TeX Live is only necessary in making the
+final PDF (which is only done after all the analysis has been completed). It
+is not used at all during the analysis.
+
+Therefore, if you don't need the final PDF, and just want to do the
+analysis, you can safely ignore this warning and continue.
+
+If you later have internet access and would like to add TeX Live to your
+project, please delete the respective files, then re-run configure as shown
+below. Within configure, answer 'n' (for "no") when asked to re-write the
+configuration files.
+
+ rm .local/version-info/tex/texlive-ready-tlmgr
+ ./project configure
+
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+EOF
+fi
+
+
+
+
+
+# Citation of installed software
+#
+# After everything is installed, we'll put all the names and versions in a
+# human-readable paragraph and also prepare the BibTeX citation for the
+# software.
+function prepare_name_version() {
+
+ # First see if the (possible) `*' in the input arguments corresponds to
+ # anything. Note that some of the given directories may be empty (no
+ # software installed).
+ hasfiles=0
+ for f in $@; do
+ if [ -f $f ]; then hasfiles=1; break; fi;
+ done
+
+ # If there are any files, merge all the names in a paragraph.
+ if [ $hasfiles = 1 ]; then
+
+ # Count how many names there are. This is necessary to identify the
+ # last element.
+ num=$(.local/bin/cat $@ \
+ | .local/bin/sed '/^\s*$/d' \
+ | .local/bin/wc -l)
+
+ # Put them all in one paragraph.
+ .local/bin/cat $@ \
+ | .local/bin/sort \
+ | sed 's/_/\\_/' \
+ | .local/bin/awk 'NF>0 { \
+ c++; \
+ if(c==1) \
+ { \
+ if('$num'==1) printf("%s", $0); \
+ else printf("%s", $0); \
+ } \
+ else if(c=='$num') printf(" and %s\n", $0); \
+ else printf(", %s", $0) \
+ }'
+ fi
+}
+
+# Report the different software in separate contexts (separating Python and
+# TeX packages from the C/C++ programs and libraries).
+proglibs=$(prepare_name_version $verdir/proglib/*)
+pymodules=$(prepare_name_version $verdir/python/*)
+texpkg=$(prepare_name_version $verdir/tex/texlive)
+
+# Write them as one paragraph for LaTeX.
+pkgver=$mtexdir/dependencies.tex
+.local/bin/echo "This research was done with the following free" > $pkgver
+.local/bin/echo "software programs and libraries: $proglibs." >> $pkgver
+if [ x"$pymodules" != x ]; then
+ .local/bin/echo "Within Python, the following modules" >> $pkgver
+ echo "were used: $pymodules." >> $pkgver
+fi
+.local/bin/echo "The \LaTeX{} source of the paper was compiled" >> $pkgver
+.local/bin/echo "to make the PDF using the following packages:" >> $pkgver
+.local/bin/echo "$texpkg. We are very grateful to all their" >> $pkgver
+.local/bin/echo "creators for freely providing this necessary" >> $pkgver
+.local/bin/echo "infrastructure. This research (and many " >> $pkgver
+.local/bin/echo "others) would not be possible without them." >> $pkgver
+
+# Prepare the BibTeX entries for the used software (if there are any).
+hasentry=0
+bibfiles="$ictdir/*"
+for f in $bibfiles; do if [ -f $f ]; then hasentry=1; break; fi; done;
+
+# Make sure we start with an empty output file.
+pkgbib=$mtexdir/dependencies-bib.tex
+echo "" > $pkgbib
+
+# Fill it in with all the BibTeX entries in this directory. We'll just
+# avoid writing any comments (usually copyright notices) and also put an
+# empty line after each file's contents to make the output more readable.
+if [ $hasentry = 1 ]; then
+ for f in $bibfiles; do
+ awk '!/^%/{print} END{print ""}' $f >> $pkgbib
+ done
+fi
+
+
+
+
+
+# Clean the temporary build directory
+# ---------------------------------
+#
+# By the time the script reaches here, the temporary software build
+# directory should be empty, so just delete it. Note `tmpblddir' may be a
+# symbolic link to shared memory. So, to work in any scenario, first delete
+# the contents of the directory (if it has any), then delete `tmpblddir'.
+.local/bin/rm -rf $tmpblddir/* $tmpblddir
+
+
+
+
+
+# Register successful completion
+# ------------------------------
+echo `.local/bin/date` > $finaltarget
+
+
+
+
+
+
+# Final notice
+# ------------
+#
+# The configuration is now complete, we can inform the user on the next
+# step(s) to take.
+if [ x$reproducible_paper_group_name = x ]; then
+ buildcommand="./project make -j8"
+else
+ buildcommand="./project make --group=$reproducible_paper_group_name -j8"
+fi
+cat <<EOF
+
+----------------
+The project and its environment are configured with no errors.
+
+Please run the following command to start.
+(Replace '8' with the number of CPU threads)
+
+ $buildcommand
+
+To change the configuration later, please re-run './project configure'; DO
+NOT manually edit the relevant files.
+
+EOF
diff --git a/reproduce/software/config/installation/LOCAL.mk.in b/reproduce/software/config/installation/LOCAL.mk.in
index 785bb6a..132d961 100644
--- a/reproduce/software/config/installation/LOCAL.mk.in
+++ b/reproduce/software/config/installation/LOCAL.mk.in
@@ -1,7 +1,7 @@
# Local project configuration.
#
-# This is just a template for the `./configure' script to fill in. Please
-# don't make any change to this file.
+# This is just a template for the `./project configure' script to fill
+# in. Please don't make any change to this file.
#
# Copyright (C) 2018-2019 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
diff --git a/reproduce/software/make/atlas-multiple.mk b/reproduce/software/make/atlas-multiple.mk
index fef25c7..f5efb35 100755
--- a/reproduce/software/make/atlas-multiple.mk
+++ b/reproduce/software/make/atlas-multiple.mk
@@ -3,8 +3,8 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run during the initial `./configure' script. It is
-# not included into the reproduction pipe after that.
+# This Makefile will be run during the initial `./project configure'
+# script. It is not included into the reproduction pipe after that.
#
# ------------------------------------------------------------------------
#
diff --git a/reproduce/software/make/atlas-single.mk b/reproduce/software/make/atlas-single.mk
index dde2926..c4621ce 100755
--- a/reproduce/software/make/atlas-single.mk
+++ b/reproduce/software/make/atlas-single.mk
@@ -3,8 +3,8 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run during the initial `./configure' script. It is
-# not included into the reproduction pipe after that.
+# This Makefile will be run during the initial `./project configure'
+# script. It is not included into the reproduction pipe after that.
#
# ------------------------------------------------------------------------
#
diff --git a/reproduce/software/make/basic.mk b/reproduce/software/make/basic.mk
index bab37ed..e4ab55a 100644
--- a/reproduce/software/make/basic.mk
+++ b/reproduce/software/make/basic.mk
@@ -4,8 +4,8 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run by the initial `./configure' script. It is not
-# included into the project after that.
+# This Makefile will be run by the initial `./project configure' script. It
+# is not included into the project after that.
#
# This Makefile builds very low-level and basic tools like GNU Tar, GNU
# Bash, GNU Make, GCC, and so on. Therefore this is the only Makefile in the
@@ -58,7 +58,11 @@ export PKG_CONFIG_LIBDIR := $(ildir)/pkgconfig
export CPPFLAGS := -I$(idir)/include $(CPPFLAGS)
export LD_LIBRARY_PATH := $(ildir):$(LD_LIBRARY_PATH)
export LDFLAGS := $(rpath_command) -L$(ildir) $(LDFLAGS)
-export DYLD_LIBRARY_PATH := $(ildir):$(DYLD_LIBRARY_PATH)
+
+# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is
+# ultimately redundant. But on some systems, even having a single value
+# causes crashes (see bug #56682). So we'll just give it no value at all.
+export DYLD_LIBRARY_PATH :=
# Define the top-level basic programs (that don't depend on any other).
top-level-programs = low-level-links gcc
diff --git a/reproduce/software/make/high-level.mk b/reproduce/software/make/high-level.mk
index dee23ca..b9a70de 100644
--- a/reproduce/software/make/high-level.mk
+++ b/reproduce/software/make/high-level.mk
@@ -3,8 +3,8 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run by the initial `./configure' script. It is not
-# included into the reproduction pipe after that.
+# This Makefile will be run by the initial `./project configure' script. It
+# is not included into the reproduction pipe after that.
#
# ------------------------------------------------------------------------
#
@@ -76,7 +76,11 @@ export LD_RUN_PATH := $(ildir):$(il64dir)
export PKG_CONFIG_PATH := $(ildir)/pkgconfig
export LD_LIBRARY_PATH := $(ildir):$(il64dir)
export PKG_CONFIG_LIBDIR := $(ildir)/pkgconfig
-export DYLD_LIBRARY_PATH := $(ildir):$(il64dir)
+
+# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is
+# ultimately redundant. But on some systems, even having a single value
+# causes crashes (see bug #56682). So we'll just give it no value at all.
+export DYLD_LIBRARY_PATH :=
# Building flags:
#
@@ -90,6 +94,9 @@ export CXXFLAGS := -liconv
endif
+
+
+
# We want the download to happen on a single thread. So we need to define a
# lock, and call a special script we have written for this job. These are
# placed here because we want them both in the `high-level.mk' and
diff --git a/reproduce/software/make/python.mk b/reproduce/software/make/python.mk
index 22b668f..ed1c87d 100644
--- a/reproduce/software/make/python.mk
+++ b/reproduce/software/make/python.mk
@@ -3,8 +3,8 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run by the initial `./configure' script. It is not
-# included into the reproduction pipe after that.
+# This Makefile will be run by the initial `./project configure' script. It
+# is not included into the reproduction pipe after that.
#
# ------------------------------------------------------------------------
#