author     Mohammad Akhlaghi <mohammad@akhlaghi.org>    2022-05-09 13:32:47 +0200
committer  Mohammad Akhlaghi <mohammad@akhlaghi.org>    2022-05-09 23:52:29 +0200
commit     9fdeebaacd06d57c479cd69e9937c4bfe5d0a286 (patch)
tree       012e6194ad6e25a81a9c99b4d0bd0852bc9a12af
parent     480184b3da399fab11b50e67f01d2efa6bea0e3e (diff)
parent     f51b5e2e500dd6450a5a3425e85df78245fc5c5c (diff)
Imported recent updates in Maneage, conflicts fixed
Since the last merge, the core Maneage branch has undergone some updates. With this commit, those updates have been imported and the resulting conflicts fixed. The conflicts were all cosmetic and had no effect on the analysis. The most significant update was the change in the format of 'INPUTS.conf'. In the process, I also noticed that the IEEEtran LaTeX package is now called 'ieeetran' (the 'tlmgr' of TeXLive 2022 was failing).
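To illustrate the 'INPUTS.conf' format change mentioned above (the values below are taken verbatim from the hunk of that file further down in this diff), each dataset's variables switch from free-form base-names with an MD5 checksum to names keyed on the file name itself with a SHA256 checksum:

    # Old format (free-form 'MK20' base-name, MD5 checksum):
    MK20DATA = menke20.xlsx
    MK20MD5  = 8e4eee64791f351fec58680126d558a0
    MK20SIZE = 1.9MB
    MK20URL  = https://www.biorxiv.org/content/biorxiv/early/2020/01/18/2020.01.15.908111/DC1/embed/media-1.xlsx

    # New format (keyed on the file name, SHA256 checksum):
    INPUT-menke20.xlsx-size   = 1.9M
    INPUT-menke20.xlsx-url    = https://www.biorxiv.org/content/biorxiv/early/2020/01/18/2020.01.15.908111/DC1/embed/media-1.xlsx
    INPUT-menke20.xlsx-sha256 = 7839cdc2946134773ffc401cbcc78fb58fc489d2caad65375c85d605b2f8b13e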
-rw-r--r--  .gitignore | 4
-rw-r--r--  README-hacking.md | 134
-rw-r--r--  README.md | 358
-rw-r--r--  paper.tex | 12
-rwxr-xr-x  project | 60
-rwxr-xr-x  reproduce/analysis/bash/download-multi-try | 14
-rw-r--r--  reproduce/analysis/config/INPUTS.conf | 113
-rw-r--r--  reproduce/analysis/config/metadata.conf | 2
-rw-r--r--  reproduce/analysis/config/pdf-build.conf | 2
-rw-r--r--  reproduce/analysis/config/verify-outputs.conf | 2
-rw-r--r--  reproduce/analysis/make/demo-plot.mk | 28
-rw-r--r--  reproduce/analysis/make/download.mk | 86
-rw-r--r--  reproduce/analysis/make/format.mk | 44
-rw-r--r--  reproduce/analysis/make/initialize.mk | 221
-rw-r--r--  reproduce/analysis/make/paper.mk | 130
-rw-r--r--  reproduce/analysis/make/prepare.mk | 47
-rw-r--r--  reproduce/analysis/make/top-make.mk | 20
-rw-r--r--  reproduce/analysis/make/top-prepare.mk | 20
-rw-r--r--  reproduce/analysis/make/verify.mk | 42
-rw-r--r--  reproduce/software/bibtex/astrometrynet.tex | 2
-rw-r--r--  reproduce/software/bibtex/astropy.tex | 2
-rw-r--r--  reproduce/software/bibtex/corner.tex | 2
-rw-r--r--  reproduce/software/bibtex/cython.tex | 2
-rw-r--r--  reproduce/software/bibtex/fftw.tex | 2
-rw-r--r--  reproduce/software/bibtex/galsim.tex | 2
-rw-r--r--  reproduce/software/bibtex/gnuastro.tex | 2
-rw-r--r--  reproduce/software/bibtex/healpix.tex | 2
-rw-r--r--  reproduce/software/bibtex/imfit.tex | 2
-rw-r--r--  reproduce/software/bibtex/matplotlib.tex | 2
-rw-r--r--  reproduce/software/bibtex/missfits.tex | 2
-rw-r--r--  reproduce/software/bibtex/mpi4py.tex | 2
-rw-r--r--  reproduce/software/bibtex/numpy.tex | 2
-rw-r--r--  reproduce/software/bibtex/r-cran.tex | 19
-rw-r--r--  reproduce/software/bibtex/scamp.tex | 2
-rw-r--r--  reproduce/software/bibtex/scipy.tex | 2
-rw-r--r--  reproduce/software/bibtex/sextractor.tex | 4
-rw-r--r--  reproduce/software/bibtex/sip_tpv.tex | 2
-rw-r--r--  reproduce/software/bibtex/swarp.tex | 2
-rw-r--r--  reproduce/software/bibtex/sympy.tex | 2
-rw-r--r--  reproduce/software/bibtex/tides.tex | 2
-rw-r--r--  reproduce/software/config/LOCAL.conf.in | 4
-rw-r--r--  reproduce/software/config/TARGETS.conf | 18
-rw-r--r--  reproduce/software/config/checksums.conf | 316
-rw-r--r--  reproduce/software/config/numpy-scipy.cfg | 6
-rw-r--r--  reproduce/software/config/servers-backup.conf | 2
-rwxr-xr-x  reproduce/software/config/software_acknowledge_context.sh | 4
-rw-r--r--  reproduce/software/config/texlive-packages.conf | 10
-rw-r--r--  reproduce/software/config/texlive.conf | 2
-rw-r--r--  reproduce/software/config/urls.conf | 55
-rw-r--r--  reproduce/software/config/versions.conf | 307
-rw-r--r--  reproduce/software/make/README.md | 2
-rw-r--r--  reproduce/software/make/atlas-multiple.mk | 4
-rw-r--r--  reproduce/software/make/atlas-single.mk | 4
-rw-r--r--  reproduce/software/make/basic.mk | 788
-rw-r--r--  reproduce/software/make/build-rules.mk | 73
-rw-r--r--  reproduce/software/make/high-level.mk | 881
-rw-r--r--  reproduce/software/make/python.mk | 313
-rw-r--r--  reproduce/software/make/r-cran.mk | 487
-rw-r--r--  reproduce/software/make/xorg.mk | 33
-rw-r--r--  reproduce/software/patches/README.md | 6
-rw-r--r--  reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch | 37
-rw-r--r--  reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch | 23
-rwxr-xr-x  reproduce/software/shell/bashrc.sh | 6
-rwxr-xr-x  reproduce/software/shell/configure.sh | 218
-rwxr-xr-x  reproduce/software/shell/git-post-checkout | 10
-rwxr-xr-x  reproduce/software/shell/git-pre-commit | 10
-rwxr-xr-x  reproduce/software/shell/pre-make-build.sh | 30
-rwxr-xr-x  reproduce/software/shell/run-parts.in | 4
-rwxr-xr-x  reproduce/software/shell/tarball-prepare.sh | 181
-rw-r--r--  tex/README.md | 2
-rw-r--r--  tex/img/icon-collaboration.eps | 2
-rw-r--r--  tex/img/icon-complete.eps | 2
-rw-r--r--  tex/img/icon-processing.eps | 2
-rw-r--r--  tex/src/IEEEtran_openaccess.bst | 2
-rw-r--r--  tex/src/appendix-existing-solutions.tex | 6
-rw-r--r--  tex/src/appendix-existing-tools.tex | 6
-rw-r--r--  tex/src/appendix-necessity.tex | 4
-rw-r--r--  tex/src/figure-project-outline.tex | 2
-rw-r--r--  tex/src/figure-src-inputconf.tex | 7
-rw-r--r--  tex/src/preamble-maneage.tex | 2
-rw-r--r--  tex/src/preamble-pgfplots.tex | 19
-rw-r--r--  tex/src/preamble-project.tex | 6
-rw-r--r--  tex/src/references.tex | 2
-rw-r--r--  tex/src/supplement.tex | 4
84 files changed, 3292 insertions, 2010 deletions
diff --git a/.gitignore b/.gitignore
index 9bce289..6c46b87 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,6 +17,7 @@
# without any warranty.
*~
+*\#
*.txt
*.aux
*.log
@@ -39,8 +40,9 @@ tex/tikz
.DS_Store
.texlive*
LOCAL.conf
+docker-run
tex/pipeline
LOCAL_tmp.mk
LOCAL_old.mk
gnuastro-local.conf
-.compiler_test_dir_please_delete \ No newline at end of file
+.compiler_test_dir_please_delete
diff --git a/README-hacking.md b/README-hacking.md
index e42bf42..b721d6d 100644
--- a/README-hacking.md
+++ b/README-hacking.md
@@ -1,8 +1,8 @@
Maneage: managing data lineage
==============================
-Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
-Copyright (C) 2020-2021 Raul Infante-Sainz <infantesainz@gmail.com>\
+Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
+Copyright (C) 2020-2022 Raul Infante-Sainz <infantesainz@gmail.com>\
See the end of the file for license conditions.
Maneage is a **fully working template** for doing reproducible research (or
@@ -180,29 +180,44 @@ evolving rapidly, so some details will differ between the different
versions. The more recent papers will tend to be the most useful as good
working examples.
- - Peper & Roukema ([2020](https://arxiv.org/abs/2010.03742),
- arXiv:2010.03742): The live version of the controlled source is [at
+ - Borkowska & Roukema
+ ([2022](https://ui.adsabs.harvard.edu/abs/2021arXiv211214174B), MNRAS
+ Submitted, arXiv:2112.14174): The live version of the controlled source
+ is [at Codeberg](https://codeberg.org/boud/gevcurvtest); the main input
+ dataset, a software snapshot, the software tarballs, the project outputs
+ and editing history are available at
+ [zenodo.5806027](https://doi.org/10.5281/zenodo.5806027); and the
+ archived git history is available at [swh:1:rev:54398b720ddbac269ede30bf1e27fe27f07567f7](https://archive.softwareheritage.org/browse/revision/54398b720ddbac269ede30bf1e27fe27f07567f7).
+
+ - Peper & Roukema
+ ([2021](https://ui.adsabs.harvard.edu/abs/2021MNRAS.505.1223P), MNRAS,
+ 505, 1223, DOI:10.1093/mnras/stab1342, arXiv:2010.03742): The live
+ version of the controlled source is [at
Codeberg](https://codeberg.org/boud/elaphrocentre); the main input
- dataset, a software snapshot, the software tarballs, the project
+ dataset, a software snapshot, the software tarballs, the project outputs
+ and editing history are available at
+ [zenodo.4699702](https://zenodo.org/record/4699702); and the archived
+ git history is available at
+ [swh:1:rev:a029edd32d5cd41dbdac145189d9b1a08421114e](https://archive.softwareheritage.org/swh:1:rev:a029edd32d5cd41dbdac145189d9b1a08421114e).
+
+ - Roukema ([2021](https://ui.adsabs.harvard.edu/abs/2021PeerJ...911856R),
+ PeerJ, 9:e11856, arXiv:2007.11779): The live version of the controlled
+ source is [at Codeberg](https://codeberg.org/boud/subpoisson); the main
+ input dataset, a software snapshot, the software tarballs, the project
outputs and editing history are available at
- [zenodo.4062461](https://zenodo.org/record/4062461); and the
- archived git history is available at
- [swh:1:dir:c4770e81288f340083dd8aa9fe017103c4eaf476](https://archive.softwareheritage.org/swh:1:dir:c4770e81288f340083dd8aa9fe017103c4eaf476).
-
- - Roukema ([2020](https://arxiv.org/abs/2007.11779),
- arXiv:2007.11779): The live version of the controlled source is [at
- Codeberg](https://codeberg.org/boud/subpoisson); the main input
- dataset, a software snapshot, the software tarballs, the project
- outputs and editing history are available at
- [zenodo.3951152](https://zenodo.org/record/3951152); and the
- archived git history is available at
- [swh:1:dir:fcc9d6b111e319e51af88502fe6b233dc78d5166](https://archive.softwareheritage.org/swh:1:dir:fcc9d6b111e319e51af88502fe6b233dc78d5166).
-
- - Akhlaghi et al. ([2020](https://arxiv.org/abs/2006.03018),
- arXiv:2006.03018): The project's version controlled source is [on
+ [zenodo.4765705](https://zenodo.org/record/4765705); and the archived
+ git history is available at
+ [swh:1:rev:72242ca8eade9659031ea00394a30e0cc5cc1c37](https://archive.softwareheritage.org/swh:1:rev:72242ca8eade9659031ea00394a30e0cc5cc1c37).
+
+ - Akhlaghi et
+ al. ([2021](https://ui.adsabs.harvard.edu/abs/2021CSE....23c..82A),
+ CiSE, 23(3), 82, DOI:10.1109/MCSE.2021.3072860, arXiv:2006.03018): The
+ project's version controlled source is [on
Gitlab](https://gitlab.com/makhlaghi/maneage-paper), necessary software,
- outputs and backup of history is available in
- [zenodo.3872248](https://doi.org/10.5281/zenodo.3872248).
+ outputs and backup of history are available at
+ [zenodo.3872248](https://doi.org/10.5281/zenodo.3872248); and the
+ archived git history is available at
+ [swh:1:dir:45a9e282a86145fe9babef529c8fce52ffe8d717](https://archive.softwareheritage.org/swh:1:dir:45a9e282a86145fe9babef529c8fce52ffe8d717).
- Infante-Sainz et
al. ([2020](https://ui.adsabs.harvard.edu/abs/2020MNRAS.491.5317I),
@@ -212,8 +227,8 @@ working examples.
[zenodo.3524937](https://zenodo.org/record/3524937).
- Akhlaghi ([2019](https://arxiv.org/abs/1909.11230), IAU Symposium
- 355). The version controlled project source is available [on
- GitLab](https://gitlab.com/makhlaghi/iau-symposium-355) and is also
+ 355). The version controlled project source is available
+ [on GitLab](https://gitlab.com/makhlaghi/iau-symposium-355) and is also
archived on Zenodo with all the necessary software tarballs:
[zenodo.3408481](https://doi.org/10.5281/zenodo.3408481).
@@ -553,7 +568,7 @@ First custom commit
the default `origin` remote server to specify that this is Maneage's
remote server. This will allow you to use the conventional `origin`
name for your own project as shown in the next steps. Second, you will
- create and go into the conventional `master` branch to start
+ create and go into the conventional `main` branch to start
committing in your project later.
```shell
@@ -561,7 +576,7 @@ First custom commit
$ mv project my-project # Change the name to your project's name.
$ cd my-project # Go into the cloned directory.
$ git remote rename origin origin-maneage # Rename current/only remote to "origin-maneage".
- $ git checkout -b master # Create and enter your own "master" branch.
+ $ git checkout -b main # Create and enter your own "main" branch.
$ pwd # Just to confirm where you are.
```
@@ -616,7 +631,7 @@ First custom commit
a new project which is bad in this scenario, and will not allow you to
push to it). It will give you a URL (usually starting with `git@` and
ending in `.git`), put this URL in place of `XXXXXXXXXX` in the first
- command below. With the second command, "push" your `master` branch to
+ command below. With the second command, "push" your `main` branch to
your `origin` remote, and (with the `--set-upstream` option) set them
to track/follow each other. However, the `maneage` branch is currently
tracking/following your `origin-maneage` remote (automatically set
@@ -627,7 +642,7 @@ First custom commit
```shell
git remote add origin XXXXXXXXXX # Newly created repo is now called 'origin'.
- git push --set-upstream origin master # Push 'master' branch to 'origin' (with tracking).
+ git push --set-upstream origin main # Push 'main' branch to 'origin' (with tracking).
git push origin maneage # Push 'maneage' branch to 'origin' (no tracking).
```
@@ -635,7 +650,7 @@ First custom commit
your name (with your possible coauthors) and tentative abstract in
`paper.tex`. You should see the relevant place in the preamble (prior
to `\begin{document}`. Just note that some core project metadata like
- the project tile are actually set in
+ the project title are actually set in
`reproduce/analysis/config/metadata.conf`. So set your project title
in there. After you are done, run the `./project make` command again
to see your changes in the final PDF and make sure that your changes
@@ -681,13 +696,14 @@ First custom commit
$ rm reproduce/analysis/config/delete-me*
```
- - Disable verification of outputs by removing the `yes` from
- `reproduce/analysis/config/verify-outputs.conf`. Later, when you are
- ready to submit your paper, or publish the dataset, activate
- verification and make the proper corrections in this file (described
- under the "Other basic customizations" section below). This is a
- critical step and only takes a few minutes when your project is
- finished. So DON'T FORGET to activate it in the end.
+ - `reproduce/analysis/config/verify-outputs.conf`: Disable
+ verification of outputs by changing the `yes` (the value of
+ `verify-outputs`) to `no`. Later, when you are ready to submit your
+ paper, or publish the dataset, activate verification and make the
+ proper corrections in this file (described under the "Other basic
+ customizations" section below). This is a critical step and only
+ takes a few minutes when your project is finished. So DON'T FORGET
+ to activate it in the end.
- Re-make the project (after a cleaning) to see if you haven't
introduced any errors.
@@ -699,7 +715,7 @@ First custom commit
7. **Ignore changes in some Maneage files**: One of the main advantages of
Maneage is that you can later update your infra-structure by merging
- your `master` branch with the `maneage` branch. This is good for many
+ your `main` branch with the `maneage` branch. This is good for many
low-level features that you will likely never modify yourself. But it
is not desired for some files like `paper.tex` (you don't want changes
in Maneage's default `paper.tex` to cause conflicts with all the text
@@ -743,12 +759,12 @@ First custom commit
add a copyright notice in your name under the existing one(s), like
the line with capital letters below. To start with, add this line with
your name and email address to `paper.tex`,
- `tex/src/preamble-header.tex`, `reproduce/analysis/make/top-make.mk`,
+ `tex/src/preamble-project.tex`, `reproduce/analysis/make/top-make.mk`,
and generally, all the files you modified in the previous step.
```
- Copyright (C) 2018-2021 Existing Name <existing@email.address>
- Copyright (C) 2021 YOUR NAME <YOUR@EMAIL.ADDRESS>
+ Copyright (C) 2018-2022 Existing Name <existing@email.address>
+ Copyright (C) 2022 YOUR NAME <YOUR@EMAIL.ADDRESS>
```
9. **Configure Git for first time**: If this is the first time you are
@@ -766,7 +782,7 @@ First custom commit
```
10. **Your first commit**: You have already made some small and basic
- changes in the steps above and you are in your project's `master`
+ changes in the steps above and you are in your project's `main`
branch. So, you can officially make your first commit in your
project's history and push it. But before that, you need to make sure
that there are no problems in the project. This is a good habit to
@@ -823,24 +839,12 @@ Other basic customizations
Gnuastro, go through the analysis steps in `reproduce/analysis` and
remove all its use cases (clearly marked).
- - **Input dataset**: The input datasets are managed through the
- `reproduce/analysis/config/INPUTS.conf` file. It is best to gather all
- the information regarding all the input datasets into this one central
- file. To ensure that the proper dataset is being downloaded and used
- by the project, it is also recommended get an [MD5
- checksum](https://en.wikipedia.org/wiki/MD5) of the file and include
- that in `INPUTS.conf` so the project can check it automatically. The
- preparation/downloading of the input datasets is done in
- `reproduce/analysis/make/download.mk`. Have a look there to see how
- these values are to be used. This information about the input datasets
- is also used in the initial `configure` script (to inform the users),
- so also modify that file. You can find all occurrences of the demo
- dataset with the command below and replace it with your input's
- dataset.
-
- ```shell
- $ grep -ir wfpc2 ./*
- ```
+ - **Input datasets**: The input datasets are managed through the
+ `reproduce/analysis/config/INPUTS.conf` file. It is best to gather the
+ following information regarding all the input datasets into this one
+ central file: 1) the SHA256 checksum of the file, 2) the URL where the
+ file can be downloaded online. Please read the comments at the start
+ of `reproduce/analysis/config/INPUTS.conf` carefully.
- **`README.md`**: Correct all the `XXXXX` place holders (name of your
project, your own name, address of your project's online/remote
@@ -1518,12 +1522,12 @@ for the benefit of others.
# Have a look at the commits in the 'maneage' branch in relation
# with your project.
- $ git log --oneline --graph --decorate --all # General view of branches.
+ $ git log --oneline --graph --all # General view of branches.
- # Go to your 'master' branch and import all the updates into
- # 'master', don't worry about the printed outputs (in particular
+ # Go to your 'main' branch and import all the updates into
+ # 'main', don't worry about the printed outputs (in particular
# the 'CONFLICT's), we'll clean them up in the next step.
- $ git checkout master
+ $ git checkout main
$ git merge maneage
# Ignore conflicting Maneage files that you had previously deleted
@@ -1541,7 +1545,7 @@ for the benefit of others.
git status
# TIP: If you want the changes in one file to be only from a
- # special branch ('maneage' or 'master', completely ignoring
+ # special branch ('maneage' or 'main', completely ignoring
# changes in the other), use this command:
# $ git checkout <BRANCH-NAME> -- <FILENAME>
@@ -1564,7 +1568,7 @@ for the benefit of others.
./project make
# When everything is OK, before continuing with your project's
- # work, don't forget to push both your 'master' branch and your
+ # work, don't forget to push both your 'main' branch and your
# updated 'maneage' branch to your remote server.
git push
git push origin maneage
diff --git a/README.md b/README.md
index f361baa..7ef3e08 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
Reproducible source for Akhlaghi et al. (2021, arXiv:2006.03018)
----------------------------------------------------------------
-Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
+Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
See the end of the file for license conditions.
This is the reproducible project source for the paper titled "**Toward
@@ -195,6 +195,55 @@ finally create the final paper).
+### Building on ARM
+
+As of 2021-10-13, very little testing of Maneage has been done on arm64
+(tested in [aarch64](https://en.wikipedia.org/wiki/AArch64)). However,
+_some_ testing has been done on [the
+PinePhone](https://en.wikipedia.org/wiki/PinePhone), running
+[Debian/Mobian](https://wiki.mobian-project.org/doku.php?id=pinephone). In
+principle, the default Maneage branch (not all high-level software has been
+tested) should run fully (configure + make) from the raw source to the
+final verified pdf. Some issues that you might need to be aware of are
+listed below.
+
+#### Older packages
+
+Old packages that may still be needed and that have an old
+`config.guess` file (e.g. from 2002, such as fftw2-2.1.5-4.2, which is not
+in the base Maneage branch) may crash during the build. A workaround is to
+provide an updated (e.g. 2018) 'config.guess' file (generated by 'automake
+--add-missing --force-missing --copy') in 'reproduce/software/patches/' and
+copy it over the old file during the build of the package.
+
+#### An un-killable running job
+
+Vampires may be a problem on the pinephone/aarch64. A "vampire" is defined
+here as a job that is in the "R" (running) state, using nearly 95-100% of a
+cpu, for an extremely long time (hours), without producing any output to
+its log file, and is immune to being killed by the user or root with 'kill
+-9'. A reboot and relaunch of the './project configure --existing-conf'
+command is the only solution currently known (as of 2021-10-13) for
+vampires. These are known to have occurred with linux-image-5.13-sunxi64.
+
+
+#### RAM/swap space
+
+Adding at least 3 GB of swap space (man swapon, man mkswap, man dd) on the
+eMMC may help to reduce the chance of having errors due to the lack of RAM.
+
+
+#### Time scale
+
+On the PinePhone v1.2b, apart from the time wasted by vampires, expect
+roughly 24 hours' wall time in total for the full 'configure' phase. The
+default 'maneage' example calculations, diagrams and pdf production are
+light and should be very fast.
+
+
+
+
+
### Building in Docker containers
Docker containers are a common way to build projects in an independent
@@ -258,8 +307,9 @@ MB), not the full TeXLive collection!
```shell
FROM debian:stable-slim
- RUN apt-get update && apt-get install -y gcc g++ wget
+ RUN apt update && apt install -y gcc g++ wget
RUN useradd -ms /bin/sh maneager
+ RUN printf '123\n123' | passwd root
USER maneager
WORKDIR /home/maneager
RUN mkdir build
@@ -292,22 +342,22 @@ MB), not the full TeXLive collection!
```shell
# C and C++ compiler.
- RUN apt-get update && apt-get install -y gcc g++
+ RUN apt update && apt install -y gcc g++
# Uncomment this if you don't have 'software-XXXX.tar.gz' (below).
- #RUN apt-get install -y wget
+ #RUN apt install -y wget
```
3. **Define a user:** Some core software packages will complain if you try
to install them as the default (root) user. Generally, it is also good
- practice to avoid being the root user. After building the Docker image,
- you can always run it as root with this command: `docker run -u 0 -it
- XXXXXXX` (where `XXXXXXX` is the image identifier). Hence with the
- commands below we define a `maneager` user and activate it for the next
- steps.
+ practice to avoid being the root user. Hence with the commands below we
+ define a `maneager` user and activate it for the next steps. But just
+ in case root access is necessary temporarily, with the `passwd`
+ command, we are setting the root password to `123`.
```shell
RUN useradd -ms /bin/sh maneager
+ RUN printf '123\n123' | passwd root
USER maneager
WORKDIR /home/maneager
```
@@ -466,6 +516,8 @@ docker cp CONTAINER:/file/path/within/container /host/path/target
+
+
#### Only software environment in the Docker image
You can set the docker image to only contain the software environment and
@@ -475,96 +527,234 @@ image to a minimum (only containing the built software environment) to
easily move it from one computer to another. Below we'll summarize the
steps.
-1. Get your user ID with this command: `id -u`.
+ 1. Get your user ID with this command: `id -u`.
-2. Put the following lines into a `Dockerfile` of an otherwise empty
-directory. Just replacing `UID` with your user ID (found in the step
-above). This will build the basic directory structure. for the next steps.
+ 2. Make a new (empty) directory called `docker-tmp` temporarily (will be
+ deleted later).
-```shell
-FROM debian:stable-slim
-RUN apt-get update && apt-get install -y gcc g++ wget
-RUN useradd -ms /bin/sh --uid UID maneager
-USER maneager
-WORKDIR /home/maneager
-RUN mkdir build
-```
+ ```shell
+ mkdir docker-tmp
+ cd docker-tmp
+ ```
-3. Create an image based on the `Dockerfile` above. Just replace `PROJECT`
-with your desired name.
+ 3. Make a `Dockerfile` (within the new/empty directory) with the
+ following contents. Just replace `UID` with your user ID (found in
+ step 1 above). Note that we are manually setting the `maneager` (user)
+ password to `123` and the root password to '456' (both should be
+ repeated because they must be confirmed by `passwd`).
-```shell
-docker build -t PROJECT ./
-```
+ ```
+ FROM debian:stable-slim
+ RUN useradd -ms /bin/sh --uid UID maneager; \
+ printf '123\n123' | passwd maneager; \
+ printf '456\n456' | passwd root
+ USER maneager
+ WORKDIR /home/maneager
+ RUN mkdir build; mkdir build/analysis
+ ```
-4. Run the command below to create a container based on the image and mount
-the desired directories on your host into the special directories of your
-container. Just don't forget to replace `PROJECT` and set the `/PATH`s to
-the respective paths in your host operating system.
+ 4. Create a Docker image based on the `Dockerfile` above. Just replace
+ `MANEAGEBASE` with your desired name (this won't be your final image,
+ so you can safely use a name like `maneage-base`). Note that you need
+ to have root/administrator privileges when running it, so use 'sudo':
-```shell
-docker run -v /PATH/TO/PROJECT/SOURCE:/home/maneager/source \
- -v /PATH/TO/PROJECT/ANALYSIS/OUTPUTS:/home/maneager/build/analysis \
- -v /PATH/TO/SOFTWARE/SOURCE/CODE/DIR:/home/maneager/software \
- -v /PATH/TO/RAW/INPUT/DATA:/home/maneager/data \
- -it PROJECT
-```
+ ```shell
+ sudo docker build -t MANEAGEBASE ./
+ ```
-5. After running the command above, you are within the container. Go into
-the project source directory and run these commands to build the software
-environment.
+ 5. You don't need the temporary directory any more (the docker image is
+ saved in Docker's own location, and accessible from anywhere).
-```shell
-cd /home/maneager/source
-./project configure --build-dir=/home/maneager/build \
- --software-dir=/home/maneager/software \
- --input-dir=/home/maneager/data
-```
+ ```shell
+ cd ..
+ rm -rf docker-tmp
+ ```
-6. After the configuration finishes successfully, it will say so and ask
-you to run `./project make`. But don't do that yet. Keep this Docker
-container open and don't exit the container or terminal. Open a new
-terminal, and follow the steps described in the sub-section above to
-preserve the built container as a Docker image. Let's assume you call it
-`PROJECT-ENV`. After the new image is made, you should be able to see the
-new image in the list of images with this command (in the same terminal
-that you created the image):
+ 6. Put the following contents into a newly created plain-text file called
+ `docker-run`, while setting the mandatory variables based on your
+ system. The name `docker-run` is already inside Maneage's `.gitignore`
+ file, so you don't have to worry about mistakenly committing this file
+ (which contains private information: directories on this computer).
-```shell
-docker image list # In the other terminal.
-```
+ ```
+ #!/bin/sh
+ #
+ # Create a Docker container from an existing image of the built
+ # software environment, but with the source, data and build (analysis)
+ # directories directly within the host file system. This script should
+ # be run in the top project source directory (that has 'README.md' and
+ # 'paper.tex'). If not, replace the '$(pwd)' part with the project
+ # source directory.
+
+ # MANDATORY: Name of Docker container
+ docker_name=MANEAGEBASE
+
+ # MANDATORY: Location of "build" directory on this system (to host the
+ # 'analysis' sub-directory for output data products and possibly others).
+ build_dir=/PATH/TO/THIS/PROJECT/S/BUILD/DIR
+
+ # OPTIONAL: Location of project's input data in this system. If not
+ # present, a 'data' directory under the build directory will be created.
+ data_dir=/PATH/TO/THIS/PROJECT/S/DATA/DIR
+
+ # OPTIONAL: Location of software tarballs to use in building Maneage's
+ # internal software environment.
+ software_dir=/PATH/TO/SOFTWARE/TARBALL/DIR
+
+
+
+
+
+ # Internal processing
+ # --------------------
+ #
+ # Sanity check: Make sure that the build directory actually exists.
+ if ! [ -d $build_dir ]; then
+ echo "ERROR: '$build_dir' doesn't exist"; exit 1;
+ fi
+
+ # If the host operating system has '/dev/shm', then give Docker access
+ # to it also for improved speed in some scenarios (like configuration).
+ if [ -d /dev/shm ]; then shmopt="-v /dev/shm:/dev/shm";
+ else shmopt=""; fi
+
+ # If the 'analysis' and 'data' directories (that are mounted) don't exist,
+ # then create them (otherwise Docker will create them as 'root' before
+ # creating the container, and we won't have permission to write in them).
+ analysis_dir="$build_dir"/analysis
+ if ! [ -d $analysis_dir ]; then mkdir $analysis_dir; fi
+
+ # If the data or software directories don't exist, put them in the build
+ # directory (they will remain empty, but this helps in simplifying the
+ # mounting command!).
+ if ! [ -d $data_dir ]; then
+ data_dir="$build_dir"/data
+ if ! [ -d $data_dir ]; then mkdir $data_dir; fi
+ fi
+ if ! [ -d $software_dir ]; then
+ software_dir="$build_dir"/tarballs-software
+ if ! [ -d $software_dir ]; then mkdir $software_dir; fi
+ fi
+
+ # Run the Docker image while setting up the directories.
+ sudo docker run -v "$software_dir":/home/maneager/tarballs-software \
+ -v "$analysis_dir":/home/maneager/build/analysis \
+ -v "$data_dir":/home/maneager/data \
+ -v "$(pwd)":/home/maneager/source \
+ $shmopt -it $docker_name
+ ```
-7. Now you can run `./project make` in the initial container. You will see
-that all the built products (temporary or final datasets or PDFs), will be
-written in the `/PATH/TO/PROJECT/ANALYSIS/OUTPUTS` directory of your
-host. You can even change the source of your project on your host operating
-system an re-run Make to see the effect on the outputs and add/commit the
-changes to your Git history within your host. You can also exit the
-container any time. You can later load the `PROJECT-ENV` environment image
-into a new container with the same `docker run -v ...` command above, just
-use `PROJECT-ENV` instead of `PROJECT`.
+ 7. Make the `docker-run` script executable.
-8. In case you want to store the image as a single file as backup or to
-move to another computer, you can run the commands below. They will produce
-a single `project-env.tar.gz` file.
+ ```shell
+ chmod +x docker-run
+ ```
-```shell
-docker save -o project-env.tar PROJECT-ENV
-gzip --best project-env.tar
-```
+ 8. You can now start the Docker image by executing your newly added
+ script like below (it will ask for your root password). You will
+ notice that you are in the Docker container with the changed prompt.
+
+ ```shell
+ ./docker-run
+ ```
+
+ 9. You are now within the container. First, we'll add the GNU C and C++
+ compilers (which are necessary to build our own programs in Maneage)
+ and the GNU WGet downloader (which may be necessary if you don't have
+ a core software's tarball already). Maneage will build pre-defined
+ versions of both and will use them. But for the very first packages,
+ they are necessary. In the process, by setting the `PS1` environment
+ variable, we'll define a color-coding for the interactive shell prompt
+ (red for root and purple for the user).
+
+ ```shell
+ su
+ echo 'export PS1="[\[\033[01;31m\]\u@\h \W\[\033[32m\]\[\033[00m\]]# "' >> ~/.bashrc
+ source ~/.bashrc
+ apt update
+ apt install -y gcc g++ wget
+ exit
+ echo 'export PS1="[\[\033[01;35m\]\u@\h \W\[\033[32m\]\[\033[00m\]]$ "' >> ~/.bashrc
+ source ~/.bashrc
+ ```
+
+ 10. Now that the compiler is ready, we can start Maneage's
+ configuration. So let's go into the project source directory and run
+ these commands to build the software environment.
+
+ ```shell
+ cd source
+ ./project configure --input-dir=/home/maneager/data \
+ --build-dir=/home/maneager/build \
+ --software-dir=/home/maneager/tarballs-software
+ ```
+
+ 11. After the configuration finishes successfully, it will say so. It will
+ then ask you to run `./project make`. **But don't do that
+ yet**. Keep this Docker container open and don't exit the container or
+ terminal. Open a new terminal, and follow the steps described in the
+ sub-section above to preserve (or "commit") the built container as a
+ Docker image. Let's assume you call it `MY-PROJECT-ENV`. After the new
+ image is made, you should be able to see the new image in the list of
+ images with this command (in yet another terminal):
+
+ ```shell
+ docker image list # In the other terminal.
+ ```
+
+ 12. Now that you have safely "committed" your current Docker container
+ into a separate Docker image, you can **exit the container** safely
+ with the `exit` command. Don't worry, you won't lose the built
+ software environment: it is all now saved separately within the Docker
+ image.
+
+ 13. Re-open your `docker-run` script and change `MANEAGEBASE` to
+ `MY-PROJECT-ENV` (or any other name you set for the environment you
+ committed above).
+
+ ```shell
+ emacs docker-run
+ ```
+
+ 14. That is it! You can now always easily enter your container (only for
+ the software environment) with the command below. Within the
+ container, any file you save/edit in the `source` directory of the
+ docker container is the same file on your host OS and any file you
+ build in your `build/analysis` directory (within the Maneage'd
+ project) will be on your host OS. You can even use your container's
+ Git to store the history of your project in your host OS. See the next
+ step in case you want to move your built software environment to
+ another computer.
+
+ ```shell
+ ./docker-run
+ ```
+
+ 15. In case you want to store the image as a single file as backup or to
+ move to another computer, you can run the commands below. They will
+ produce a single `my-project-env.tar.gz` file.
+
+ ```shell
+ docker save -o my-project-env.tar MY-PROJECT-ENV
+ gzip --best my-project-env.tar
+ ```
+
+ 16. To load the tarball above into a clean docker environment (for example
+ on another system) copy the `my-project-env.tar.gz` file there and run
+ the command below. You can then create the `docker-run` script for
+ that system and run it to enter. Just don't forget that if your
+ `analysis_dir` directory is empty on the new/clean system. So you
+ should first run the same `./project configure ...` command above in
+ the docker image so it connects the environment to your source. Don't
+ worry, it won't build any software and should finish in a second or
+ two. Afterwards, you can safely run `./project make` and continue
+ working like you did on the old system.
+
+ ```shell
+ docker load --input my-project-env.tar.gz
+ ```
-9. To load the tarball above into a clean docker environment (either on the
-same system or in another system), and create a new container from the
-image like above (the `docker run -v ...` command). Just don't forget that
-if your `/PATH/TO/PROJECT/ANALYSIS/OUTPUTS` directory is empty on the
-new/clean system, you should first run `./project configure -e` in the
-docker image so it builds the core file structure there. Don't worry, it
-won't build any software and should finish in a second or two. Afterwards,
-you can safely run `./project make`.
-```shell
-docker load --input project-env.tar.gz
-```
diff --git a/paper.tex b/paper.tex
index 69b200f..f17f083 100644
--- a/paper.tex
+++ b/paper.tex
@@ -1,11 +1,11 @@
%% Main LaTeX source of project's paper.
%
-%% Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-%% Copyright (C) 2020-2021 Raúl Infante-Sainz <infantesainz@gmail.com>
-%% Copyright (C) 2020-2021 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
-%% Copyright (C) 2020-2021 Mohammadreza Khellat <mkhellat@ideal-information.com>
-%% Copyright (C) 2020-2021 David Valls-Gabaud <david.valls-gabaud@obspm.fr>
-%% Copyright (C) 2020-2021 Roberto Baena-Gallé <roberto.baena@unir.net>
+%% Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2020-2022 Raúl Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2020-2022 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
+%% Copyright (C) 2020-2022 Mohammadreza Khellat <mkhellat@ideal-information.com>
+%% Copyright (C) 2020-2022 David Valls-Gabaud <david.valls-gabaud@obspm.fr>
+%% Copyright (C) 2020-2022 Roberto Baena-Gallé <roberto.baena@unir.net>
%
%% This file is free software: you can redistribute it and/or modify it
%% under the terms of the GNU General Public License as published by the
diff --git a/project b/project
index a99e0a6..8db213a 100755
--- a/project
+++ b/project
@@ -1,10 +1,10 @@
#!/bin/sh
#
# High-level script to manage the project.
-# Run `./project --help' for a description of how to use it.
+# Run './project --help' for a description of how to use it.
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2021-2022 Raul Infante-Sainz <infantesainz@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -30,7 +30,7 @@ set -e
# Default option values
-jobs=0 # 0 is for the default for the `configure.sh' script.
+jobs=0 # 0 is for the default for the 'configure.sh' script.
group=
debug=
host_cc=0
@@ -60,9 +60,9 @@ scriptname="./project"
# into the respective variable.
#
# Each option has two lines because we want to process both these formats:
-# `--name=value' and `--name value'. The former (with `=') is a single
+# '--name=value' and '--name value'. The former (with '=') is a single
# command-line argument, so we just need to shift the counter by one. The
-# latter (without `=') is two arguments, so we'll need two shifts.
+# latter (without '=') is two arguments, so we'll need two shifts.
#
# Note on the case strings: for every option, we need three lines: one when
# the option name and value are separate. Another when there is an equal
@@ -127,6 +127,7 @@ Make (analysis) options:
-p, --prepare-redo Re-do preparation (only done automatically once).
Make (final PDF) options:
+ --refresh-bib Force refresh the bibliography.
--highlight-new Highlight '\new' parts of text as green.
--highlight-notes Show '\tonote' regions as red text in PDF.
--supplement Build the appendices as a separate supplement PDF.
@@ -174,7 +175,6 @@ do
make) func_operation_set $1; shift;;
shell) func_operation_set $1; shift;;
-
# Configure options:
-b|--build-dir) build_dir="$2"; check_v "$1" "$build_dir"; shift;shift;;
-b=*|--build-dir=*) build_dir="${1#*=}"; check_v "$1" "$build_dir"; shift;;
@@ -213,12 +213,23 @@ do
# value, it is like giving it a value of `a'):
--supplement) separatesupplement=1; shift;;
--supplement=*) on_off_option_error --supplement;;
+
+ # Note that Make's 'debug' can take values, but when called without any
+ # value, it is like giving it a value of 'a'):
+ --refresh-bib) [ -f tex/src/references.tex ] && touch tex/src/references.tex; shift;;
--highlight-new) highlightnew=1; shift;;
--highlight-new=*) on_off_option_error --highlight-new;;
--highlight-notes) highlightnotes=1; shift;;
--highlight-notes=*) on_off_option_error --highlight-notes;;
- -d|--debug) if [ x"$2" = x ]; then debug=a; shift;
- else debug="$2"; check_v debug "$debug"; shift;shift; fi;;
+ -d|--debug) if [ x$operation = x ]; then
+ echo "Please set the operation before calling '--debug'"; exit 1
+ elif [ x$operation = xconfigure ]; then debug=a; shift;
+ elif [ x$operation = xmake ]; then
+ if [ x"$2" = x ]; then echo "In make-mode, '--debug' needs a value"; exit 1
+ else debug="$2"; check_v debug "$debug"; shift;shift; fi
+ else
+ echo "Operation '$operation' not recognized, please use 'configure' or 'make'"
+ fi;;
-d=*|--debug=*) debug="${1#*=}"; check_v debug "$debug"; shift;;
-d*) debug=$(echo "$1" | sed -e's/-d//'); check_v debug "$debug"; shift;;
-p|--prepare-redo) prepare_redo=1; shift;;
@@ -239,8 +250,8 @@ done
# Check configuration status
# --------------------------
if ! [ x$check_config = x ]; then
- # Find the color option to pass to `ls'. Note that `--color' (for GNU
- # Coreutils `ls') should be checked first because it also has `-G', but
+ # Find the color option to pass to 'ls'. Note that '--color' (for GNU
+ # Coreutils 'ls') should be checked first because it also has '-G', but
# for something else.
if ls --color 2> /dev/null > /dev/null; then coloropt="--color=auto"
elif ls -G 2> /dev/null > /dev/null; then coloropt="-G"
@@ -286,7 +297,7 @@ EOF
check=$(ls .local/version-info/python/)
if ! [ "x$check" = x ]; then
printresults=1
- ln -s .local/version-info/python/* $checkdir/
+ ln -s "$(pwd)"/.local/version-info/python/* $checkdir/
fi
check=$(ls .local/version-info/proglib/)
if ! [ "x$check" = x ]; then
@@ -300,7 +311,7 @@ EOF
# Then sort all the links based on the most recent dates of the
# files they link to (with '-L').
- ls -Llt $checkdir \
+ ls -Llt $checkdir \
| awk '/^-/ && c++<5 {printf "[at %s] %s\n", $(NF-1), $NF}'
fi
else
@@ -376,10 +387,11 @@ EOF
# Run operations in controlled environment
# ----------------------------------------
controlled_env() {
+
# Get the full address of the build directory:
bdir=`.local/bin/realpath .build`
- # Remove all existing environment variables (with `env -i') and only
+ # Remove all existing environment variables (with 'env -i') and only
# use some pre-defined environment variables, then build the project.
envmake=".local/bin/env -i HOME=$bdir sys_rm=$(which rm) $gopt"
envmake="$envmake separatesupplement=$separatesupplement "
@@ -388,7 +400,7 @@ controlled_env() {
envmake="$envmake --no-builtin-rules --no-builtin-variables -f $1"
if ! [ x"$debug" = x ]; then envmake="$envmake --debug=$debug"; fi
- # Set the number of jobs. Note that for the `configure.sh' script the
+ # Set the number of jobs. Note that for the 'configure.sh' script the
# default value has to be 0, so the default is the maximum number of
# threads. But here, the default value is 1.
if ! [ x"$jobs" = x0 ]; then envmake="$envmake -j$jobs"; fi
@@ -420,7 +432,7 @@ case $operation in
#
# In some scenarios (for example when using a tarball from arXiv),
# it may happen that the host server has removed the executable
- # flags of all the files. In `README.md' we instruct the readers on
+ # flags of all the files. In 'README.md' we instruct the readers on
# setting the executable flag of this script. But we don't want the
# user to have to worry about any other file that needs an
# executable flag.
@@ -461,7 +473,7 @@ case $operation in
# Set the group writing permission for everything in the
# installed software directory. The common build process sets
# the writing permissions of the installed programs/libraries
- # to `755'. So group members can't write over a file. This
+ # to '755'. So group members can't write over a file. This
# creates problems when another group member wants to update
# the software for example. We thus need to manually add the
# group writing flag to all installed software files.
@@ -478,17 +490,17 @@ case $operation in
make)
# Make sure the configure script has been completed properly
- # (`configuration-done.txt' exists).
+ # ('configuration-done.txt' exists).
if ! [ -f .build/software/configuration-done.txt ]; then
configuration_necessary
fi
# Run data preparation phase (optionally build Makefiles with
- # special values for optimizing the main `top-make.mk'). But note
+ # special values for optimizing the main 'top-make.mk'). But note
# that data preparation is only done automatically the first time
- # the project is built (when `.build/software/preparation-done.mk'
+ # the project is built (when '.build/software/preparation-done.mk'
# doesn't yet exist). After that, if the user wants to re-do the
- # preparation they have to use the `--prepare-redo' option.
+ # preparation they have to use the '--prepare-redo' option.
if ! [ -f .build/software/preparation-done.mk ] \
|| [ x"$prepare_redo" = x1 ]; then
controlled_env reproduce/analysis/make/top-prepare.mk
@@ -518,7 +530,7 @@ case $operation in
shell)
# Make sure the configure script has been completed properly
- # (`configuration-done.txt' exists).
+ # ('configuration-done.txt' exists).
if ! [ -f .build/software/configuration-done.txt ]; then
configuration_necessary
fi
@@ -540,7 +552,7 @@ case $operation in
OMPI_MCA_plm_rsh_agent=/bin/false \
PYTHONPATH="$instdir"/lib/python/site-packages \
PYTHONPATH3="$instdir"/lib/python/site-packages \
- PS1="[\[\033[32m\](maneage)\[\033[00m\] \u@\h \W]$ " \
+ PS1="[\[\033[01;35m\]maneage@\h \W\[\033[32m\]\[\033[00m\]]$ " \
"$instdir"/bin/bash
;;
@@ -556,5 +568,5 @@ Please run with '--help' for more information.
EOF
exit 1
- ;;
+ ;;
esac
diff --git a/reproduce/analysis/bash/download-multi-try b/reproduce/analysis/bash/download-multi-try
index 76eb859..994a8fa 100755
--- a/reproduce/analysis/bash/download-multi-try
+++ b/reproduce/analysis/bash/download-multi-try
@@ -6,7 +6,7 @@
#
# $ /path/to/download-multi-try downloader lockfile input-url downloaded-name
#
-# NOTE: The `downloader' must contain the option to specify the output name
+# NOTE: The 'downloader' must contain the option to specify the output name
# in its end. For example "wget -O". Any other option can also be placed in
# the middle.
#
@@ -20,13 +20,13 @@
# world, downloading is done much faster in serial, not in parallel. But
# the project's processing may be done in parallel (with multiple threads
# needing to download different files at the same time). Therefore, this
-# script uses the `flock' program to only do one download at a time. To
+# script uses the 'flock' program to only do one download at a time. To
# benefit from it, any call to this script must be given the same lock
# file. If your system has multiple ports to the internet, or for any
-# reason, you don't want to use a lock file, set the `lockfile' name to
-# `nolock'.
+# reason, you don't want to use a lock file, set the 'lockfile' name to
+# 'nolock'.
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -114,9 +114,9 @@ while [ ! -f "$outname" ]; do
sleep $tstep
fi
- # Attempt downloading the file. Note that the `downloader' ends with
+ # Attempt downloading the file. Note that the 'downloader' ends with
# the respective option to specify the output name. For example "wget
- # -O" (so `outname', that comes after it) will be the name of the
+ # -O" (so 'outname', that comes after it) will be the name of the
# downloaded file.
if [ x"$lockfile" = xnolock ]; then
if ! $downloader $outname $inurl; then rm -f $outname; fi
diff --git a/reproduce/analysis/config/INPUTS.conf b/reproduce/analysis/config/INPUTS.conf
index fd8ac53..f3d1cd4 100644
--- a/reproduce/analysis/config/INPUTS.conf
+++ b/reproduce/analysis/config/INPUTS.conf
@@ -1,42 +1,70 @@
-# Input files necessary for this project, the variables defined in this
-# file are primarily used in 'reproduce/analysis/make/download.mk'. See
-# there for precise usage of the variables. But comments are also provided
-# here.
-#
-# Necessary variables for each input dataset are listed below. Its good
-# that all the variables of each file have the same base-name (in the
-# example below 'DEMO') with descriptive suffixes, also put a short comment
-# above each group of variables for each dataset, shortly explaining what
-# it is.
-#
-# 1) Local file name ('DEMO-DATA' below): this is the name of the dataset
-# on the local system (in 'INDIR', given at configuration time). It is
-# recommended that it be the same name as the online version of the
-# file like the case here (note how this variable is used in 'DEMO-URL'
-# for the dataset's full URL). However, this is not always possible, so
-# the local and server filenames may be different. Ultimately, the file
-# name is irrelevant, we check the integrity with the checksum.
-#
-# 2) The MD5 checksum of the file ('DEMO-MD5' below): this is very
-# important for an automatic verification of the file. You can
-# calculate it by running 'md5sum' on your desired file. You can also
-# use any other checksum tool that you prefer, just be sure to correct
-# the respective command in 'reproduce/analysis/make/download.mk'.
-#
-# 3) The human-readable size of the file ('DEMO-SIZE' below): this is an
-# optional variable, mainly to help a reader of your project get a
-# sense of the volume they need to download if they don't already have
-# the dataset. So it is highly recommended to add it (future readers of
-# your project's source will appreciate it!). You can get it from the
-# output of 'ls -lh' command on the file. Optionally you can use it in
-# messages during the configuration phase (when Maneage asks for the
-# input data directory), along with other info about the file(s).
-#
-# 4) The full dataset URL ('DEMO-URL' below): this is the full URL
-# (including the file-name) that can be used to download the dataset
-# when necessary. Also, see the description above on local filename.
-#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# This project's input file information (metadata).
+#
+# For each input (external) data file that is used within the project,
+# three variables are suggested here (two of them are mandatory). These
+# variables will be used by 'reproduce/analysis/make/download.mk' to import
+# the dataset into the project (within the build directory):
+#
+# - If the file already exists locally in '$(INDIR)' (the optional input
+# directory that may have been specified at configuration time with
+# '--input-dir'), a symbolic link will be added in '$(indir)' (in the
+# build directory). A symbolic link is used to avoid extra storage when
+# files are large.
+#
+# - If the file doesn't exist in '$(INDIR)', or no input directory was
+# specified at configuration time, then the file is downloaded from a
+# specific URL.
+#
+# In both cases, before placing the file (or its link) in the build
+# directory, 'reproduce/analysis/make/download.mk' will check the SHA256
+# checksum of the dataset and if it differs from the pre-defined value (set
+# for that file, here), it will abort (since this is not the intended
+# dataset).
+#
+# Therefore, the two variables specifying the URL and SHA256 checksum of
+# the file are MANDATORY. The third variable (INPUT-%-size) showing the
+# human-readable size of the file (from 'ls -lh') is optional (but
+# recommended, because it gives future scientists a feeling of the
+# volume of data they need to input; this becomes important if the
+# size/number of files is large).
+#
+# The naming convention is critical for the input files to be properly
+# imported into the project. In the patterns below, the '%' is the full
+# file name (including its prefix): for example in the demo input of this
+# file in the 'maneage' branch, we have 'INPUT-wfpc2.fits-sha256':
+# therefore, the input file (within the project's '$(indir)') is called
+# 'wfpc2.fits'. This allows you to simply set '$(indir)/wfpc2.fits' as the
+# pre-requisite of any recipe that needs the input file: you will rarely
+# (if at all!) need to use these variables directly.
+#
+# INPUT-%-sha256: The sha256 checksum of the file. You can generate the
+# SHA256 checksum of a file with the 'sha256sum FILENAME'
+# command (where 'FILENAME' is the name of your
+# file). This is very important for an automatic
+# verification of the file: that it hasn't changed
+# between different runs of the project (locally or at
+# the URL). There are more robust checksum algorithms
+# like the 'SHA' standards.
+#
+# INPUT-%-url: The URL to download the file if it is not available
+# locally. It can happen that during the first phases of
+# your project the data aren't yet public. In this case, you
+# set a phony URL like this (just as a clear place-holder):
+# 'https://this.file/is/not/yet/public'.
+#
+# INPUT-%-size: The human-readable size of the file (output of 'ls
+# -lh'). This is not used by default but can help other
+# scientists who would like to run your project get a
+# good feeling of the network and storage capacity
+# that is necessary to start the project.
+#
+# The input dataset's name (that goes into the '%') can be different from
+# the URL's file name (last component of the URL, after the last '/'). Just
+# note that it is assumed that the local copy (outside of your project) is
+# also called '%' (if your local copy of the input dataset and the only
+# repository names are the same, be sure to set '%' accordingly).
+#
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -48,7 +76,6 @@
# Dataset used in this analysis and its checksum for integrity checking.
-MK20DATA = menke20.xlsx
-MK20MD5 = 8e4eee64791f351fec58680126d558a0
-MK20SIZE = 1.9MB
-MK20URL = https://www.biorxiv.org/content/biorxiv/early/2020/01/18/2020.01.15.908111/DC1/embed/media-1.xlsx
+INPUT-menke20.xlsx-size = 1.9M
+INPUT-menke20.xlsx-url = https://www.biorxiv.org/content/biorxiv/early/2020/01/18/2020.01.15.908111/DC1/embed/media-1.xlsx
+INPUT-menke20.xlsx-sha256 = 7839cdc2946134773ffc401cbcc78fb58fc489d2caad65375c85d605b2f8b13e
diff --git a/reproduce/analysis/config/metadata.conf b/reproduce/analysis/config/metadata.conf
index caac5c9..f570340 100644
--- a/reproduce/analysis/config/metadata.conf
+++ b/reproduce/analysis/config/metadata.conf
@@ -15,7 +15,7 @@
# and the copyright license name and standard link to the fully copyright
# license.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
diff --git a/reproduce/analysis/config/pdf-build.conf b/reproduce/analysis/config/pdf-build.conf
index 015bf2e..a57b529 100644
--- a/reproduce/analysis/config/pdf-build.conf
+++ b/reproduce/analysis/config/pdf-build.conf
@@ -12,7 +12,7 @@
# LaTeX. Otherwise, a notice will just printed that, no PDF will be
# created.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
diff --git a/reproduce/analysis/config/verify-outputs.conf b/reproduce/analysis/config/verify-outputs.conf
index d96f293..37fc43c 100644
--- a/reproduce/analysis/config/verify-outputs.conf
+++ b/reproduce/analysis/config/verify-outputs.conf
@@ -1,6 +1,6 @@
# To enable verification of output datasets set this variable to 'yes'.
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
diff --git a/reproduce/analysis/make/demo-plot.mk b/reproduce/analysis/make/demo-plot.mk
index 53e1918..13b0d45 100644
--- a/reproduce/analysis/make/demo-plot.mk
+++ b/reproduce/analysis/make/demo-plot.mk
@@ -1,7 +1,7 @@
# Second step of analysis:
# Data for plot of number/fraction of tools per year.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
@@ -22,16 +22,16 @@
a2mk20f1c = $(tex-publish-dir)/tools-per-year.txt
$(a2mk20f1c): $(mk20tab3) | $(tex-publish-dir)
- # Remove the (possibly) produced figure that is created from this
- # table: it is created by LaTeX's TiKZ package, and includes
- # multiple files with a fixed prefix.
+# Remove the (possibly) produced figure that is created from this
+# table: it is created by LaTeX's TiKZ package, and includes multiple
+# files with a fixed prefix.
rm -f $(tikzdir)/figure-tools-per-year*
- # Write the column metadata in a temporary file name (appending
- # '.tmp' to the actual target name). Once all steps are done, it is
- # renamed to the final target. We do this because if there is an
- # error in the middle, Make will not consider the job to be
- # complete and will stop here.
+# Write the column metadata in a temporary file name (appending
+# '.tmp' to the actual target name). Once all steps are done, it is
+# renamed to the final target. We do this because if there is an
+# error in the middle, Make will not consider the job to be complete
+# and will stop here.
echo "# Data of plot showing fraction of papers that mentioned software tools" > $@.tmp
echo "# per year to demonstrate the features of Maneage (MANaging data linEAGE)." >> $@.tmp
>> $@.tmp
@@ -48,7 +48,7 @@ $(a2mk20f1c): $(mk20tab3) | $(tex-publish-dir)
$(call print-general-metadata, $@.tmp)
- # Find the maximum number of papers.
+# Find the maximum number of papers.
awk '!/^#/{all[$$1]+=$$2; id[$$1]+=$$3} \
END{ for(year in all) \
printf("%-7d%-10.3f%d\n", year, 100*id[year]/all[year], \
@@ -56,7 +56,7 @@ $(a2mk20f1c): $(mk20tab3) | $(tex-publish-dir)
}' $< \
>> $@.tmp
- # Write it into the final target
+# Write it into the final target
mv $@.tmp $@
@@ -66,15 +66,15 @@ $(a2mk20f1c): $(mk20tab3) | $(tex-publish-dir)
# Final LaTeX macro
$(mtexdir)/demo-plot.tex: $(a2mk20f1c) $(pconfdir)/demo-year.conf
- # Find the first year (first column of first row) of data.
+# Find the first year (first column of first row) of data.
v=$$(awk '!/^#/ && c==0{c++; print $$1}' $(a2mk20f1c))
echo "\newcommand{\menkefirstyear}{$$v}" > $@
- # Find the number of rows in the plotted table.
+# Find the number of rows in the plotted table.
v=$$(awk '!/^#/{c++} END{print c}' $(a2mk20f1c))
echo "\newcommand{\menkenumyears}{$$v}" >> $@
- # Find the number of papers in 1996.
+# Find the number of papers in the demo year (set in 'demo-year.conf').
v=$$(awk '$$1==$(menke-demo-year){print $$3}' $(a2mk20f1c))
echo "\newcommand{\menkenumpapersdemocount}{$$v}" >> $@
echo "\newcommand{\menkenumpapersdemoyear}{$(menke-demo-year)}" >> $@
diff --git a/reproduce/analysis/make/download.mk b/reproduce/analysis/make/download.mk
index ea70fca..7110c8f 100644
--- a/reproduce/analysis/make/download.mk
+++ b/reproduce/analysis/make/download.mk
@@ -5,7 +5,7 @@
# recipes in this Makefile all use a single file lock to have one download
# script running at every instant.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -27,70 +27,68 @@
# Download input data
# --------------------
#
-# The input dataset properties are defined in
-# `$(pconfdir)/INPUTS.conf'. For this template we only have one dataset to
-# enable easy processing, so all the extra checks in this rule may seem
-# redundant.
+# 'reproduce/analysis/config/INPUTS.conf' contains the input dataset
+# properties. In most cases, you will not need to edit this rule (or
+# file!). Simply follow the instructions in 'INPUTS.conf' and set the
+# variable names according to the described standards.
#
-# In a real project, you will need more than one dataset. In that case,
-# just add them to the target list and add an `elif' statement to define it
-# in the recipe.
-#
-# Files in a server usually have very long names, which are mainly designed
-# for helping in data-base management and being generic. Since Make uses
-# file names to identify which rule to execute, and the scope of this
-# research project is much less than the generic survey/dataset, it is
-# easier to have a simple/short name for the input dataset and work with
-# that. In the first condition of the recipe below, we connect the short
-# name with the raw database name of the dataset.
+# TECHNICAL NOTE on the '$(foreach i, ...)' loop of 'inputdatasets': we are
+# using several (relatively complex!) features particular to Make: In GNU
+# Make, '.VARIABLES' "... expands to a list of the names of all global
+# variables defined so far" (from the "Other Special Variables" section of
+# the GNU Make manual). Assuming that the pattern 'INPUT-%-sha256' is only
+# used for input files, we use 'filter' to find all the variables that
+# contain an input file name (the '%' is the file name). Finally, using the
+# pattern-substitution function ('patsubst'), we remove the fixed strings
+# at the start and end of the variable name (a short standalone sketch of
+# this expansion follows the 'inputdatasets' definition below).
#
# Download lock file: Most systems have a single connection to the
# internet, therefore downloading is inherently done in series. As a
# result, when more than one dataset is necessary for download, if they are
# done in parallel, the speed will be slower than downloading them in
-# series. We thus use the `flock' program to tie/lock the downloading
+# series. We thus use the 'flock' program to tie/lock the downloading
# process with a file and make sure that only one downloading event is in
# progress at every moment.
$(indir):; mkdir $@
downloadwrapper = $(bashdir)/download-multi-try
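The serialization described above ultimately relies on the 'flock' program (the lock file is passed to the 'download-multi-try' wrapper in the recipe below). A minimal standalone sketch of the same locking idea, assuming util-linux's 'flock' and placeholder URLs/file names:

    # Both jobs share one lock file, so even when started in parallel
    # the two downloads run one after the other.
    flock /tmp/download.lock wget -O a.fits https://example.org/a.fits &
    flock /tmp/download.lock wget -O b.fits https://example.org/b.fits &
    wait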
-inputdatasets = $(indir)/menke20.xlsx
+inputdatasets = $(foreach i, \
+ $(patsubst INPUT-%-sha256,%, \
+ $(filter INPUT-%-sha256,$(.VARIABLES))), \
+ $(indir)/$(i))
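To see the '.VARIABLES'/'filter'/'patsubst' combination in isolation, here is a minimal, self-contained sketch (made-up variable names; save as e.g. 'sketch.mk' and run 'make -f sketch.mk'):

    # Illustration only: two fake input declarations.
    INPUT-a.fits-sha256 = 123
    INPUT-b.txt-sha256  = 456

    # 'filter' keeps only the variable names matching the pattern;
    # 'patsubst' then strips the fixed prefix and suffix, leaving the
    # bare file names.
    files := $(patsubst INPUT-%-sha256,%, \
               $(filter INPUT-%-sha256,$(.VARIABLES)))
    $(info files: $(files))    # e.g. "files: a.fits b.txt" (order may vary)

    all: ; @true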
$(inputdatasets): $(indir)/%: | $(indir) $(lockdir)
- # Set the necessary parameters for this input file.
- if [ $* = menke20.xlsx ]; then
- localname=$(MK20DATA); url=$(MK20URL); mdf=$(MK20MD5);
- else
- echo; echo; echo "Not recognized input dataset: '$*'."
- echo; echo; exit 1
- fi
+# Set the necessary parameters for this input file as shell variables
+# (to help in readability).
+ url=$(INPUT-$*-url)
+ sha=$(INPUT-$*-sha256)
- # Download (or make the link to) the input dataset. If the file
- # exists in `INDIR', it may be a symbolic link to some other place
- # in the filesystem. To avoid too many links when using these files
- # during processing, we'll use `readlink -f' so the link we make
- # here points to the final file directly (note that `readlink' is
- # part of GNU Coreutils). If its not a link, the `readlink' part
- # has no effect.
+# Download (or make the link to) the input dataset. If the file
+# exists in 'INDIR', it may be a symbolic link to some other place in
+# the filesystem. To avoid too many links when using these files
+# during processing, we'll use 'readlink -f' so the link we make here
+# points to the final file directly (note that 'readlink' is part of
+# GNU Coreutils). If it's not a link, the 'readlink' part has no
+# effect.
unchecked=$@.unchecked
- if [ -f $(INDIR)/$$localname ]; then
- ln -fs $$(readlink -f $(INDIR)/$$localname) $$unchecked
+ if [ -f $(INDIR)/$* ]; then
+ ln -fs $$(readlink -f $(INDIR)/$*) $$unchecked
else
touch $(lockdir)/download
$(downloadwrapper) "wget --no-use-server-timestamps -O" \
$(lockdir)/download $$url $$unchecked
fi
- # Check the md5 sum to see if this is the proper dataset.
- sum=$$(md5sum $$unchecked | awk '{print $$1}')
- if [ $$sum = $$mdf ]; then
+# Check the checksum to see if this is the proper dataset.
+ sum=$$(sha256sum $$unchecked | awk '{print $$1}')
+ if [ $$sum = $$sha ]; then
mv $$unchecked $@
echo "Integrity confirmed, using $@ in this project."
else
echo; echo;
- echo "Wrong MD5 checksum for input file '$$localname':"
+ echo "Wrong SHA256 checksum for input file '$*':"
echo " File location: $$unchecked"; \
- echo " Expected MD5 checksum: $$mdf"; \
- echo " Calculated MD5 checksum: $$sum"; \
+ echo " Expected SHA256 checksum: $$sha"; \
+ echo " Calculated SHA256 checksum: $$sum"; \
echo; exit 1
fi
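When adding or updating a dataset, the expected value for its 'INPUT-<filename>-sha256' line in 'INPUTS.conf' can be computed once by hand with the same tool this recipe uses (the file name below is a placeholder):

    # Print only the checksum, ready to paste into INPUTS.conf.
    sha256sum mydata.fits | awk '{print $1}'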
@@ -104,7 +102,7 @@ $(inputdatasets): $(indir)/%: | $(indir) $(lockdir)
# It is very important to mention the address where the data were
# downloaded in the final report.
$(mtexdir)/download.tex: $(indir)/menke20.xlsx | $(mtexdir)
- echo "\newcommand{\menketwentyxlsxname}{$(MK20DATA)}" > $@
- echo "\newcommand{\menketwentychecksum}{$(MK20MD5)}" >> $@
- echo "\newcommand{\menketwentybytesize}{$(MK20SIZE)}" >> $@
- echo "\newcommand{\menketwentyurl}{$(MK20URL)}" >> $@
+ echo "\newcommand{\menketwentyxlsxname}{menke20.xlsx}" > $@
+ echo "\newcommand{\menketwentychecksum}{$(INPUT-menke20.xlsx-sha256)}" >> $@
+ echo "\newcommand{\menketwentybytesize}{$(INPUT-menke20.xlsx-size)}" >> $@
+ echo "\newcommand{\menketwentyurl}{$(INPUT-menke20.xlsx-url)}" >> $@
diff --git a/reproduce/analysis/make/format.mk b/reproduce/analysis/make/format.mk
index fd4060a..979475f 100644
--- a/reproduce/analysis/make/format.mk
+++ b/reproduce/analysis/make/format.mk
@@ -6,7 +6,7 @@
# because it provides interesting statistics about tools and methods used
# in scientific papers.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
@@ -28,13 +28,13 @@ mk20tab3 = $(a1dir)/table-3.txt
$(a1dir):; mkdir $@
$(mk20tab3): $(indir)/menke20.xlsx | $(a1dir)
- # Set a base-name for the table-3 data.
+# Set a base-name for the table-3 data.
base=$(basename $(notdir $<))-table-3
- # Unfortunately XLSX I/O only works when the input and output are
- # in the directory it is running. So first, we need to switch to
- # the input directory, run it, then put our desired output where we
- # want and delete the extra files.
+# Unfortunately XLSX I/O only works when the input and output are in
+# the directory it is running in. So first, we need to switch to the
+# input directory, run it, then put our desired output where we want
+# it and delete the extra files.
topdir=$$(pwd)
cd $(indir)
xlsxio_xlsx2csv $(notdir $<)
@@ -42,12 +42,12 @@ $(mk20tab3): $(indir)/menke20.xlsx | $(a1dir)
rm $(notdir $<).*.csv
cd $$topdir
- # Read the necessary information. Note that we are dealing with a
- # CSV (comma-separated value) file. But when there are commas in a
- # string, quotation signs are put around it. The `FPAT' values is
- # fully described in the GNU AWK manual. In short, it ensures that
- # if there is a comma in the middle of double-quotes, it doesn't
- # count as a delimter.
+# Read the necessary information. Note that we are dealing with a CSV
+# (comma-separated values) file. But when there are commas inside a
+# string, quotation marks are put around it. The 'FPAT' value is
+# fully described in the GNU AWK manual. In short, it ensures that a
+# comma inside double-quotes doesn't count as a delimiter.
echo "# Column 1: YEAR [counter, i16] Year of journal's publication." > $@.tmp
echo "# Column 2: NUM_PAPERS [counter, i16] Number of studied papers in that journal." >> $@.tmp
echo "# Column 3: NUM_PAPERS_WITH_TOOLS [counter, i16] Number of papers with an identified tool." >> $@.tmp
@@ -56,9 +56,9 @@ $(mk20tab3): $(indir)/menke20.xlsx | $(a1dir)
awk 'NR>1{printf("%-10d%-10d%-10d%-10d %s\n", $$2, $$3, $$3*$$NF, $$(NF-1), $$1)}' \
FPAT='([^,]+)|("[^"]+")' $(indir)/$$base.csv >> $@.tmp
- # Set the temporary file as the final target. This was done so if
- # there is any possible crash in the steps above, this rule is
- # re-run (its final target isn't rebuilt).
+# Rename the temporary file to the final target. This is done so that
+# if there is a crash in the steps above, the final target isn't
+# created and this rule will be re-run on the next attempt.
mv $@.tmp $@
@@ -68,19 +68,19 @@ $(mk20tab3): $(indir)/menke20.xlsx | $(a1dir)
# Main LaTeX macro file
$(mtexdir)/format.tex: $(mk20tab3)
- # Count the total number of papers in their study.
+# Count the total number of papers in their study.
v=$$(awk '!/^#/{c+=$$2} END{print c}' $(mk20tab3))
echo "\newcommand{\menkenumpapers}{$$v}" > $@
- # Count how many unique journals there were in the study. Note that
- # the `31' comes because we put 10 characters for each numeric
- # column and separated the last numeric column from the string
- # column with a space. If the number of numeric columns change in
- # the future, the `31' also has to change.
+# Count how many unique journals there were in the study. Note that
+# the '41' comes because we put 10 characters for each numeric column
+# and separated the last numeric column from the string column with a
+# space. If the number of numeric columns changes in the future, the
+# '41' also has to change.
v=$$(awk 'BEGIN{FIELDWIDTHS="41 10000"} !/^#/{print $$2}' \
$(mk20tab3) | uniq | wc -l)
echo "\newcommand{\menkenumjournals}{$$v}" >> $@
- # Count how many rows the original catalog has.
+# Count how many rows the original catalog has.
v=$$(awk '!/^#/{c++} END{print c}' $(mk20tab3))
echo "\newcommand{\menkenumorigrows}{$$v}" >> $@
diff --git a/reproduce/analysis/make/initialize.mk b/reproduce/analysis/make/initialize.mk
index bc73df8..7f0c514 100644
--- a/reproduce/analysis/make/initialize.mk
+++ b/reproduce/analysis/make/initialize.mk
@@ -1,6 +1,6 @@
# Project initialization.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -25,10 +25,10 @@
# Basic directories that are used throughout the project.
#
# Locks are used to make sure that an operation is done in series not in
-# parallel (even if Make is run in parallel with the `-j' option). The most
+# parallel (even if Make is run in parallel with the '-j' option). The most
# common case is downloads which are better done in series and not in
# parallel. Also, some programs may not be thread-safe, therefore it will
-# be necessary to put a lock on them. This project uses the `flock' program
+# be necessary to put a lock on them. This project uses the 'flock' program
# to achieve this.
#
# To help with modularity and clarity of the build directory (not mixing
@@ -43,7 +43,7 @@ bsdir=$(BDIR)/software
texdir = $(badir)/tex
lockdir = $(bsdir)/locks
indir = $(badir)/inputs
-prepdir = $(padir)/prepare
+prepdir = $(badir)/prepare
mtexdir = $(texdir)/macros
installdir = $(bsdir)/installed
bashdir = reproduce/analysis/bash
@@ -56,10 +56,10 @@ pconfdir = reproduce/analysis/config
# Preparation phase
# -----------------
#
-# This Makefile is loaded both for the `prepare' phase and the `make'
+# This Makefile is loaded both for the 'prepare' phase and the 'make'
# phase. But the preparation files should be dealt with differently
-# (depending on the phase). In the `prepare' phase, the main directory
-# should be created, and in the `make' phase, its contents should be
+# (depending on the phase). In the 'prepare' phase, the main directory
+# should be created, and in the 'make' phase, its contents should be
# loaded.
#
# If you don't need any preparation, please simply comment these lines.
@@ -87,6 +87,14 @@ endif
# (independent parts of the paper can be added to it independently). To fix
# this problem, when we are in a group setting, we'll use the user's ID to
# create a separate LaTeX build directory for each user.
+#
+# The same logic applies to the final paper PDF: each user will create a
+# separate final PDF (for example 'paper-user1.pdf' and 'paper-user2.pdf')
+# and no 'paper.pdf' will be built. This isn't a problem because
+# 'initialize.tex' is a .PHONY prerequisite, so the rule to build the final
+# paper is always executed (even if it is present and nothing has
+# changed). So in terms of overall efficiency and processing steps, this
+# doesn't change anything.
ifeq (x$(GROUP-NAME),x)
texbtopdir = build
else
@@ -104,7 +112,7 @@ tikzdir = $(texbdir)/tikz
# ---------------------------
#
# Before defining the local sub-environment here, we'll need to save the
-# system's environment for some scenarios (for example after `clean'ing the
+# system's environment for some scenarios (for example after 'clean'ing the
# built programs).
curdir := $(shell echo $$(pwd))
@@ -117,16 +125,16 @@ curdir := $(shell echo $$(pwd))
#
# We want the full recipe to be executed in one call to the shell. Also we
# want Make to run the specific version of Bash that we have installed
-# during `./project configure' time.
+# during './project configure' time.
#
# Regarding the directories, this project builds its major dependencies
# itself and doesn't use the local system's default tools. With these
# environment variables, we are setting it to prefer the software we have
# built here.
#
-# `TEXINPUTS': we have to remove all possible user-specified directories to
-# avoid conflicts with existing TeX Live solutions. Later (in `paper.mk'),
-# we are also going to overwrite `TEXINPUTS' just before `pdflatex'.
+# 'TEXINPUTS': we have to remove all possible user-specified directories to
+# avoid conflicts with existing TeX Live solutions. Later (in 'paper.mk'),
+# we are also going to overwrite 'TEXINPUTS' just before 'pdflatex'.
.ONESHELL:
.SHELLFLAGS = -ec
export TERM=xterm
@@ -144,12 +152,12 @@ export LD_LIBRARY_PATH := $(installdir)/lib
# will be empty.
export CPATH := $(SYS_CPATH)
-# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is
+# RPATH is automatically written in macOS, so 'DYLD_LIBRARY_PATH' is
# ultimately redundant. But on some systems, even having a single value
# causes crashes (see bug #56682). So we'll just give it no value at all.
export DYLD_LIBRARY_PATH :=
-# OpenMPI can depend on an existing `ssh' or `rsh' binary. However, because
+# OpenMPI can depend on an existing 'ssh' or 'rsh' binary. However, because
# of security reasons, it's best not to install them, so we disable any
# remote-shell access through this environment variable.
export OMPI_MCA_plm_rsh_agent=/bin/false
@@ -163,7 +171,7 @@ export BASH_ENV := $(shell pwd)/reproduce/software/shell/bashrc.sh
# Python environment
# ------------------
#
-# The main Python environment variable is `PYTHONPATH'. However, so far we
+# The main Python environment variable is 'PYTHONPATH'. However, so far we
# have found several other Python-related environment variables on some
# systems which might interfere. To be safe, we are removing all their
# values.
@@ -187,10 +195,10 @@ export MPI_PYTHON3_SITEARCH :=
# directories (or possible sub-directories) for individual steps will be
# defined and added within their own Makefiles.
#
-# The `.SUFFIXES' rule with no prerequisite is defined to eliminate all the
+# The '.SUFFIXES' rule with no prerequisite is defined to eliminate all the
# default implicit rules. The default implicit rules are to do with
-# programming (for example converting `.c' files to `.o' files). The
-# problem they cause is when you want to debug the make command with `-d'
+# programming (for example converting '.c' files to '.o' files). They
+# cause a problem when you want to debug the make command with the '-d'
# option: they add too many extra checks that make it hard to find what you
# are looking for in the outputs.
.SUFFIXES:
@@ -201,8 +209,11 @@ $(lockdir): | $(bsdir); mkdir $@
# Version and distribution tarball definitions
-project-commit-hash := $(shell if [ -d .git ]; then \
- echo $$(git describe --dirty --always --long); else echo NOGIT; fi)
+project-commit-hash := $(shell \
+ if [ -d .git ]; then \
+ export LD_LIBRARY_PATH="$(installdir)/lib"; \
+ echo $$($(installdir)/bin/git describe --dirty --always --long); \
+ else echo NOGIT; fi)
project-package-name := maneaged-$(project-commit-hash)
project-package-contents = $(texdir)/$(project-package-name)
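For reference, the output of 'git describe --dirty --always --long' (and therefore the value of 'project-commit-hash') looks roughly like the hypothetical forms sketched in the comments below; the exact string depends on whether the history has tags and uncommitted changes:

    # With at least one tag reachable from HEAD:
    #   <tag>-<commits-since-tag>-g<abbreviated-hash>[-dirty]
    # With no tag at all ('--always' falls back to the hash):
    #   <abbreviated-hash>[-dirty]
    git describe --dirty --always --long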
@@ -213,10 +224,10 @@ project-package-contents = $(texdir)/$(project-package-name)
# High-level Makefile management
# ------------------------------
#
-# About `.PHONY': these are targets that must be built even if a file with
+# About '.PHONY': these are targets that must be built even if a file with
# their name exists.
#
-# Only `$(mtexdir)/initialize.tex' corresponds to a file. This is because
+# Only '$(mtexdir)/initialize.tex' corresponds to a file. This is because
# we want to ensure that the file is always built in every run: it contains
# the project version which may change between two separate runs, even when
# no file actually differs.
@@ -229,14 +240,20 @@ texclean:
mkdir $(texdir)/build/tikz # 'tikz' is assumed to already exist.
clean:
- # Delete the top-level PDF file.
+# Delete the top-level PDF file.
rm -f *.pdf
- # Delete all the built outputs except the dependency
- # programs. We'll use Bash's extended options builtin (`shopt') to
- # enable "extended glob" (for listing of files). It allows extended
- # features like ignoring the listing of a file with `!()' that we
- # are using afterwards.
+# Delete possible LaTeX output in the top directory. This can happen
+# when the user has run LaTeX with applications other than Maneage.
+# For example, when opening the 'paper.tex' file with 'texstudio' and
+# executing 'build'.
+ rm -f *.aux *.log *.synctex *.auxlock *.dvi *.out *.run.xml *.bcf
+
+# Delete all the built outputs except the dependency programs. We'll
+# use Bash's extended options builtin ('shopt') to enable "extended
+# glob" (for listing of files). It allows extended features like
+# ignoring the listing of a file with '!()' that we are using
+# afterwards.
shopt -s extglob
rm -rf $(texdir)/macros/!(dependencies.tex|dependencies-bib.tex|hardware-parameters.tex)
rm -rf $(badir)/!(tex) $(texdir)/!(macros|$(texbtopdir))
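The '!(...)' pattern used in these two 'rm' commands comes from Bash's extended globbing; a minimal standalone sketch (directory contents are made up):

    #!/bin/bash
    # List everything in the current directory except 'keepme.txt'.
    shopt -s extglob
    ls -d !(keepme.txt)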
@@ -244,14 +261,13 @@ clean:
rm -rf $(bsdir)/preparation-done.mk
distclean: clean
- # Without cleaning the Git hooks, we won't be able to easily
- # commit or checkout after this task is done. So we'll remove them
- # first.
+# Without cleaning the Git hooks, we won't be able to easily commit
+# or checkout after this task is done. So we'll remove them first.
rm -f .git/hooks/post-checkout .git/hooks/pre-commit
- # We'll be deleting the built environent programs and just need the
- # `rm' program. So for this recipe, we'll use the host system's
- # `rm', not our own.
+# We'll be deleting the built environment programs and just need the
+# 'rm' program. So for this recipe, we'll use the host system's 'rm',
+# not our own.
$$sys_rm -rf $(BDIR)
$$sys_rm -f .local .build $(pconfdir)/LOCAL.conf
@@ -268,15 +284,15 @@ distclean: clean
# without having to worry about the technicalities of the analysis.
$(project-package-contents): paper.pdf | $(texdir)
- # Set up the output directory, delete it if it exists and remake it
- # to fill with new contents.
+# Set up the output directory, delete it if it exists and remake it
+# to fill with new contents.
dir=$@
rm -rf $$dir
mkdir $$dir
curdir=$$(pwd)
- # Build a small Makefile to help in automatizing the paper building
- # (including the bibliography).
+# Build a small Makefile to help in automating the paper build
+# (including the bibliography).
m=$$dir/Makefile
echo "paper.pdf: paper.tex paper.bbl" > $$m
printf "\tlatex -shell-escape -halt-on-error paper\n" >> $$m
@@ -291,94 +307,92 @@ $(project-package-contents): paper.pdf | $(texdir)
printf "\trm -f *.aux *.auxlock *.bbl *.bcf\n" >> $$m
printf "\trm -f *.blg *.log *.out *.run.xml\n" >> $$m
- # Copy the top-level contents (see next step for `paper.tex').
+# Copy the top-level contents (see next step for 'paper.tex').
cp COPYING project README.md README-hacking.md $$dir/
- # Since the packaging is mainly intended for high-level building of
- # the PDF with LaTeX, we'll comment the `makepdf' LaTeX macro in
- # the paper. This will disable usage of TiKZ.
+# Since the packaging is mainly intended for high-level building of
+# the PDF with LaTeX, we'll comment the 'makepdf' LaTeX macro in the
+# paper. This will disable usage of TiKZ.
sed -e's|\\newcommand{\\makepdf}{}|%\\newcommand{\\makepdf}{}|' \
paper.tex > $$dir/paper.tex
- # Copy ONLY the version-controlled files in 'reproduce' and
- # 'tex/src'. This is important because files like 'LOCAL.conf' (in
- # 'reproduce/software/config') should not be archived, they contain
- # information about the host computer and are irrelevant for
- # others. Also some project authors may have temporary files here
- # that are not under version control and thus shouldn't be archived
- # (although this is bad practice, but that is up to the user).
- #
- # To keep the sub-directory structure, we are packaging the files
- # with Tar, piping it, and unpacking it in the archive
- # directory. So afterwards we need to come back to the current
- # directory.
+# Copy ONLY the version-controlled files in 'reproduce' and
+# 'tex/src'. This is important because files like 'LOCAL.conf' (in
+# 'reproduce/software/config') should not be archived: they contain
+# information about the host computer and are irrelevant for
+# others. Also, some project authors may have temporary files here
+# that are not under version control and thus shouldn't be archived
+# (this is bad practice, but that is up to the user).
+#
+# To keep the sub-directory structure, we are packaging the files
+# with Tar, piping it, and unpacking it in the archive directory. So
+# afterwards we need to come back to the current directory.
tar -c -f - $$(git ls-files peer-review reproduce tex/src) \
| (cd $$dir ; tar -x -f -)
cd $(curdir)
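The tar-pipe used above is a general trick for copying a selected list of files while preserving their sub-directory layout; a minimal sketch with placeholder paths:

    # Copy only the Git-tracked files under 'src/' into '/tmp/archive',
    # keeping the directory hierarchy intact.
    mkdir -p /tmp/archive
    tar -c -f - $(git ls-files src) | (cd /tmp/archive && tar -x -f -)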
- # Build the other two subdirectories of 'tex/' that we need in the
- # archive (in the actual project, these are symbolic links to the
- # build directory).
+# Build the other two subdirectories of 'tex/' that we need in the
+# archive (in the actual project, these are symbolic links to the
+# build directory).
mkdir $$dir/tex/tikz $$dir/tex/build
- # Copy the 'tex/build' directory into the archive (excluding the
- # temporary archive directory that we are now copying to). We will
- # be using Bash's extended globbing ('extglob') for excluding this
- # directory.
+# Copy the 'tex/build' directory into the archive (excluding the
+# temporary archive directory that we are now copying to). We will be
+# using Bash's extended globbing ('extglob') for excluding this
+# directory.
shopt -s extglob
cp -r tex/img $$dir/tex/img
cp tex/tikz/*.eps $$dir/tex/tikz
cp -r tex/build/!($(project-package-name)) $$dir/tex/build
- # Clean up the $(texdir)/build* directories in the archive (when
- # building in a group structure, there will be `build-user1',
- # `build-user2' and etc). These are just temporary LaTeX build
- # files and don't have any relevant/hand-written files in them.
+# Clean up the $(texdir)/build* directories in the archive (when
+# building in a group structure, there will be 'build-user1',
+# 'build-user2', etc.). These are just temporary LaTeX build files
+# and don't have any relevant/hand-written files in them.
rm -rf $$dir/tex/build/build*
- # If the project has any PDFs in its 'tex/tikz' directory (TiKZ or
- # PGFPlots was used to generate them), copy them too.
+# If the project has any PDFs in its 'tex/tikz' directory (TiKZ or
+# PGFPlots was used to generate them), copy them too.
if ls tex/tikz/*.pdf &> /dev/null; then
cp tex/tikz/*.pdf $$dir/tex/tikz
fi
- # When submitting to places like arXiv, they will just run LaTeX
- # once and won't run `biber'. So we need to also keep the `.bbl'
- # file into the distributing tarball. However, BibLaTeX is
- # particularly sensitive to versioning (a `.bbl' file has to be
- # read by the same BibLaTeX version that created it). This is hard
- # to do with non-up-to-date places like arXiv. Therefore, we thus
- # just copy the whole of BibLaTeX's source (the version we are
- # using) into the top tarball directory. In this way, arXiv's LaTeX
- # engine will use the same BibLaTeX version to interpret the `.bbl'
- # file. TIP: you can use the same strategy for other LaTeX packages
- # that may cause problems on the arXiv server.
+# When submitting to places like arXiv, they will just run LaTeX once
+# and won't run 'biber'. So we also need to keep the '.bbl' file in
+# the distributed tarball. However, BibLaTeX is particularly
+# sensitive to versioning (a '.bbl' file has to be read by the same
+# BibLaTeX version that created it). This is hard to do with
+# non-up-to-date places like arXiv. Therefore, we just copy the
+# whole of BibLaTeX's source (the version we are using) into the top
+# tarball directory. In this way, arXiv's LaTeX engine will use the
+# same BibLaTeX version to interpret the '.bbl' file. TIP: you can
+# use the same strategy for other LaTeX packages that may cause
+# problems on the arXiv server.
cp tex/build/build/paper.bbl $$dir/
tltopdir=.local/texlive/maneage/texmf-dist/tex/latex
#find $$tltopdir/biblatex/ -maxdepth 1 -type f -print0 \
# | xargs -0 cp -t $$dir
- # Just in case the package users want to rebuild some of the
- # figures (manually un-comment the `makepdf' command we commented
- # above), correct the TikZ external directory, so the figures can
- # be rebuilt.
+# Just in case the package users want to rebuild some of the figures
+# (by un-commenting the 'makepdf' command we commented above), we
+# correct the TikZ external directory so the figures can be rebuilt.
pgfsettings="$$dir/tex/src/preamble-pgfplots.tex"
sed -e's|{tikz/}|{tex/tikz/}|' $$pgfsettings > $$pgfsettings.new
mv $$pgfsettings.new $$pgfsettings
- # PROJECT SPECIFIC
- # ----------------
- # Put any project-specific distribution steps here.
+# PROJECT SPECIFIC
+# ----------------
+# Put any project-specific distribution steps here.
cd $$curdir
cp tex/build/build/appendix.bbl $$dir/
- # ----------------
+# ----------------
- # Clean temporary files that may have been created by text editors.
+# Clean temporary files that may have been created by text editors.
cd $(texdir)
find $(project-package-name) -name \*~ -delete
find $(project-package-name) -name \*.swp -delete
-# Package into `.tar.gz' or '.tar.lz'.
+# Package into '.tar.gz' or '.tar.lz'.
dist dist-lzip: $(project-package-contents)
curdir=$$(pwd)
cd $(texdir)
@@ -394,7 +408,7 @@ dist dist-lzip: $(project-package-contents)
cd $$curdir
mv $(texdir)/$(project-package-name).tar.$$suffix ./
-# Package into `.zip'.
+# Package into '.zip'.
dist-zip: $(project-package-contents)
curdir=$$(pwd)
cd $(texdir)
@@ -495,13 +509,13 @@ print-general-metadata = \
# This file will store some basic info about the project that is necessary
# for the final PDF. Since these are not version controlled, it must be
# calculated every time the project is run. So even though this file
-# actually exists, it is also aded as a `.PHONY' target above.
+# actually exists, it is also added as a '.PHONY' target above.
$(mtexdir)/initialize.tex: | $(mtexdir)
- # Version and title of project. About the starting '@': since these
- # commands are run every time with './project make', it is annoying
- # to print them on the standard output every time. With the '@',
- # make will not print the commands that it runs in this recipe.
+# Version and title of project. About the starting '@': since these
+# commands are run every time with './project make', it is annoying
+# to print them on the standard output every time. With the '@', make
+# will not print the commands that it runs in this recipe.
@d=$$(git show -s --format=%aD HEAD | awk '{print $$2, $$3, $$4}')
echo "\newcommand{\projectdate}{$$d}" > $@
echo "\newcommand{\projecttitle}{$(metadata-title)}" >> $@
@@ -514,16 +528,15 @@ $(mtexdir)/initialize.tex: | $(mtexdir)
v=$$(echo $(metadata-doi-zenodo) | sed -e's/\./ /g' | awk '{print $$NF}')
echo "\newcommand{\projectzenodoid}{$$v}" >> $@
- # Calculate the latest Maneage commit used to build this
- # project:
- # - The project may not have the 'maneage' branch (for example
- # after cloning from a fork that didn't include it!). In this
- # case, we'll print a descriptive warning, telling the user what
- # should be done (reporting the last merged commit and its date
- # is very useful for the future).
- # - The '--dirty' option (used in 'project-commit-hash') isn't
- # applicable to "commit-ishes" (direct quote from Git's error
- # message!).
+# Calculate the latest Maneage commit used to build this project:
+# - The project may not have the 'maneage' branch (for example
+# after cloning from a fork that didn't include it!). In this
+# case, we'll print a descriptive warning, telling the user what
+# should be done (reporting the last merged commit and its date
+# is very useful for the future).
+# - The '--dirty' option (used in 'project-commit-hash') isn't
+# applicable to "commit-ishes" (direct quote from Git's error
+# message!).
if git log maneage -1 &> /dev/null; then
c=$$(git merge-base HEAD maneage)
v=$$(git describe --always --long $$c)
diff --git a/reproduce/analysis/make/paper.mk b/reproduce/analysis/make/paper.mk
index 00bd3b5..da2702c 100644
--- a/reproduce/analysis/make/paper.mk
+++ b/reproduce/analysis/make/paper.mk
@@ -1,6 +1,6 @@
# Build the final PDF paper/report.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -24,14 +24,14 @@
#
# To report the input settings and results, the final report's PDF (final
# target of this project) uses macros generated from various steps of the
-# project. All these macros are defined through `$(mtexdir)/project.tex'.
+# project. All these macros are defined through '$(mtexdir)/project.tex'.
#
-# `$(mtexdir)/project.tex' is actually just a combination of separate files
+# '$(mtexdir)/project.tex' is actually just a combination of separate files
# that keep the LaTeX macros related to each workhorse Makefile (in
-# `reproduce/src/make/*.mk'). Those individual macros are pre-requisites to
-# `$(mtexdir)/verify.tex' which will check them before starting to build
+# 'reproduce/analysis/make/*.mk'). Those individual macros are prerequisites to
+# '$(mtexdir)/verify.tex' which will check them before starting to build
# the paper. The only workhorse Makefile that doesn't need to produce LaTeX
-# macros is this Makefile (`reproduce/src/make/paper.mk').
+# macros is this Makefile ('reproduce/analysis/make/paper.mk').
#
# This file is thus the interface between the analysis/processing steps and
# the final PDF: when we get to this point, all the processing has been
@@ -39,38 +39,38 @@
#
# Note that if you don't want the final PDF and just want the processing
# and file outputs, you can give any value other than 'yes' to
-# 'pdf-build-final' in `reproduce/analysis/config/pdf-build.conf'.
+# 'pdf-build-final' in 'reproduce/analysis/config/pdf-build.conf'.
$(mtexdir)/project.tex: $(mtexdir)/verify.tex
- # If no PDF is requested, or if LaTeX isn't available, don't
- # continue to building the final PDF. Otherwise, merge all the TeX
- # macros into one for building the PDF.
+# If no PDF is requested, or if LaTeX isn't available, don't continue
+# to build the final PDF. Otherwise, merge all the TeX macros into
+# one for building the PDF.
@if [ -f .local/bin/latex ] && [ x"$(pdf-build-final)" = xyes ]; then
- # Put a LaTeX input command for all the necessary macro files.
- # 'hardware-parameters.tex' is created in 'configure.sh'.
+# Put a LaTeX input command for all the necessary macro files.
+# 'hardware-parameters.tex' is created in 'configure.sh'.
projecttex=$(mtexdir)/project.tex
rm -f $$projecttex
for t in $(subst paper,,$(makesrc)) hardware-parameters; do
echo "\input{tex/build/macros/$$t.tex}" >> $$projecttex
done
- # Possibly print the appendix in the final PDF.
+# Possibly print the appendix in the final PDF.
if [ x"$(separatesupplement)" = x1 ]; then
echo "\newcommand{\separatesupplement}{}" >> $$projecttex
fi
- # Possibly highlight the '\new' parts of the text.
+# Possibly highlight the '\new' parts of the text.
if [ x"$(highlightnew)" = x1 ]; then
echo "\newcommand{\highlightnew}{}" >> $$projecttex
fi
- # Possibly show the text within '\tonote'.
+# Possibly show the text within '\tonote'.
if [ x"$(highlightnotes)" = x1 ]; then
echo "\newcommand{\highlightnotes}{}" >> $$projecttex
fi
- # The paper shouldn't be built.
+# The paper shouldn't be built.
else
echo
echo "-----"
@@ -101,76 +101,76 @@ $(mtexdir)/project.tex: $(mtexdir)/verify.tex
# The bibliography
# ----------------
#
-# We need to run the `bibtex' program on the output of LaTeX to generate
+# We need to run the 'bibtex' program on the output of LaTeX to generate
# the necessary bibliography before making the final paper. So we'll first
-# have one run of LaTeX (similar to the `paper.pdf' recipe), then `biber'.
+# have one run of LaTeX (similar to the 'paper.pdf' recipe), then 'bibtex'.
#
-# NOTE: `$(mtexdir)/project.tex' is an order-only-prerequisite for
-# `paper.bbl'. This is because we need to run LaTeX in both the `paper.bbl'
-# recipe and the `paper.pdf' recipe. But if `tex/src/references.bib' hasn't
+# NOTE: '$(mtexdir)/project.tex' is an order-only-prerequisite for
+# 'paper.bbl'. This is because we need to run LaTeX in both the 'paper.bbl'
+# recipe and the 'paper.pdf' recipe. But if 'tex/src/references.tex' hasn't
# been modified, we don't want to re-build the bibliography, only the final
# PDF.
bbls = $(foreach t,$(subst .pdf,,$(top-pdfs)),$(texbdir)/$(t).bbl)
$(bbls): $(texbdir)/%.bbl: tex/src/references.tex \
$(mtexdir)/dependencies-bib.tex | $(mtexdir)/project.tex
- # If `$(mtexdir)/project.tex' is empty, don't build PDF.
+# If '$(mtexdir)/project.tex' is empty, don't build PDF.
@macros=$$(cat $(mtexdir)/project.tex)
if [ x"$$macros" != x ]; then
- # Unfortunately I can't get bibtex to look into a special
- # directory for the references, so we'll copy it into the LaTeX
- # building directory.
+# Unfortunately I can't get bibtex to look into a special directory
+# for the references, so we'll link it into the LaTeX build
+# directory.
p=$$(pwd)
if ! [ -L $(texbdir)/references.bib ]; then
ln -sf $$p/tex/src/references.tex $(texbdir)/references.bib
fi
- # Copy the improved IEEE bst file into the build directory.
- # The improved bst file provides ArXiv clickable URLs and
- # if available, open-access URLs based on the DOIs, with
- # closed-access URLs as a fallback, via https://oadoi.org .
+# Copy the improved IEEE bst file into the build directory. The
+# improved bst file provides clickable arXiv URLs and, if available,
+# open-access URLs based on the DOIs (with closed-access URLs as a
+# fallback), via https://oadoi.org .
ln -sf $$p/tex/src/IEEEtran_openaccess.bst $(texbdir)/
- # We'll run LaTeX first to generate the `.bcf' file (necessary
- # for `biber') and then run `biber' to generate the `.bbl' file.
+# We'll run LaTeX first to generate the '.aux' file (necessary for
+# 'bibtex') and then run 'bibtex' to generate the '.bbl' file.
export TEXINPUTS=$$p:
cd $(texbdir);
- # Delete any possibly existing target (a '.bbl' file) to avoid
- # complications with LaTeX being run before the command that
- # generates it. Otherwise users will have to manually delete
- # it. It will be built anyway once this rule is done.
+# Delete any possibly existing target (a '.bbl' file) to avoid
+# complications with LaTeX being run before the command that
+# generates it. Otherwise users will have to manually delete it. It
+# will be built anyway once this rule is done.
rm -f $@
- # Put a link to the main LaTeX source that we want to build.
+# Put a link to the main LaTeX source that we want to build.
if [ $* = paper ]; then sdir="$$p"
else sdir="$$p"/tex/src
fi
ln -sf "$$sdir"/$*.tex ./
- # The pdflatex option '-shell-escape' is "normally disallowed for
- # security reasons" according to the `info pdflatex' manual, but
- # is enabled here in order to allow the use of PGFPlots. If you
- # do not use PGFPlots, then you can remove the `-shell-escape'
- # option for better security. See
- # https://savannah.nongnu.org/task/?15694 for details.
+# The pdflatex option '-shell-escape' is "normally disallowed for
+# security reasons" according to the 'info pdflatex' manual, but is
+# enabled here in order to allow the use of PGFPlots. If you do not
+# use PGFPlots, then you should remove the '-shell-escape' option
+# for better security. See https://savannah.nongnu.org/task/?15694
+# for details.
latex -shell-escape -halt-on-error $*.tex
- # When we are building the main paper and the appendices are to
- # be built within the main paper's PDF, we need two
- # bibliographies: one for the main body, and one for the
- # appendix. For this, we use 'multibib'. Multibib creates a
- # separate '.aux' file for each bibliography.
+# When we are building the main paper and the appendices are to be
+# built within the main paper's PDF, we need two bibliographies:
+# one for the main body, and one for the appendix. For this, we use
+# 'multibib'. Multibib creates a separate '.aux' file for each
+# bibliography.
bibtex $*
if [ x"$(separatesupplement)" != x1 ]; then
bibtex appendix
fi
- # Hack: tidy up eprint+doi style that didn't work in .bst file.
- # TODO (better): read Part 4 of
- # http://mirrors.ctan.org/info/bibtex/tamethebeast/ttb_en.pdf
- # and fix the .bst style properly.
+# Hack: tidy up eprint+doi style that didn't work in .bst file.
+# TODO (better): read Part 4 of
+# http://mirrors.ctan.org/info/bibtex/tamethebeast/ttb_en.pdf and
+# fix the .bst style properly.
cp -pv $*.bbl $*-tmp.bbl \
&& sed -e "s/\'/EOLINE/g" $*-tmp.bbl \
| tr -d '\n' \
@@ -188,7 +188,7 @@ $(bbls): $(texbdir)/%.bbl: tex/src/references.tex \
| sed -e 's/EOLINE/\n/g' > appendix.bbl
fi
- # Paper-specific hacks for reducing very-long author lists.
+# Paper-specific hacks for reducing very-long author lists.
cp -pv $*.bbl $*-tmp.bbl \
&& sed -e "s/\'/EOLINE/g" $*-tmp.bbl \
| tr -d '\n' \
@@ -196,7 +196,7 @@ $(bbls): $(texbdir)/%.bbl: tex/src/references.tex \
| sed -e 's;, V\..Khodiyar[^{]*Whyte; et al.\\/;' \
| sed -e 's/EOLINE/\n/g' > $*.bbl
- # The pre-final run of LaTeX after 'paper.bbl' was created.
+# The pre-final run of LaTeX after 'paper.bbl' was created.
latex -shell-escape -halt-on-error $*.tex
fi
@@ -207,36 +207,36 @@ $(bbls): $(texbdir)/%.bbl: tex/src/references.tex \
# The final paper
# ---------------
#
-# Run LaTeX in the `$(texbdir)' directory so all the intermediate and
+# Run LaTeX in the '$(texbdir)' directory so all the intermediate and
# auxiliary files stay there and keep the top directory clean. To be able
# to run everything cleanly from there, it is necessary to add the current
-# directory (top project directory) to the `TEXINPUTS' environment
+# directory (top project directory) to the 'TEXINPUTS' environment
# variable.
$(top-pdfs): %.pdf: $(mtexdir)/project.tex paper.tex \
tex/src/appendix-*.tex $(texbdir)/%.bbl
- # If `$(mtexdir)/project.tex' is empty, don't build the PDF.
+# If '$(mtexdir)/project.tex' is empty, don't build the PDF.
@macros=$$(cat $(mtexdir)/project.tex)
if [ x"$$macros" != x ]; then
- # Go into the top TeX build directory and make the paper.
+# Go into the top TeX build directory and make the paper.
p=$$(pwd)
export TEXINPUTS=$$p:
cd $(texbdir)
- # See above for a warning and brief discussion on the the
- # pdflatex option `-shell-escape'.
+# See above for a warning and brief discussion on the pdflatex
+# option '-shell-escape'.
latex -shell-escape -halt-on-error $*.tex
- # Convert the DVI to PostScript, and the PostScript to PDF. The
- # `-dNOSAFER' option to GhostScript allows transparencies in the
- # conversion from PostScript to PDF, see
- # https://www.ghostscript.com/doc/current/Language.htm#Transparency
+# Convert the DVI to PostScript, and the PostScript to PDF. The
+# '-dNOSAFER' option to GhostScript allows transparencies in the
+# conversion from PostScript to PDF, see
+# https://www.ghostscript.com/doc/current/Language.htm#Transparency
dvips $*.dvi
ps2pdf $*.ps
- # Come back to the top project directory and copy the built PDF
- # file here.
+# Come back to the top project directory and copy the built PDF
+# file here.
cd "$$p"
cp $(texbdir)/$*.pdf $@
fi
diff --git a/reproduce/analysis/make/prepare.mk b/reproduce/analysis/make/prepare.mk
index d0b61d9..ecb6842 100644
--- a/reproduce/analysis/make/prepare.mk
+++ b/reproduce/analysis/make/prepare.mk
@@ -1,6 +1,6 @@
-# Basic preparations, called by `./project prepare'.
+# Basic preparations, called by './project prepare'.
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -21,30 +21,29 @@
# Final-target
#
-# Without this file, `./project make' won't work.
+# Without this file, './project make' won't work.
prepare-dep = $(subst prepare, ,$(makesrc))
$(bsdir)/preparation-done.mk: \
$(foreach s, $(prepare-dep), $(mtexdir)/$(s).tex)
- # If you need to add preparations define targets above to do the
- # preparations, then set the value below to `yes'. Recall that just
- # like `./project make', before loading this file, `./project
- # prepare' loads loads `initialize.mk' and `download.mk', so you
- # can safely assume everything that is defined there in the
- # preparation phase also.
- #
- # TIP: the targets can actually be automatically generated
- # Makefiles that are used by `./project make'. They can include
- # variables, or automatically generated rules. Just make sure that
- # those Makefiles aren't written in the source directory. Even
- # though they are Makefiles, they are automatically built, so they
- # don't belong in the source. `$(prepdir)' has been defined for
- # this purpose (see `initialize.mk'), we recommend that you put all
- # automatically generated Makefiles under this directory. In the
- # `make' phase, `initialize.mk' will automatically load all the
- # `*.mk' files. If you need to load your generated
- # configuration-makefiles before automatically generated Makefiles
- # containing rules, you can use some naming convension like
- # `conf-*.mk' and `rule-*.mk', or you can put them in
- # subdirectories.
+# If you need to add preparations, define targets above to do them,
+# then set the value below to 'yes'. Recall that just like './project
+# make', before loading this file, './project prepare' loads
+# 'initialize.mk' and 'download.mk', so you can safely assume that
+# everything defined there is also available in the preparation
+# phase.
+#
+# TIP: the targets can actually be automatically generated Makefiles
+# that are used by './project make'. They can include variables, or
+# automatically generated rules. Just make sure that those Makefiles
+# aren't written in the source directory. Even though they are
+# Makefiles, they are automatically built, so they don't belong in
+# the source. '$(prepdir)' has been defined for this purpose (see
+# 'initialize.mk'); we recommend that you put all automatically
+# generated Makefiles under this directory. In the 'make' phase,
+# 'initialize.mk' will automatically load all the '*.mk' files. If
+# you need to load your generated configuration Makefiles before the
+# automatically generated Makefiles containing rules, you can use a
+# naming convention like 'conf-*.mk' and 'rule-*.mk', or put them in
+# subdirectories (a minimal sketch follows this rule).
@echo "include-prepare-results = no" > $@
diff --git a/reproduce/analysis/make/top-make.mk b/reproduce/analysis/make/top-make.mk
index 27c1b5b..7755174 100644
--- a/reproduce/analysis/make/top-make.mk
+++ b/reproduce/analysis/make/top-make.mk
@@ -1,6 +1,6 @@
# Top-level Makefile (first to be loaded).
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -20,7 +20,7 @@
# Load the local configuration (created after running
-# `./project configure').
+# './project configure').
include reproduce/software/config/LOCAL.conf
@@ -30,7 +30,7 @@ include reproduce/software/config/LOCAL.conf
# Ultimate target of this project
# -------------------------------
#
-# The final paper/report (`paper.pdf') is the main target of this
+# The final paper/report ('paper.pdf') is the main target of this
# project. As defined in the Make paradigm, it must be the first target
# that Make encounters (immediately after loading the local configuration
# settings, necessary for a group building scenario mentioned next).
@@ -50,8 +50,8 @@ include reproduce/software/config/LOCAL.conf
#
# Controlling this requires two variables that are available at this stage:
#
-# - `GROUP-NAME': from `LOCAL.conf' (which was built by `./project configure').
-# - `maneage_group_name': value to the `--group' option.
+# - 'GROUP-NAME': from 'LOCAL.conf' (which was built by './project configure').
+# - 'maneage_group_name': value to the '--group' option.
#
# The analysis is only done when both have the same group name. Note that
# when the project isn't being built for a group, both variables will be an
@@ -63,7 +63,7 @@ include reproduce/software/config/LOCAL.conf
#
# If you are just interested in the processing and don't want to build the
# PDF, you can skip the creation of the final PDF by giving a value of
-# `yes' to `pdf-build-final' in `reproduce/analysis/config/pdf-build.conf'.
+# 'yes' to 'pdf-build-final' in 'reproduce/analysis/config/pdf-build.conf'.
ifeq ($(separatesupplement),0)
top-pdfs = paper.pdf
else
@@ -92,13 +92,13 @@ endif
# To keep things clean, manageable and readable, each set of operations
# is (and must be) classified (modularized) by context into separate
# Makefiles: the more the better. These modular steps are then
-# included in this top-level Makefile through the `include' command of
+# included in this top-level Makefile through the 'include' command of
# the next step. Each Makefile should also produce a LaTeX macro file
# with the same fixed name (used to keep all the parameters and
# relevant outputs of the steps in it for the final paper).
#
# In the rare case that no special LaTeX macros are necessary in a
-# workhorse Makefile, you can simply make an empty file with `touch
+# workhorse Makefile, you can simply make an empty file with 'touch
# $@'. This will not add any lines to the final combined LaTeX macros
# file, but will create the file that is a prerequisite to the final
# paper generation.
@@ -112,7 +112,7 @@ endif
# IMPORTANT NOTE: order matters in the inclusion of the processing
# Makefiles. As the project grows, some Makefiles will define
# variables/dependencies that later Makefiles need. Therefore we are using
-# a `foreach' loop in the next step to explicitly request loading them in
+# a 'foreach' loop in the next step to explicitly request loading them in
# the same order that they are defined here (we aren't just using a
# wild-card like the configuration Makefiles).
makesrc = initialize \
@@ -136,7 +136,7 @@ makesrc = initialize \
# contain rules to actually do this project's processing.
#
# But before that, we need to identify the phase for the Makefiles that are
-# run both in `./project prepare' and `./project make'.
+# run both in './project prepare' and './project make'.
project-phase = make
include reproduce/analysis/config/*.conf
include $(foreach s,$(makesrc), reproduce/analysis/make/$(s).mk)
diff --git a/reproduce/analysis/make/top-prepare.mk b/reproduce/analysis/make/top-prepare.mk
index fb5700e..3950bf1 100644
--- a/reproduce/analysis/make/top-prepare.mk
+++ b/reproduce/analysis/make/top-prepare.mk
@@ -1,10 +1,10 @@
# Do basic preparations to optimize the project's running.
#
-# NOTE: This file is very similar to `top-make.mk', so the large comments
+# NOTE: This file is very similar to 'top-make.mk', so the large comments
# are not included here. Please see that file for thorough comments on each
# step.
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -24,7 +24,7 @@
# Load the local configuration (created after running
-# `./project configure').
+# './project configure').
include reproduce/software/config/LOCAL.conf
@@ -34,7 +34,7 @@ include reproduce/software/config/LOCAL.conf
# Ultimate target of this project
# -------------------------------
#
-# See `top-make.mk' for complete explanation.
+# See 'top-make.mk' for complete explanation.
ifeq (x$(maneage_group_name),x$(GROUP-NAME))
all: $(BDIR)/software/preparation-done.mk
@echo "Project preparation is complete.";
@@ -57,12 +57,12 @@ endif
# Define source Makefiles
# -----------------------
#
-# See `top-make.mk' for complete explanation.
+# See 'top-make.mk' for complete explanation.
#
-# To ensure that `prepare' and `make' have the same basic definitions and
-# environment and that all `downloads' are managed in one place, both
-# `./project prepare' and `./project make' will first read `initialize.mk'
-# and `downloads.mk'.
+# To ensure that 'prepare' and 'make' have the same basic definitions and
+# environment and that all 'downloads' are managed in one place, both
+# './project prepare' and './project make' will first read 'initialize.mk'
+# and 'download.mk'.
makesrc = initialize \
download \
prepare
@@ -74,7 +74,7 @@ makesrc = initialize \
# Include all analysis Makefiles
# ------------------------------
#
-# See `top-make.mk' for complete explanation.
+# See 'top-make.mk' for complete explanation.
project-phase = prepare
include reproduce/analysis/config/*.conf
include $(foreach s,$(makesrc), reproduce/analysis/make/$(s).mk)
diff --git a/reproduce/analysis/make/verify.mk b/reproduce/analysis/make/verify.mk
index 6503172..ac91089 100644
--- a/reproduce/analysis/make/verify.mk
+++ b/reproduce/analysis/make/verify.mk
@@ -1,6 +1,6 @@
# Verify the project outputs before building the paper.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -94,7 +94,7 @@ verify-txt-no-comments-no-space = \
# (generated in various stages of the analysis).
#
# Since each analysis step's data files are already prerequisites of their
-# respective TeX macro file, its enough for `verify.tex' to depend on the
+# respective TeX macro file, it's enough for 'verify.tex' to depend on the
# final TeX macro.
#
# USEFUL TIP: during the early phases of your research (when you are
@@ -103,41 +103,41 @@ verify-txt-no-comments-no-space = \
#
# Here is a description of the variables defined here.
#
-# verify-dep: The major step dependencies of `verify.tex', this includes
+# verify-dep: The major step dependencies of 'verify.tex', this includes
# all the steps that must be finished before it.
#
# verify-check: The files whose contents are important. This is
-# essentially the same as `verify-dep', but it has removed
-# the `initialize' step (which is information about the
+# essentially the same as 'verify-dep', but it has removed
+# the 'initialize' step (which is information about the
# pipeline, not the results).
verify-dep = $(subst verify,,$(subst paper,,$(makesrc)))
verify-check = $(subst initialize,,$(verify-dep))
$(mtexdir)/verify.tex: $(foreach s, $(verify-dep), $(mtexdir)/$(s).tex)
- # Make sure that verification is actually requested, the '@' at the
- # start of the recipe is added so Make doesn't print the commands
- # on the standard output because this recipe is run on every call
- # to the project and can be annoying (get mixed in the middle of
- # the analysis outputs or the LaTeX outputs).
+# Make sure that verification is actually requested. The '@' at the
+# start of the recipe is added so Make doesn't print the commands on
+# the standard output: this recipe is run on every call to the
+# project, so printing them can be annoying (they get mixed into the
+# analysis outputs or the LaTeX outputs).
@if [ x"$(verify-outputs)" = xyes ]; then
- # Make sure the temporary output doesn't exist (because we want
- # to append to it). We are making a temporary output target so if
- # there is a crash in the middle, Make will not continue. If we
- # write in the final target progressively, the file will exist,
- # and its date will be more recent than all prerequisites, so
- # next time the project is run, Make will continue and ignore the
- # rest of the checks.
+# Make sure the temporary output doesn't exist (because we want to
+# append to it). We are making a temporary output target so if
+# there is a crash in the middle, Make will not continue. If we
+# write in the final target progressively, the file will exist, and
+# its date will be more recent than all prerequisites, so next time
+# the project is run, Make will continue and ignore the rest of the
+# checks.
rm -f $@.tmp
- # Verify the figure datasets.
+# Verify the figure datasets.
$(call verify-txt-no-comments-leading-space, \
$(a2mk20f1c), 76fc5b13495c4d8e8e6f8d440304cf69)
- # Verify TeX macros (the values that go into the PDF text).
+# Verify TeX macros (the values that go into the PDF text).
for m in $(verify-check); do
file=$(mtexdir)/$$m.tex
- if [ $$m == download ]; then s=64da83ee3bfaa236849927cdc001f5d3
+ if [ $$m == download ]; then s=5d0ab54ca95366d1aab12196966dd3b6
elif [ $$m == format ]; then s=e04d95a539b5540c940bf48994d8d45f
elif [ $$m == demo-plot ]; then s=48bffe6cf8db790c63a33302d20db77f
else echo; echo "'$$m' not recognized."; exit 1
@@ -145,7 +145,7 @@ $(mtexdir)/verify.tex: $(foreach s, $(verify-dep), $(mtexdir)/$(s).tex)
$(call verify-txt-no-comments-no-space, $$file, $$s, $@.tmp)
done
- # Move temporary file to final target.
+# Move temporary file to final target.
mv $@.tmp $@
else
echo "% Verification was DISABLED!" > $@
diff --git a/reproduce/software/bibtex/astrometrynet.tex b/reproduce/software/bibtex/astrometrynet.tex
index 15d4829..26d211b 100644
--- a/reproduce/software/bibtex/astrometrynet.tex
+++ b/reproduce/software/bibtex/astrometrynet.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/astropy.tex b/reproduce/software/bibtex/astropy.tex
index c25803b..2041fc3 100644
--- a/reproduce/software/bibtex/astropy.tex
+++ b/reproduce/software/bibtex/astropy.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/corner.tex b/reproduce/software/bibtex/corner.tex
index b2e0e25..f6b4f22 100644
--- a/reproduce/software/bibtex/corner.tex
+++ b/reproduce/software/bibtex/corner.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/cython.tex b/reproduce/software/bibtex/cython.tex
index 88a5c4e..30a0713 100644
--- a/reproduce/software/bibtex/cython.tex
+++ b/reproduce/software/bibtex/cython.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/fftw.tex b/reproduce/software/bibtex/fftw.tex
index 7f525c9..1186efa 100644
--- a/reproduce/software/bibtex/fftw.tex
+++ b/reproduce/software/bibtex/fftw.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/galsim.tex b/reproduce/software/bibtex/galsim.tex
index 3646639..8fceda8 100644
--- a/reproduce/software/bibtex/galsim.tex
+++ b/reproduce/software/bibtex/galsim.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/gnuastro.tex b/reproduce/software/bibtex/gnuastro.tex
index 1470a00..018a35b 100644
--- a/reproduce/software/bibtex/gnuastro.tex
+++ b/reproduce/software/bibtex/gnuastro.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/healpix.tex b/reproduce/software/bibtex/healpix.tex
index 08bef90..77471c5 100644
--- a/reproduce/software/bibtex/healpix.tex
+++ b/reproduce/software/bibtex/healpix.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/imfit.tex b/reproduce/software/bibtex/imfit.tex
index 3822fd4..167586d 100644
--- a/reproduce/software/bibtex/imfit.tex
+++ b/reproduce/software/bibtex/imfit.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/matplotlib.tex b/reproduce/software/bibtex/matplotlib.tex
index c35f682..aab2c04 100644
--- a/reproduce/software/bibtex/matplotlib.tex
+++ b/reproduce/software/bibtex/matplotlib.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/missfits.tex b/reproduce/software/bibtex/missfits.tex
index 9fb0f54..470cf8a 100644
--- a/reproduce/software/bibtex/missfits.tex
+++ b/reproduce/software/bibtex/missfits.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2020-2021 Surena Fatemi <surena.fatemi@ipm.ir>
+%% Copyright (C) 2020-2022 Surena Fatemi <surena.fatemi@ipm.ir>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/mpi4py.tex b/reproduce/software/bibtex/mpi4py.tex
index 9a99803..598bbda 100644
--- a/reproduce/software/bibtex/mpi4py.tex
+++ b/reproduce/software/bibtex/mpi4py.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/numpy.tex b/reproduce/software/bibtex/numpy.tex
index 07101cf..0dee489 100644
--- a/reproduce/software/bibtex/numpy.tex
+++ b/reproduce/software/bibtex/numpy.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/r-cran.tex b/reproduce/software/bibtex/r-cran.tex
new file mode 100644
index 0000000..cf74a8e
--- /dev/null
+++ b/reproduce/software/bibtex/r-cran.tex
@@ -0,0 +1,19 @@
+%% Copyright (C) 2022 Boud Roukema <boud@cosmo.torun.pl>
+%% Copyright (C) 2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%%
+%% Copying and distribution of this file, with or without modification,
+%% are permitted in any medium without royalty provided the copyright
+%% notice and this notice are preserved. This file is offered as-is,
+%% without any warranty.
+
+@ARTICLE{RIhakaGentleman1996,
+ author = {{Ihaka}, Ross and {Gentleman}, Robert},
+ title = "{R: A language for data analysis and graphics}",
+ journal = {J.Comput.Graph.Stat.},
+ year = {1996},
+ volume = {5},
+ number = {3},
+ pages = {299-314},
+ DOI = {10.1080/10618600.1996.10474713},
+ ISSN = {10618600},
+}
diff --git a/reproduce/software/bibtex/scamp.tex b/reproduce/software/bibtex/scamp.tex
index ab68dd6..62e910c 100644
--- a/reproduce/software/bibtex/scamp.tex
+++ b/reproduce/software/bibtex/scamp.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/scipy.tex b/reproduce/software/bibtex/scipy.tex
index 71b0caa..4b1292d 100644
--- a/reproduce/software/bibtex/scipy.tex
+++ b/reproduce/software/bibtex/scipy.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/sextractor.tex b/reproduce/software/bibtex/sextractor.tex
index 0c91a3d..8905be8 100644
--- a/reproduce/software/bibtex/sextractor.tex
+++ b/reproduce/software/bibtex/sextractor.tex
@@ -1,5 +1,5 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/sip_tpv.tex b/reproduce/software/bibtex/sip_tpv.tex
index 02dfec0..2a5b68b 100644
--- a/reproduce/software/bibtex/sip_tpv.tex
+++ b/reproduce/software/bibtex/sip_tpv.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/swarp.tex b/reproduce/software/bibtex/swarp.tex
index 7636f1c..f645bb3 100644
--- a/reproduce/software/bibtex/swarp.tex
+++ b/reproduce/software/bibtex/swarp.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/sympy.tex b/reproduce/software/bibtex/sympy.tex
index 1d07846..3ce064a 100644
--- a/reproduce/software/bibtex/sympy.tex
+++ b/reproduce/software/bibtex/sympy.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/bibtex/tides.tex b/reproduce/software/bibtex/tides.tex
index f3b5490..b97762a 100644
--- a/reproduce/software/bibtex/tides.tex
+++ b/reproduce/software/bibtex/tides.tex
@@ -1,4 +1,4 @@
-%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/reproduce/software/config/LOCAL.conf.in b/reproduce/software/config/LOCAL.conf.in
index 132c3f7..a7434ea 100644
--- a/reproduce/software/config/LOCAL.conf.in
+++ b/reproduce/software/config/LOCAL.conf.in
@@ -1,9 +1,9 @@
# Local project configuration.
#
-# This is just a template for the `./project configure' script to fill
+# This is just a template for the './project configure' script to fill
# in. Please don't make any change to this file.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
diff --git a/reproduce/software/config/TARGETS.conf b/reproduce/software/config/TARGETS.conf
index 95f12f2..94c7e5f 100644
--- a/reproduce/software/config/TARGETS.conf
+++ b/reproduce/software/config/TARGETS.conf
@@ -1,7 +1,7 @@
# Necessary high-level software to build in this project.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -21,25 +21,29 @@
# reproduce/software/config/versions.conf
#
# Please add any software that you need for your project in the respective
-# part below (using its name in `versions.conf', but without the `-version'
+# part below (using its name in 'versions.conf', but without the '-version'
# part). Just note that if a program/library is a dependency of another,
# you don't need to include it here (it will be installed before the
# higher-level software anyway).
#
# Note that many low-level software will be installed before those that are
# installed in this step. They are clearly distinguished from the
-# higher-level (optional) software in `versions.conf'. These low-level
+# higher-level (optional) software in 'versions.conf'. These low-level
# software MUST NOT be added here.
-# Programs and libraries.
+# Programs and libraries (for Python or R modules, use the respective variable).
#
# Ghostscript: to build PDF paper (in particular the `ps2pdf' command).
# XLSXI/O: to read and write XLSX files.
-top-level-programs = ghostscript xlsxio
+top-level-programs = ghostscript xlsxio
# Python libraries/modules.
-top-level-python =
+top-level-python =
+
+# R libraries/modules
+# [For developers 2022-01-02: 'r-cran-cowplot r-cran-gridExtra' for all]
+top-level-r-cran =
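As the comments in this hunk explain, requesting more high-level software is only a matter of appending its name from 'versions.conf' (without the '-version' suffix) to the matching variable. A hypothetical configuration, assuming 'gnuastro' and 'astropy' are the names used in 'versions.conf' (the 'r-cran-cowplot' name is the one quoted in the developer note above), might therefore read:

    top-level-programs = ghostscript xlsxio gnuastro
    top-level-python   = astropy
    top-level-r-cran   = r-cran-cowplot

Dependencies of these packages do not need to be listed; as noted above, they are built before the higher-level software in any case.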
diff --git a/reproduce/software/config/checksums.conf b/reproduce/software/config/checksums.conf
index 0e21912..7450e6d 100644
--- a/reproduce/software/config/checksums.conf
+++ b/reproduce/software/config/checksums.conf
@@ -1,7 +1,8 @@
# sha512 checksums of all the necessary software tarballs.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2022 Pedram Ashofteh Ardakani <pedramardakani@pm.me>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -14,54 +15,53 @@
# Basic/low-level programs and libraries (installed in any case)
# --------------------------------------------------------------
-bash-checksum = e210cd63ce1241636fbb14d1a105c83e9481a0312026f746f76f7115b777707170ddfe1840fb4d3a4093613048e2d1eedf926e843ad15ffc8d66f08525e8b04b
-binutils-checksum = 5ad795fab0803be83b53aa955fd5414c8408b4cf2a66eba2f8688298312934b4b1b0cbe9cf887d86de77f88adf1333d85fc9f6ab4a530e85a09b9b2dbf6aaf3f
-bzip2-checksum = 00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12
-cert-checksum = a81dfa59c70788126a395c576e54cb8f61c1ea34da69b5cd42e2d83ee6426c2a26941360c7302793774ea98ca16846deb6e683144cc7fb6da6ef87b70447e4c8
-coreutils-checksum = 1c8f3584efd61b4b02e7ac5db8e103b63cfb2063432caaf1e64cb2dcc56d8c657d1133bbf10bd41468d6a1f31142e6caa81d16ae68fa3e6e84075c253613a145
-curl-checksum = 614f8d67e6ee942cb5e57f2da1a3faaf84ebff549ffe383aaf3751a4de3c8874ff482076afa27a52e910a137dec9b0f6e98265dc7d4ad7c95845b63f39236fd4
-dash-checksum = 9d55090115ac04f505d70e6790179331178950f96fe713b33fd698fa8bfa60d4eff1b68cb7b8a2f099d29c587d36034a17dccd6658ba1623ff0a625ac1fb9620
-diffutils-checksum = 7b12cf8aea1b9844773748f72272d9c6a38adae9c3c3a8c62048f91fb56c60b76035fa5f51665dceaf2cfbf1d1f4a3efdcc24bf47a5a16ff4350543314b12c9c
-file-checksum = 9cf1a7b769c56eb6f5b25c66ce85fa1300128396e445b2e53dbbd8951e5da973a7a07c4ef9f7ebd1fe945d47bdaf2cd9ef09bd2be6c217a0bcb907d9449835e6
-findutils-checksum = 650a24507f8f4ebff83ad28dd27daa4785b4038dcaadc4fe00823b976e848527074cce3f9ec34065b7f037436d2aa6e9ec099bc05d7472c29864ac2c69de7f2e
-flock-checksum = ddb997174c0653bc3d29410a5a16b6290e737aa40fbf4b746e2d1db1e88e5acb08ec11a25c27c8a5a5fbf5a00fcac17abeaa245e7df27bd975ae86364d400b86
+bash-checksum = 409843172032fbe7ef50004ddc30934e12d864e30935fa501dd8a2792c0c12c9c00e1108c05a95ba84b5ab4dd146d92e793b684df1f49ba51eba40f39d47f14d
+binutils-checksum = f3c10d8aeb50872278ece6ad31f7369bf12b2d09618812322b35e6750024b45f77fc32d498a9c579df3c152ce3da382f480281743eb5b53639aa287b7166aba3
+bzip2-checksum = 929cb1b2d0db8a505e2bafe3ce2c893c8f132dd617c0110e86e19f116b570a85de3c8a635483b530456fc8f8b6698c8190d21313e47607f442807b10489ac86e
+cert-checksum = 25b689ebac01ebde136bcef1ef4aa18389fcc618375755001f153b37060cfb327fd6d6afac25d4ddfcf78fd6678fa00ce49ba688e725e4a4c657c37cba4ec3ca
+coreutils-checksum = 887ca0ea9e6918c9e959033e625537335902a118af8384a050bbdfadf1c7fa30a92521dd4d9714055d869c1619c20f0f6bcecb89a99f22278a74668b393a5cf0
+curl-checksum = af977f4ea4b096d3de8d116cfe4f8abbfe5dad487745b6fbc1b13c2669d2bcfdcafa44c9c4cf89c4fb413b3c0986d4e25ae107d461e6295bf469ae3cbc31bb67
+dash-checksum = d8533e3fb29ab00369ee0e4f700a2e5ac2fc11a7c51d83fb93dbbe25d1e06b73718f6f583e368bd232edde41b03b53493657e81dc2e0ce439a9da8e1ed627923
+diffutils-checksum = 597cd0a14087722b78469e2238fcee25e78108f53ca43fbff1194d87625b4c7498a205f68c7bb44fcd437b8d642bba263ac40170780aede006076b11956fd06e
+file-checksum = f1cc951f8972073aa8d29842858d653ecf890e18840d29b848e3b7ac717b2cebe41091415f38cfda1e0d8d066d9e8200ca8a94b5a77f96b8a9b952a9a567284c
+findutils-checksum = a330ace9e7af668b5294c231f9fb45f23c1345d770d67d047d280cd1c6c0fe01f248ccd9877c170bebe8471618261e09a3d64909bb094c73584928005027d58e
+flock-checksum = f711815035e21b46572bf80e730a55822e5abf4cb29749e476ee6cf4d5027e9a7deeacf5f6b8c37f18f17a0cc7a6d98fb0be3936e97b122707f1cb2306d1e1d9
gawk-checksum = 682fadd3630d51d13d19443a9a93d4cba1bd6802dd078f1366f17f39d3fa5800f47037b103d742be9254475fdc251d90626f95a2e04b5ace117cfaecebca2281
-gcc-checksum = 42ae38928bd2e8183af445da34220964eb690b675b1892bbeb7cd5bb62be499011ec9a93397dba5e2fb681afadfc6f2767d03b9035b44ba9be807187ae6dc65e
+gcc-checksum = 7f9605b9527799f92b3251968f63e53bd0b74880952f0cbc58ade9b4eee60bac4990ad408c1a6067f7aa36225709061106f0d93bc581ec97711fc5ff6ae15127
gettext-checksum = f3083af79341bfdc849118333c1598812c12bc225d998181694648187088050160deb4777c252f72a7158e914c2967416489bc6167ef8505664497f2fb94ecbf
-git-checksum = a6159c0a15e3c5f9603157d4010664a6d74e7d65b9fe97a03b36fac12607248ed57980d96565841e88eae343001c167222232737d3af812608c8db011941df1a
-gmp-checksum = 9975e8766e62a1d48c0b6d7bbdd2fccb5b22243819102ca6c8d91f0edd2d3a1cef21c526d647c2159bb29dd2a7dcbd0d621391b2e4b48662cf63a8e6749561cd
-grep-checksum = 0f1506bd19971fbdcb47a111277ca63e8ad045456f096980852fd0a61c860f29f4b369bbaaa5cbce4b0a81718e3e3274d9a078b491f2109baa9a02ce600ee206
-gzip-checksum = 753fbcf5eb104bfc8a8eb81b69b8701f757b5158e6333b17438574169a4662642a122e1fdbd920a536edbcb77253d65fa571e4f507dbe72a70fee5eb161d6324
-isl-checksum = 85d0b40f4dbf14cb99d17aa07048cdcab2dc3eb527d2fbb1e84c41b2de5f351025370e57448b63b2b8a8cf8a0843a089c3263f9baee1542d5c2e1cb37ed39d94
-less-checksum = 79384ff3faa33aeb86da6027c8b264df78f9f8c799af43dc5340e2ca3d86053c9be168140bfa05734a4217e65ef9939652b004d6a536f64b2e0ef3b74b07f535
-libbsd-checksum = b75529785b16c93d31401187f8a58258fbebe565dac071c8311775c913af989f62cd29d5ce2651af3ea6221cffd31cf04826577d3e546ab9ca14340f297777b9
-libiconv-checksum = 365dac0b34b4255a0066e8033a8b3db4bdb94b9b57a9dca17ebf2d779139fe935caf51a465d17fd8ae229ec4b926f3f7025264f37243432075e5583925bb77b7
-libtool-checksum = a6eef35f3cbccf2c9e2667f44a476ebc80ab888725eb768e91a3a6c33b8c931afc46eb23efaee76c8696d3e4eed74ab1c71157bcb924f38ee912c8a90a6521a4
-libunistring-checksum = 01dcab6e05ea4c33572bf96cc0558bcffbfc0e62fc86410cef06c1597a0073d5750525fe2dee4fdb39c9bd704557fcbab864f9645958108a2e07950bc539fe54
-libxml2-checksum = cb7784ba4e72e942614e12e4f83f4ceb275f3d738b30e3b5c1f25edf8e9fa6789e854685974eed95b362049dbf6c8e7357e0327d64c681ed390534ac154e6810
-lzip-checksum = e2e229899002072322a5bf7fb7ef37ff7cc2d0ded7d6525679ac29cec5c1534de89f76bc01e15c3d76584099957d596b9e5b32d3f9cbfc6bc8d6370fc67d7d96
-m4-checksum = a92cad4441b3fd7c033837389ca3499494523d364a5fda043d92c517051510f1758b3b837f0477f42d2258a179ab79a4993e5d1694ef2673db6d96d1faff84fe
+git-checksum = fe41fca911b3f276ac0832c059054c3f51fcc97862d23d47b5bc813a766ef31f431595553f62655360766427ae65debf09f6e2ded318bc8062de2410c359eb51
+gmp-checksum = 2bec8840bceaeac542ce1f7870d5b971b631b055b49751ba4f78f98229d7eb9cd97c0a668f6d7cf81a53bf0b3d5d70e682af523f8f0eee0d7390727ff2bbb271
+grep-checksum = e8c28da23fcef11825c54794b3eebffd0eaf84b7ebf5e187fd06b5d4aeff2af1ac097f0166151b3f11f332dc72abf6097188d578f549496f4c7c96ccf479a61e
+gzip-checksum = 48db13116543ffff39ce03a058e5ca279d93581b8aead3b857c41b0aa3028aa787d5bb21ee18d002565ba3e016fed60d55062b3fd91c41a9877f9f7febc6ae27
+isl-checksum = 00821ccdde36540869b47b1f68837183dfc8eb0ae580a36ba92c25308618a25680adc8de7b9df4d93851b300d7e457c931e7ff3a40fa90e336c74bcab3504e41
+less-checksum = 31f733ecf4170b4f5dcf467b902d0753833f852739b03bf611137aea3672c03e00e9418fd5e289e82624c2dd42b0de06cb65a7a637dae78e7647e5c6ecf9fb57
+libiconv-checksum = 396be5305f899619596b72ccc8a192c5fcc8e12163fd23376833c957ee44dbf3d39461360bf7e32a049f9f12a851da7836bfc8a62842114fc10717ac92762987
+libtool-checksum = 379a16baf2123d3cdd28cf8e35e4bb1e0326ae7f431c42ab2a3bd42ccac99a622e5ff0a68703b81614c147bc02178612a5cecb105b8943937a05c0b3ec91fea0
+libunistring-checksum = f1e92717fc043c2d7c454e44700559b0bd75457fdf3da5e803d9d58d8ebdff261bc73f380f6350bb04ce04071687c0317d09b4e8615003187948be1cd4d0bab3
+libxml2-checksum = 0fdc16aa7536adfe1deaadd19cdc20101dd3b081bd0a2e59d8ecc2965b2ef4c30d2e2447ab5b0cdb861c57ddb89ace1bd4ce120d9436b751b37724a90981ba8c
+lzip-checksum = 8d063a5f0609caa4d568f7bad4c67fcdee70a16c37e67b9d3cccbae53f09c14fa8590816aeb37a3fe0bc71a642c55e9fb4b371c0f68585be9564af22584d5c99
+m4-checksum = 7f8845f99e64d6a45859b9d80b03352a5526b3de0311ca4d6dd6850e504d26dfc90cd21d1640b10382f786213f8fdf20183bff424b3c41ea11432315993ab829
make-checksum = ddf0fdcb9ee1b182ef294c5da70c1275288c99bef60e63a25c0abed2ddd44aba1770be4aab1db8cac81e5f624576f2127c5d825a1824e1c7a49df4f16445526b
-metastore-checksum = b2a5fdde9de5ddc1e6c368d5da1b2e97e4fdbaa138a7be281ccb40a81dd4a9bb1849d36b2d5d3f01205079bace60441f82a7002097ff3a7037340a35b0f1574a
-mpc-checksum = 72d657958b07c7812dc9c7cbae093118ce0e454c68a585bfb0e2fa559f1bf7c5f49b93906f580ab3f1073e5b595d23c6494d4d76b765d16dde857a18dd239628
-mpfr-checksum = d583555d08863bf36c89b289ae26bae353d9a31f08ee3894520992d2c26e5683c4c9c193d7ad139632f71c0a476d85ea76182702a98bf08dde7b6f65a54f8b88
-nano-checksum = d101e7f4802c079254e79340b433749dcd699fa9adec3f96e4218ec12f066a1f6b0954c27254bb6f019bc370ee2116817717870f4e2bc782c552442f2cc75195
-ncurses-checksum = 4c1333dcc30e858e8a9525d4b9aefb60000cfc727bc4a1062bace06ffc4639ad9f6e54f6bdda0e3a0e5ea14de995f96b52b3327d9ec633608792c99a1e8d840d
-openssl-checksum = 1523985ba90f38aa91aa6c2d57652f4e243cb2a095ce6336bf34b39b5a9b5b876804299a6825c758b65990e57948da532cca761aa12b10958c97478d04dd6d34
-patchelf-checksum = 39745662651cf0a9915685b2767a611ceab4286f8fa57eace342b3f44248431616e8563d4ac6709c97d8534229c73c05470239e462b7e74b36bf629a876dfbad
-perl-checksum = b00f3482f6961be043349a09445227e99472a8ae804919bfa0641de5bbd683249deb607f3b5c6c44ccfcf916408eac125132f9660191d574de0a9d1f17892bc1
-pkgconfig-checksum = 4861ec6428fead416f5cbbbb0bbad10b9152967e481d4b0ff2eb396a9f297f552984c9bb72f6864a37dcd8fca1d9ccceda3ef18d8f121938dbe4fdf2b870fe75
-readline-checksum = 41759d27bc3a258fefd7f4ff3277fa6ab9c21abb7b160e1a75aa8eba547bd90b288514e76264bd94fb0172da8a4faa54aab2c07b68a0356918ecf7f1969e866f
-sed-checksum = 7de25d9bc2981c63321c2223f3fbcab61d7b0df4fcf7d4394b72400b91993e1288d8bf53948ed5fffcf5a98c75265726a68ad4fb98e1d571bf768603a108c1c8
-tar-checksum = 4be18afeac54aec4af074cf2358cfade5aaebe2041c5075c5764a81114df4d002e90b28f4444bd1430783e7d6bed82abd0440ef5cb244695f2e56a9a41b42fbc
-texinfo-checksum = da55a0d0a760914386393c5e8e864540265d8550dc576f784781a6d72501918e8afce716ff343e5c2a0ce09cf921bfaf0a48ecb49f6182a7d10e920ae3ea17e7
-unzip-checksum = 0694e403ebc57b37218e00ec1a406cae5cc9c5b52b6798e0d4590840b6cdbf9ddc0d9471f67af783e960f8fa2e620394d51384257dca23d06bcd90224a80ce5d
-valgrind-checksum = 5695d1355226fb63b0c80809ed43bb077b6eed4d427792d9d7ed944c38b557a84fe3c783517b921e32f161228e10e4625bea0550faa4685872bb4454450cfa7f
-wget-checksum = 95fb064f0d79b0a3178a83322f58a85a3a036fb300ed759eb67a538f0bbacdd552f6cbeb60d63b4f0113e8467d923a5ce7ac5570b7a4ce1733b3dfd559bb33b2
-which-checksum = d2f04a5c5291f2d7d1226982da7cf999d36cfe24d3f7bda145508efcfb359511251d3c68b860c0ddcedd66b15a0587b648a35ab6d1f173707565305c506dfc61
-xz-checksum = 7443674247deda2935220fbc4dfc7665e5bb5a260be8ad858c8bd7d7b9f0f868f04ea45e62eb17c0a5e6a2de7c7500ad2d201e2d668c48ca29bd9eea5a73a3ce
-zip-checksum = c1c3d62bf1426476c0f9919b568013d6d7b03514912035f09ee283226d94c978791ad2af5310021e96c4c2bf320bfc9d0b8f4045c48e4667e034d98197e1a9b3
-zlib-checksum = 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff229713bd197d203edfa17c2727700fce65a2a235f07568212d820dca88b528ae
+mpc-checksum = 85e044749c2a001a70af2c11fc4bac58709ca7df11bf6da3b03cca852ef336eb37c4fb1dafdffde2d13df35caa7c296a7cba19a4625b82dc3cccc5066ac95774
+mpfr-checksum = 23a30040511809b6a8ed33d5ef83f903e987f0e4b8e87c9bfa627fdf0ca6400330bda4fd727cf1d37f04f7804c77663ed983c6b5ed26f5d5bd772bf13f1f9eb5
+nano-checksum = e1ae28710ce05ef9c73357b9fb556fa6a7e670ae6fd786339677cb8a154446029dd9c4ead4eeab014691e941211a2d2c9faefc7695d1b97c2b8b445ba77ff067
+ncurses-checksum = 68b7c0388304791ce82ab86ea5c5725357f07bbae0c230af3e4722f75617778b6e28d1f0c1d65871379670e54fcc96ecd54f529a3462d207f3d930e6dad5a2dd
+openssl-checksum = f913eb1f654f702161e524a02ccdd028e26d418f26e334069d77ee0134f094809d5c86bfd65e8d05c9bf3d4f10d58a9f3c4452c8d6120741a90541114d82d5b6
+patchelf-checksum = f74409c00e7e50a88590267fa0c173f71239f1471985792f7bcdab750d202a9a25383c015987608bbcd6ace5b3642645d46dba63199ba54fabbcb51ae7fca8f6
+perl-checksum = 47c37f2698d7f46d2bd8a19abfd329ed58509a9eeb6c9ddb32e435434fcacd5b70419c01d0fa10d1c599cf43248ebc1538076e6bdedc56624fac887d480b8b8d
+pkgconfig-checksum = c37ab9336ac15a73bf5a23101f3fd8a04810f39b0679634e420e5d7bbd60a80cfc919122694cf83965beb071d2780681efc8d954ce99d77eb235a4dc7497e250
+podlators-checksum = c653205fd1e8e570750584cdb3a813a1250ae6f66d4e654b3521b80052867a3a6ba15ad3d7c8a22065f1febc36175257f9b5685570fcf0a87908c2dc9d42cb3d
+readline-checksum = 90d9571609b90f07573f831878f5131df17f1baabc064398c95915f13d44ea0907e41218be089377523751ca5b02a8486b5306ef6d0245432a0c224b7aa7fb94
+sed-checksum = 90d8fa16b2570baee061c128d6db688a3354796a6499014f12dc4c5966b34cbb8a04a47914e4ff8dc1815444ad0235565efef2a327b67c485e3324c2a1d07d78
+tar-checksum = 98d938d76a55dca1d1ba9a13f1aa2a3e8ae25ea422843e93daab98d373f4a72eebf052c88c87122ac16ed629863dc3fdb86dddf18d3ab5a2565b06f49b7d02bd
+texinfo-checksum = 6a706bd0f973b9bbe4fdbc3bfd475cc9699eb410d276cbc19a73255924313006e1b80a87803fa30f7245e3fcb610c829da31e38c91e791e19f11ca8b998914c4
+unzip-checksum = 5c1f3c417d5feed64b8c5dbc26b51dd84130b9ea43d77e810cc9e82cee6e965fa76e2636e5ba11a029eae3454a815a6081cc2828079fa3994c511d555b82f12c
+valgrind-checksum = a99e09e6d957ce435e64f4ce7b1a14e7e266282578171ce4e3bb3f405ce304e4df3b43d2fca59a6024c176f60c2ac82b3992afc810386e8de03c72f262b32702
+wget-checksum = cbce817981864d4dc424b330126b0d06231907004f1fcfc8d105ffc043bab59e93421738d36cf95013cc03b5b9f9813b1c242a3ef3926a907889ca78dfb336d4
+which-checksum = f886f5828359584b204f20a3b712d29c14879288e6255ab612784fce167bc9467c246bf48f9c52780069a95fe25761c3edda358428893967f4a1a1e5e874b2d5
+xz-checksum = a92bc2619f668a9e462945423035a090bc91c010d0788db8fb300851a3f93c04af2d433bc545195979d51312f59667c5814e46dd711b67fa8f89c0864873b81c
+zip-checksum = 433eda6a27074746a960952fa3b08028d5ba43fe976a1306dbace9209be5f89aa0554b17bba5815cd00984c0f64559c5fa0b754b620f6d646a2b145a8b599acc
+zlib-checksum = 97493906504451830655b300b4144deeeb9f41bdb9b3f3a3d5635c2b16bf90d543841dc70d028f1cee6b52ad1c604fdc18b18f559d1e4ed6523e6b3ce76fd8c5
@@ -71,150 +71,208 @@ zlib-checksum = 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff
# ------------------------------------------
#
# These are programs and libraries that are optional. The ones in
-# `reproduce/software/config/TARGETS.conf' will be built as part of a
-# project. To specify a software there, just remove the `-checksum' suffix
+# 'reproduce/software/config/TARGETS.conf' will be built as part of a
+# project. To specify a software there, just remove the '-checksum' suffix
# from the list below.
-apachelog4cxx-checksum = aa59ce549c2c5cbeec031361dfce09cdfc3e62ee3bc9ecbc809507b7ec878c14409b98536b7d13c27690809c8e9d5ebafc3589c9fb5e4aecd5cc064943ae7d6b
-apr-checksum = daa140c83c7e2c45c3980d9dc81d34fa662bebd050653562c39572d0ddf2eaedb71767c518a59d77f59db9b32e00221ef48b9f72ec3666c4521dd511969f3706
-apr-util-checksum = 84da76e9b64da2de0996d4d6f3ab3f23db3724eb6352d218e0e8196bcc0b0a5d4fe791f41b4cc350ce3d04cce3bb3cf8bfb513d777d0cd030928368e6b55a536
-astrometrynet-checksum = 1ee3a3490fb687dc580e660a05ec21dc65972e6c1c97b9a5f648b9e4ac39bbb6b783b351d9b3fd8a4721ce01bb891166c18809f24144f6fc0d436955f0bc435e
-atlas-checksum = bf17306f09f2aa973cb776e2c9eacfb2409ad4d95d19802e1c4e0597d0a099fccdb5eaafe273c2682a41e41a3c6fabc8bbba4ce03180cffea40ede5df1d1f56e
-autoconf-checksum = c25e834251bfc2befe822614caf1c80d7e1314a83e7173304abc235fd15a958b8db9fbc801e8ad98328dfd6d9dbc425bfbbefec500fa268992ae7bbf4fa5bc35
-automake-checksum = 8bd5b1d698eeb1d969ca0a32184ad46014b3ec334e3b8a0caf0e462ea5e100fe4ccaa7dcc2224c5db6c87e215d594593db0bf4431649186afb821b0a711c1a82
-bison-checksum = 8763e7b08fbcdeea3200fcb8d65a2589a12b17988d088ace58d19ea09e44d32e73a34ab1d6c4a2106e7147149997aaeabaada22b07159a5a0055cf60a9db162b
-boost-checksum = 2844dddd3357e76909f2f3d008f686949f8b54aed29e1c650e18becd0b50701a273bb3754f1a4e56c0c056693b27c354c9ba54ddbec92599e192c2f6736fe3be
-cairo-checksum = 9eb27c4cf01c0b8b56f2e15e651f6d4e52c99d0005875546405b64f1132aed12fbf84727273f493d84056a13105e065009d89e94a8bfaf2be2649e232b82377f
-cdsclient-checksum = 2d7abf0079189b9dd19cb8919061445fd19ea9f7dfd54e8ceee26b743218cf62ab00eba0147abe82d9294223927f04b4cc3328620dfc9184a7049f8d515b29e4
-cfitsio-checksum = 08a13931726b0ee15bd4e2ad6dd4debb8268f3b0bc33adadec5c6a29295dd536bcccb3cc949721c6cebac6f43b6118e5e38332ac0ba8a07a43553416d8debae5
-cmake-checksum = e0591d5fb234f3e7b74d6d2aad44fbf3e19e69547bd428681ba6ad0461d4f3d2a154605808b4733531d2c66f0e91eb39a179ae0d89a37e92a3f20e9cae691468
-eigen-checksum = 34cf600914cce719d61511577ef9cd26fbdcb7a6fad1d0ab8396f98b887fac6a5577d3967e84a8f56225cc50de38f3b91f34f447d14312028383e32b34ea1972
-emacs-checksum = dfb26531d2c19cf9fb56505f03d799654b45e5f9528e777900e8280ed2c1d21e04c52f510528e31e015977c471ae63164cedee6174b7439ebcf479a21fc18064
-expat-checksum = 514ff2ef3c93af0b1715b7a08732db33c13a113c4c72422716a22ee26c09235deed71ec55510cee24c33bcd6b2347602bd71ce70a432d5583fb63765ff9e0e09
-fftw-checksum = ab918b742a7c7dcb56390a0a0014f517a6dff9a2e4b4591060deeb2c652bf3c6868aa74559a422a276b853289b4b701bdcbd3d4d8c08943acf29167a7be81a38
-flex-checksum = b4ef58d4a1d66b213e2f59df06959decf46d26b253cdc3f51cd26e2e2b505461ef23dafa974dd2005b1f0cafa5a83fe9258baf78004b2fdae6dfc299bc17bfd1
-freetype-checksum = cbb1b6bb7f99f6ecb473ce6027ec5f2868af939f793dd7b083b23e9823e18c4bcbac0b92483ebe70804ad7f4ef5bf4ea5c6b476e7f631a3e6a1b3e904a41e1a5
-gdb-checksum = 0ccd3a2a24963c848976848ba890f99a458569ff10da4d2bb6dd4782852662ab2df7fb9b3525dd4fd04f95070cea07e1bae6dbe2969e321cbb4004a033845d1c
-ghostscript-checksum = 32fb2a3d4e81ac9e281202aaed2f7811e80c939cbce3ffef7ec7cf78213e5da8a2f6c13d15f0c6c8fd24566579ba8b69364d4c66f4e4b7851f6df9209d0ff046
-ghostscript-fonts-gnu-checksum = 222cb81e6956d9724e746f5f4d5c7b47b04bfd75b889f437f946c29a5bf16b5ed57b7584cf7fb0d8561287ef9f2e1ac53e4d231b6eaf23cba2612b47e8a5f919
-ghostscript-fonts-std-checksum = d4fe6b097a3bdd9694dc704a2d986e22d649fbb7ca8302f872dff573525596a3f38246bd6dd166a5970a4fbca8cce6ebfb9a7030806b4662630afdf243438e1f
-gnuastro-checksum = e236814001c1740355cfb1490926c667d14c026c8e93249afb25c1fc934a8ef8c52178276b7ab5cdbeb819117f962a4c7f65df74f0fc35bf512388cbe3b26d39
-gperf-checksum = 855ebce5ff36753238a44f14c95be7afdc3990b085960345ca2caf1a2db884f7db74d406ce9eec2f4a52abb8a063d4ed000a36b317c9a353ef4e25e2cca9a3f4
-gsl-checksum = 0be8240715f0b86aba2c63d9f12da4dba4719d4e350e9308d279e0dd3b2f0519ea26fd2e38a17f3e8cf43aacbaa2455207a7ca0d6c305f3b8725e8ece2250a74
+apachelog4cxx-checksum = 2c4b907a47ae00c38ae9bcd5d215be5115bfb677646cac50538eb1c75824f594aa997dafc9a7828dc6bfd5f22f4c486f13e4210b9a1d8a9c98098d96384ef9c3
+apr-checksum = d0ddf8b10dd8d3a831b94e541d387414cc4d507ad48ff752d3274fb808afe6628d7951364aa3e0f71f373c78d0081411f6a0595d01c62bfb0a54bbc86002c82a
+apr-util-checksum = 6589948e3f9daf4ecd700bdbd2053fcb83005cec9d339278dda067996ba696e4a947116f066cee03214458aa15e0cb6f0df0103e7bc6f8f32327722eb7265f35
+astrometrynet-checksum = 066d8f6820d6bd9cdeef9334d150aef15a934c43944a96ac91b7184eff6c59876bcfed4ca221650b26b12b5e839228a8a7cb4ac3c72e599717d8cedfe6c77510
+atlas-checksum = 13634a63c686800bef8affcf4e8f5f89d08f485fc557eb1b9ba51a405926014e659e439268f655984bb7f59ffe497d594cd7a607103826f1046325d89796e1b0
+autoconf-checksum = daf3d17178d8c2d0f872a50217ba4d8a1481bad7e70ee5a58c5599717a01209cbd58e6149c72f6c4829b96dcf602bcfb828d47e32a9cfe3e5710c45f4f1736e7
+automake-checksum = 728284b89ab4607b5de5db6b8bf5ad8730cbd8ac094f583b93bb92c59dee35e021e823cddd412b0507b18a12cbbbfbec0fb0447c23d0afd167769641fe0755e5
+bison-checksum = 08c3e9bcf9fc01bc8b3a6c5e5f8ecdf628e07d6ce0874341e9df6d7b2925db7720a29b3be9a98d644f05a9e55e1130e5d182cb764e1481891414df93aeb2794f
+boost-checksum = cc7b78311538d6e8a6849d58da3b4d3062dc6b816f91a06b86609e6f1538d87ef9308c27af4db7a644e7c8e2b3de73c9f011a739e75262a0f36affdc313947f3
+cairo-checksum = 5de8528196b450d01da5cd4562dec0dd8c31028b77640b6c611172f7c5ad1dc9a4d30bd4e38259344c3ee42b979e29be702a3190aae5fbf47c82c79ee0c14a8e
+cdsclient-checksum = 24584eedeb84ab4666cbd2ed0b7264c92c63c9cf090595b75b01dd46f848419d6a7e5cff4db3946695f879d0ee2bfbeb527e88c2be85e769b577302d9b2ad2b7
+cfitsio-checksum = 51dc93aecadb08a402898e4c4bacf7a085a9799dda7678aa9e46d3096d51683a49bdb162f18c69b23cba47ffb78a35b11fc41a37f9b559af6da95589d3ad2e67
+cmake-checksum = 351ee5d50d65c6048508c3982842e94e74a8eba066adfd348bb4c79f41a26613c0306ea1b49e96d8272ddae3b2680d74caebcd8391a3bf72d702f30f92c48fab
+eigen-checksum = 39c1944e0daca50bb01e929edc98e2bfc234accb30ca019dcccfc7b02db5cc81035592be219ae93cc3ec16fe0255e7e4d1f29ca2ad3dfee7e63546c47cb2f807
+emacs-checksum = 91313dced8cd74f9e4a996eef42806aad00fb2576fc77b3694bb7b670624b6a1551ff49c2a3326dbeeba916e5732ca5975b5dc40b3b063f6e73a22839009abe9
+expat-checksum = 526f2ec03978f853ff76a911b5a168f58a7eac83b8405536afe2a6acf4611ebcb96acbc5df809fd18e4edeb7ca5059a3ce56dcbd12cb33eecf742c6361d83d19
+fftw-checksum = 28bd2f620399a415181027d30d7ea193aa487c7a277c9943d0051488908fd87e2731de0dfc3bebd22a6121d1deaa46037be8296a8a9cdb711f9fde4510c3d368
+flex-checksum = f15c5a1efc78d3de36572ece8ec1694f368ecf13a01ac3851be142bd63e16e9e5c62d10ce9ac586aabcbed533a47d0d97f7c9f467b729d5efa3316c7788230e2
+freetype-checksum = b3c7289cda3f89fa6e51143f2d1f1c82fa34808d0caa9262e7be5498c8f0fdb14e88d31ec9928e3add71f03208e40adb2fa94c2a4b4c6b7bfb81daf64b1578cb
+gdb-checksum = 4afd6660d2bbc4c48ce726062fb42217bdaa5974774407358b2624036a2ace48ed853746e4d6b83b9a409cd8fefc622b0727c79da8826e28e3d1444fa3a9d148
+ghostscript-checksum = 6dd1713bf40648ba6ed234a6c68582f87fa4075a191cf9af4f4ffa1854b94439c5ee4748e27ade3e8e0b7168fee1df3e9a9f3a32f70ac1ab9dd68e3878358ff2
+ghostscript-fonts-gnu-checksum = d8de17e5d920bd3803ecdc07aca224fa5b9a26db847ddae0ecc79488dc3e867ab3155a8565f20df7855e5228bc7f36a549b082ba0b8b56c9b948d6cfb655fdc0
+ghostscript-fonts-std-checksum = 40e01f88dd113c3120686c11da1c4cf4ee77c8db315b21cc936c3ccfb42cad4949e207298ec6d78d03327ff05122fcd75ac82d5e7cdc9698d4bbf1f72b19756a
+gnuastro-checksum = 97e3789c1462b895ac8f4141dd9fe6bff0f052fb584fc73a5a7b8192adb047947500363c512fb5a947981d68cee1aba3f3082753c0c2ec43a72e7d79693dc8dd
+gperf-checksum = 854cdb24337f14b0d4199cc97c4a1d2ec7f953ad8125a47a932d93f79bdc067e2de231859ac0e842f14b1a8d80c5606fdc2c295a370df5e85ea15023b478805b
+gsl-checksum = f9b92ef783d922f97fa40e934314648c627002e0b5c8067e6ca9b691cfacd6363efe68216db75ae1a093053c7aa6b56fda2769bd2e10b2c3a373339d7a0fc9c5
hdf5-checksum = f828ee9d63533effe1ad358230e5ce7b64c5016e49291d9533575f713cbfba496290fc0151fd9617898bdf36785984ddb38a9207f529d7702d4e23838fe050d8
-healpix-checksum = 29fe680d757bd94651bf029654257cb67286643aad510df4c2f0b06245174411376ec1beca64feebfac14a6fc0194525170635842916d79dcaddeddd9ac6f6c7
-help2man-checksum = 786a6bd4336c591cfeb0b4f2dc1429f6545e36514e7b238453c91368b8f531c46db2be025f02dc52e6dd8b971d6edbb4ff1a8e1b519f9253a3957ad7157790be
-imagemagick-checksum = ad4325df57769f9c4edf8ac71370cb9bb19e090e588d47eb0311e3f4895abd7a7edcbd2e7a495f21acd1daca97fa224bdf1fd978577588e45c11a7799c3d67f4
+healpix-checksum = c11949e92aae9919fd41de91cbab72beffe2519c1203ab153ba217c6b6c81f2c10ceb07b683da0d04fb53ef5c80a9b208bdbb5379f379b7da9d0611d2430cc8e
+help2man-checksum = 4e1656f323c889d2a8fd4603132fd14260de5dbf4d828122037ed561355e404458668f15adfd499c7ec600eb6fd653345cf4687377669f47d2a8ce391d5aed29
+icu-checksum = ab14b52a3fdf2dcde6b5160ab7218eac381b850d3c278324379741c49d71fa6040fbacca94c6937e6c9fc15843761121deff302ca6854da5ca1cd5b26a34e839
+imagemagick-checksum = 2132614540b7422c9772fcebe7e8e358994efcfb53d8e48fa52992313b09b191847e395bad305322c377a4697014353bb8c15adc4edfd712e038504fc7f17c5e
imfit-checksum = 15edd2349232c1c8e611b31d3a46b0700112d274515f54d0a0085bb4bfa6d3d5f8a15cd926516e043a29ce841accf3534ae58dbfb952d858dc9445199c957096
-lapack-checksum = 17786cb7306fccdc9b4a242de7f64fc261ebe6a10b6ec55f519deb4cb673cb137e8742aa5698fd2dc52f1cd56d3bd116af3f593a01dcf6770c4dcc86c50b2a7f
-libffi-checksum = 980ca30a8d76f963fca722432b1fe5af77d7a4e4d2eac5144fbc5374d4c596609a293440573f4294207e1bdd9fda80ad1e1cafb2ffb543df5a275bc3bd546483
-libidn-checksum = 0d66e10bf7a8de4b27f692a427d2c8e901b8bed73b0a36268d8f939205df81f6a30f0634fd3b87370d4e81c1327c0b10391fa122a0a5459c32a3541b8a2149ad
-libgit2-checksum = 7c307822b22e3771e5e908b115600310f7901b3250287532c498003b25a5b1e007bfa23592f16ec4d83c1567a9213710526f78cab7c120316e9a8fc74c5e57a9
-libjpeg-checksum = 74ea5af3545657d4ac03f8f7933913112cc2d982f0e379d0e5647f1acac21931468e53806297c30ebe180c7bcf84919a0ac20a4195afb03db03060d57904ef6c
+lapack-checksum = ff670e194a1d8c998f05e6143e01a09e6b43176c511217ea3c77742afd9f2566251c50fc23aeb916442401f7118c1d1fe21f0172382a7f4f2c516c1d7d873e24
+libbsd-checksum = 5c7d98474000af1271a36ab769e54aba41578e0b0f06e47af2986d6821b6586ac430ec04cc51b7836823834dd9d0aec9f4ab3af088b94f963b89729fa2cc95d8
+libffi-checksum = 6a4d46567512c3ce69a11c8d01fa0397cf653ea6207b5a86192a52367006f0f27fc08b9ae3559ef45da69433c7c39818199b29ac06bc2c7dc0fafefe7ae7b32d
+libidn-checksum = 8606bbdacc48b7fd572104896615acf0ca88f7c11a60c9eafc2ddb8477110a1f62c5684dbd5adf45b6417d9fc04a46add26052f63e464a81b3f4192443d13447
+libgit2-checksum = 7db8792f4280c484e324aa0f862714cd2ff3ea5c93aaf6953b08f9f2d7423e742d78f6fad11b5e5823747c81188e12432f247ee99dfaf331e4ce293713904ab2
+libjpeg-checksum = e27405f4caefe485a14b5017ffb17025373ac78c6a727c5d95e6b007f5b5e1256f028beb9410f0351d3733ad7f63967de2ed523cdfa4c406db690fddebf3e04b
+libmd-checksum = a598b61dcbd7c3daf25a4affccfcdb2f6d58a0d2057b4f24e8fb92cddbeb0d62f1f5e3dd55195dd83be405d187184a331ea080b936b6c336ad4271be03b0beb0
libnsl-checksum = a3c8f674357674b7ed4b26c05adde607f39be8d6dc9ff715448e1fcc5fc23d11fbb4ce85a6e493b79bdb0bb450dc3ffb1fb480715779f738d7bc016fae91621d
-libpaper-checksum = 3bf6ebb0af89931d2f72ea4a09a7fa958b2facda5f238983ec7bac39652e08614b33f0de3af74a03457b2a4203eee4950bf18a4b726e79aa64093ace6a1fb0bc
-libpng-checksum = 59e8c1059013497ae616a14c3abbe239322d3873c6ded0912403fc62fb260561768230b6ab997e2cccc3b868c09f539fd13635616b9fa0dd6279a3f63ec7e074
-libtiff-checksum = d213e5db09fd56b8977b187c5a756f60d6e3e998be172550c2892dbdb4b2a8e8c750202bc863fe27d0d1c577ab9de1710d15e9f6ed665aadbfd857525a81eea8
+libpaper-checksum = e1f37dfbbcc467c2269e228fa71893fd6c4738e1fd6cf24ce6e2ded568cfeeed80da097e51e833aa48e4ab9e1be0a5c75f9f4cf8bf2f050214c85013e9c24043
+libpng-checksum = 8bf8aaccf4b9efe15a18f0c7a0a6a942eba7bcdbaf61313f50af7e04a237ccc12f380b53ed88e5efa2d20b13f1a0f9f365b6bd7824d32350b3b3d9c6fa63eef6
+libtiff-checksum = 628e9e6b3ff5cdf661ac4c9cbee7fc10d5555554d39e447d9a0c4e99b9a81e4ae51e990f7e3a83b607145948a57a3d1d46fae13758d844ebb3e55bb307e28ebf
libtirpc-checksum = bcb6b5c062c1301aa1246ec93ae0a5c1d221b8421126d020863517cb814b43ed038fb6c0c2faf4e68ff133b69abefe4f4d42bfc870671da6c27ca941a30b155a
+metastore-checksum = ac1230686535a652e95024abaf6c5585bdab36f4e092bee5fa4deff2a913cd60f3a6bd8020c6887ccab97f0da3a284a0d4619fad5464a269e7b2040d6f7e6aaf
missfits-checksum = 32727f5eb30573a1cedacb8900e2536867e4815059eee32e64e3db65be9291b8a91b9f45b2c9f3cf6fc2a8cc448012ea3d502bdd9dee516008e17d5086aee795
-netpbm-checksum = 064720f8a9d0a502488e1af4daecdbf3936910996507ca6f311073a0ad842346692a148eb1ddf7b717f7b108f60500246cb4b83f4d3665f5fc285a84ae1d63d6
-openblas-checksum = 64a5f983b2f6e02cdb6e0f14433498cc5daa1ccfb49246f7a2dcd38f9982fa608f2abea069fe0e35012af8c1441c43d1f6418eaccd40795f5002fed1c36ce05d
-openmpi-checksum = aea02a66dc67820c86172fae80f5aa4c71e918525abbfaf06df8ba6898ff78ec3b0ef9638952b2c1d1a9c878fc0999d6f6d06e840679a68779d6b81f0336eb76
+netpbm-checksum = 051c05aa080e6563483053c16bc416b0e1368d6d7b130dec12a01017245a99252f80ddf7ae7322a4f8e9c90d833fc90d94324585659605d8dbb766d5fa2238c5
+openblas-checksum = 363588495451f872d4332a1c5e5ff4422202935641f490515e6f8144d3e1838ef737f39e77aaca396d23f9011b981961aa503e5d5935260231e31bb2280f890d
+openmpi-checksum = 88c73fd708dd5fe3a893d5517425a8a4c3e252fae4715df9a9b0f2311abe94cf5f71be92f153820fbaeb32c7d0ac0175d5165bdadcadc731427309102182c1bd
openssh-checksum = e280fa2d56f550efd37c5d2477670326261aa8b94d991f9eb17aad90e0c6c9c939efa90fe87d33260d0f709485cb05c379f0fd1bd44fc0d5190298b6398c9982
patch-checksum = 75d4e1544484da12185418cd4a1571994398140a91ac606fa08dd067004187dad77d1413f0eb3319b3fe4df076714615c98b29df06af052bb65960fa8b0c86bf
pcre-checksum = abac4c4f9df9e61d7d7761a9c50843882611752e1df0842a54318f358c28f5953025eba2d78997d21ee690756b56cc9f1c04a5ed591dd60654cc78ba16d9ecfb
-pixman-checksum = 1b0205dbe9d9185c68813ce577a889f3c83e83fbd9955c3a72d411c3b476e6be93fc246b5b6ef4ee17e2bb8eb6fb5559e01dff7feb6a6c4c6314f980e960d690
-python-checksum = 392459354b8438f76670ec2086b4618ee21f615ca4b22e5dbb366273665a470de5047a3f241925a62b60bf3cddde5984a05144a55d0c5ced43342f5b94576952
-R-checksum = b7330613ee9795f54cde3dd9f7509be83d9156fb8577c17179727ee01450db27704249f68bd48e0331e2df09c2d9833d8bb019c4f9ce9ba669df74650ff2e842
+pixman-checksum = 333732b99994f7ea636d647e0b6123075351b27601b5b6370c9bc821a1ab3921386ddb92b51e015f3fc35104ba09be1e0d7bef47f0b4f73036b01d1d70396dd9
+python-checksum = 30cc55a4ca7d65c55a967d9c43e2ebeeed2905f2fab01540140537e0384f943b6ab6699b1c2b91cb2b3834fe7d3d1976aec58ba4c5b9f209bc0ea9a0ff248332
+r-cran-checksum = 54cc07956a70c09b5a533188eb063d2a9dc67a8aa5648ec7f9c107f626220c9f6e17ab7175e65dd54a8d608a1ca4106c8ad2856709eb995ab66439b5f033e725
rpcsvc-proto-checksum = c3011d7d7ef97a4a751f6921df1a23e1dd8ac50fb0690c759d37010ed7be27968a2130e3b8872cb48d5914216f9d539096a424f1ec38a75f7ed899748151c6f4
scamp-checksum = 35034a367d2cd09dc51e727e0f23ef6234edc0d978fd71cda1e80391d86af160138cb57281f7f7f9047e35b1246a0de6b235414086a62524413ed423f498583f
scons-checksum = 0477038b014674049f12899b64584d44a85283d521b2422561e42020a5ae296a5af005684087c3ff410ed3fcbdcc5ff61998bc429eb29513f2a864138ffc4945
sextractor-checksum = 4035710f9b8a20a0bb1a3913dab2dadd8444c179bf6dee425e0e8bb66a772944ea189bfce89fd791d316a790fc4b2cb15a62633b19d1d5331b1803dec2e70af7
swarp-checksum = 80f4ade59738df3d4c9b47bda04148b53c6ba995d523fa8d1e02fb5d952b6078a53cc7d273849a033505de127a4f318b95adf2bf5a2dc38e8cc9bdaf5658487a
-swig-checksum = 5eaa2e06d8e4197fd02194051db1e518325dbb074a4c55a91099ad9c55193874f577764afc9029409a41bd520a95154095f26e33ef5add5c102bb2c1d98d33eb
+swig-checksum = ecb1d940f584c23df036a8f77288916003f861851dfdf836ddc5bc45b95312f9259150084191c1d9f086c006e64e12a4db22f7ea382fbb71667b811b8f99995d
tides-checksum = c3360ff0d023b43749ba09a33302ca059f017a157b3ce7cdcf4f1a1578e90d3e7fa420077043adbee6b1ebf94bd698c8d6b279012f36d2a05b4de5351e30e108
-util-linux-checksum = c95d26b9037d6b877a247e6aeb58d17aa80f7e1bd6b523a4e0fde559fe07b3d924ece6d373300fefb65d1f206b3f990aeddb3a03605040e72ce6d6ee88591021
+util-linux-checksum = 2e07b4765482182112c424cbfe5af0dc9c9a50918e335bf99846f3017b291f5df23e95c77942f2265e9b0d471201a0d53d3c79d916df1c61cf044a80e37f85ee
vim-checksum = 06ba43386fcf308520d88d7a68e9bc1fabd824b05078b8f9112500a2ad4e50a91f1a1c2925889b7c06dbce34307f12abf508e2172b05fd283f965cc06552eb6d
-wcslib-checksum = 8c98c4b575056e2d966b77a4bc951256d02ecee3a11847e140fd38d93afd0f76b3e906d590c952dc9fc58ceeb1ba062b19d8e1e676ee0032f5b7ed13a9dfa892
+wcslib-checksum = 008bb069bcc21f7944b37ca44e85aa5d343d44aeb24ad3920c2db716a984e3fb63cdad8fe05ae63b8850c5ce4ce5f81c524dfd14958ae38fcaef32f893814b45
xlsxio-checksum = 22870fda7bd4eefd5fea2a9ad7530c9049135129d9b69805091777e6b54b2fc6c3f0e69c6954f36bce54eebbfeccaf637cce9e271a593221a4296d6632470a6c
yaml-checksum = dadd7d8e0d88b5ebab005e5d521d56d541580198aa497370966b98c904586e642a1cd4f3881094eb57624f218d50db77417bbfd0ffdce50340f011e35e8c4c02
-zlib-checksum = 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff229713bd197d203edfa17c2727700fce65a2a235f07568212d820dca88b528ae
# Xorg related packages
-util-macros-checksum = 3f51504b27f0478c136126f15110cf3cdbba218c4d74a8e974cca1381c6e8364609bd0c444f2fb19aa86a7f4e848dfce4f4da940463b224036f75a60b3d88619
-xorgproto-checksum = 2d10533e54987f293e1c8578aa742c38cf2fac8551db1d317cf5ee4a6a9600eced50f59c57c38b35dd210fe02d350294c967512f04cb9876d6c0c1f7e89cb199
-libxau-checksum = 3ca454ba466a807ea28b0f715066d73dc76ad312697b121d43e4d5766215052e9b7ffb8fe3ed3e496fa3f2a13f164ac692ff85cc428e26731b679f0f06a1d562
+util-macros-checksum = b29c388c60b8318a478656f0552210b2194ee302c082f694e5426ac5c3f4b97fc3e377cd6a7b2b97c3da3442cfb4a5e42dbae18e151ae39f5f84554016a01155
+xorgproto-checksum = b9471ce92f2a5fee9d32710191d7df63799027214ceace60fb2ae8616c888ad142d2a131dea5acc768e86bc71719873c72c20a2863fa821790b96faef44f603f
+libxau-checksum = bdca8935aa1a52bcd2748004d26c7405725c18021a4d9a67604c98c1ec3957cc85b5d987d2fcd9fa4ae5e59b6bbd6ab75712beee2d448bd733a7e3ffeb18e62f
libxdmcp-checksum = cb1d4650f97d66e73acd2465ec7d757b9b797cce2f85e301860a44997a461837eea845ec9bd5b639ec5ca34c804f8bdd870697a5ce3f4e270b687c9ef74f25ec
-xcb-proto-checksum = de66d568163b6da2be9d6c59984f3afa3acd119a781378638045fd68018665ef5c9af98f024e9962ba3eb7c7a4d85c27ba70ffafceb2324ccc6940f34de16690
-libxcb-checksum = b90a23204b0d2c29d8b115577edb01df0465e02d6a8876550fecd62375d24a5d5f872ddd5946772ddba077cadce75b12c7a6d218469dc30b5b92bc82188e8bc6
-fontconfig-checksum = f97f2a9db294fd72d416a7d76dd7db5934ade2cf76903764b09e7decc33e0e2eed1a1d35c5f1c7fd9ea39e2c7653b9e65365f0c6205e047e95e38ba5000dd100
-xtrans-checksum = 4fea89a3455c0e13321cbefa43340016dbb59bdd0dbdb5b796c1a6d2a6b1fd63cf1327b769ab426286b9c54b32ec764a50cd2b46228e4e43b841bda6b94de214
-libx11-checksum = fc18f0dc17ade1fc37402179f52e1f2b9c7b7d3a1a9590fea13046eb0c5193b4796289431cd99388eac01e8e59de77db45d2c9675d4f05ef8cf3ba6382c3dd31
-libxext-checksum = 09146397d95f80c04701be1cc0a9c580ab5a085842ac31d17dfb6d4c2e42b4253b89cba695e54444e520be359883a76ffd02f42484c9e2ba2c33a5a40c29df4a
-libice-checksum = 2f1ef2c32c833c71894a08fa7e7ed53f301f6c7bd22485d71c12884d8e8b36b99f362ec886349dcc84d08edc81c8b2cea035320831d64974edeba021b433c468
-libsm-checksum = 74c42e27029db78475e62025b4711dbac5e22d2f8e8a24be98a1c31b03c0fc4afe859928f851800ea0b76854f12147900dc4f27bbfd3d8ea45daaaf24b70a903
-libxt-checksum = 06248508b6fe5dfba8ceb4518475f656162351d78136eeb5d65086d680dabe9aca7bba3c94347f9c13ef03f82dab3ac19d0952ee610bc8c51c14cee7cf65f0b1
-libpthread-stubs-checksum = 5293c847f5d0c47a6956dd85b6630866f717e51e1e9c48fa10f70aa1e8268adc778eaf92504989c5df58c0dcde656f036248993b0ea5f79d4303012bfeff3c72
+xcb-proto-checksum = 385ea15eac6326b73849f1dae790619760f473f4c5471a3ccf990b01350931395e988cd83f632c94fe987eec74198f59835b8140686774710462e0019f90c168
+libxcb-checksum = c69c7e5d6c172aa0bd65d6f3ad25d9a8388194cf2281ee9c9e02baf73c351752b7c287fd04f8ee3429d5b55358806c81565046796263212379593db4e32c4287
+fontconfig-checksum = 12dfddb3b37d71555764fe97dcb439496f3fc0e6d34e4536a92eda1069cc78ceb1dda5b2c0b247f779a093a6c75c56afcac6f0cd099d897c2ce30740a9fbb183
+xtrans-checksum = 1c2fdd7cb906f012f7db5d42466734f89608bf3376b6bf4b115c2efbf71c4247587cc218ccfcfaa6d50f0cf336865d10fc604515b25bc6e95917db8acb7e3000
+libx11-checksum = 740cc5364d49cee8fd86f3dc0f43d7b358c94989717ea8991addea9b2307df6e5b2ae1f8f7aa3571f7294b8760c7aca7be9fdf5b477dec55da14e915dccafd7c
+libxext-checksum = 58f4e87f200bfde2b9a2cd85c1c39d5462b702e12274ffa5fae5bd3a08b3dfab1290b399c31224c86627d927106e4dbfcdec6152c11b169834442008228796d4
+libice-checksum = 9e3488abaff115ac919e5df85479044579802b38a10597425e13466c22c149ef56a8adba77e2adbbcccb8d2e72e23976313f4af3dcee66128b768c971c513932
+libsm-checksum = b9eec391f0438e85e32cf771afbb35bf215119ec31d9e31be40c1b653f2c3e894c2d19add0d6ff69e944d1230f681275e8abad86a8912ec37c1c41e0f1ce4d00
+libxt-checksum = 9ec0f95ae073360ea7a80ffca2dcd998c991363b79696550c4f88edf2c9cbf740caca23929660b77fffca4ca292465a101398e9e891cd00db85f8eceaa78cde0
+libpthread-stubs-checksum = a99695c007e17d81879d5c113652c5e1edab2d63aac9b2984c4f334d85e2cebc1d0b4e8134d6196ed8eea4d4da41972fca27bb8a0c76c2946460f785233a2c03
# Python packages
# ---------------
#
# Similar to optional programs and libraries above.
#
-# IMPORTANT: If you intend to change the version of any of the Python
-# modules/libraries below, please fix the hash strings of the respective
-# URL in `reproduce/software/make/python.mk'.
+# The sha512sum hash strings for the Python modules/libraries below should
+# correspond to the version numbers in
+# 'reproduce/software/config/versions.conf'. If you update a version and
+# are confident that the new version is safe to use, then you can update
+# its hash here.
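# As the note above says, a hash here should only change together with its
# version in 'versions.conf'. A hedged illustration (not from the Maneage
# sources; the tarball name is purely hypothetical) of obtaining the new
# string would be:
#
#     sha512sum astropy-X.Y.Z.tar.gz
#
# The printed checksum then becomes the value of the matching '*-checksum'
# variable in this file.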
asn1crypto-checksum = 44d442a6ddfa971e31e24712fe084368356deb5e1c4c3b3e813e0910931860215bc1c4f9eb2c4bd4fdef607c324086c096e9357068646efd28c97f2d4f85c62f
asteval-checksum = 4d64900b2f7dfdd098d6c8c102f9d9fd46f9ec265a54330e7d94479ba41f0ee0698855658e18b8b32b9c255159eb9a085af5f0306eb6508663d3fea7d2e00b4a
-astropy-checksum = c32e874d208f312f894643ab5b3d71dc37630e544da0ceb5ee998d752f9a055d32f6e4319f2cb6928637aaf8573bac58d2882bd636b6a89f5501e3ac7e5ab681
+astropy-checksum = 52b7feae68308445014665e3fc80201ca82955916fbe4d065dbb1a439885befb4f669932cec6fd7d359094138fab0ca188162efab764c4591d53896cdb335426
astroquery-checksum = 43846791d8469a26cf6bb8819db58b830cfe50a34bc0091c2e843dd7dc78b1317530855d432a3a567a9f6a6f4d2682382a32edc91ea01716246b99b3625ec521
-beautifulsoup4-checksum = 7aa77bc6008bbcbbbe91b0a850007ab237d2832b63a787fbd94b7cbf47d4276b185e0c61c134df73221406458edff2b75b6b8c2b53b543aa3bb1b0e2202dac5a
+beautifulsoup4-checksum = bf8fd3e54da63a506f294f0e5f1201fd46bf2edcc2db23e99eda995313b8a8d24db3cd2d1903853c539a9320bfb9eb4b29e311772bfddc0d125f0b4aec71e384
+beniget-checksum = 32a19d77323a0a21544ce7fbbb71cc5f4c66949dba280d81deb36f38364544d9fdfb3d24fc48a1ff6d251fb22c24357e81dd7fa4a7e6c11d8b931723e150a182
certifi-checksum = 6a6bf1ff98caefcdbf78a8c83e11e155368bacdd806f0ae0c6afa8f513667df6598e594b3584de61acdca3d6049f4a776937f2aa8672b602bd6db7b737f6074e
-cffi-checksum = af4fe47cf5d6f1126222898365cfa21e9f11d0e71b87d869014dbb37af30dca9ddf50c989030d0f610f50e8099e8dfd08a688d8c3629abbcc4f0294f5f91b817
+cffi-checksum = 50e2b8215ddedfa8f1d569680cc0023cf061c2c52b7b86b6b26ced56e5107b362aaf6dc10a77b9dbbbfca2e5611f7cfb42c59501f9ab7da29fb3ad3ccf418cb4
chardet-checksum = 61a03b23447a2bfe52ceed4dd1b9afdb5784da1933a623776883ee9f297e341f633e27f0ce0230bd5fdc5fdb5382105ab42736a74a417ddeb9f83af57455dba5
corner-checksum = ebd625ab1e4591b4c21d25ec706c35d37f560b727e1e0d6a79948c4a112ee6f21d3ca30162901a27715074e1345f3bdee1a0345c63e5fec24113e495fb094127
-cryptography-checksum = f14319e24d9dca52e74548cada5b78a6235f089ef875dbff4799e862f94da8b087f1b6e03e84dcef9fc7d7693c4a349c5f0cd54b8535806da777420ce8757d39
-cycler-checksum = b7d2ba19861ffaf4dea0444bfe68b5a6264a022d7b3f02c9ff5e5859e3901de12a90f8dc7469e995e09c418515b3df55dbf05a0cfe5368d40790a2c878a74819
-cython-checksum = 6216e63996e83b887cdcee6cd912d42e7da853640336b9190f5115d687848a902ee5a8edd6bfaef645c066b89e17dcd80ca1387688eb80a527ec23a0a4636e8f
+cryptography-checksum = 051b5007512521638981a8e975c7bbcb688b1a1c384babc81ba105c7bc2a7e776a8bf46af5939627528b2b57d99aaacf215a041018c47f6d3ae6d7a0e8eafccf
+cycler-checksum = bfd1b625c785b48f9795c4ef701ce1c8fdebaa0e82530b904ed7f481f644178834a1cbcdc6a2732bd5fa3a39f7bb543ce51b710a3e4101a6ecee89a6ff270ebc
+cython-checksum = 1db604326d0cbeb7b113bd163056ba1d4e3685aa66081911a836604dcb7f3654fedc91f4686a9e6514eed1f6cc8873b2d30ffbe6663844ce5bd04dbe5c120844
eigency-checksum = 1e7cdfc43071da5edba30a0d32cd655442b516f15c166b049a195d151dec8c20a2177ad69bed3bba4788a668fa25a4c551ef4990717ff98d5b2f407bffb214c8
emcee-checksum = dd60aace8879525fd3fe42b747d82170b24b2ed21f538f9186ba96b9d04c084812e3303f5d2e04119dabd2f9d3286d510b4d4a5324c71dd24b1c7e5f0a9a0ac6
entrypoints-checksum = aa1274362d3a4b00266103319ca51aa266605b4999c89a9d0673eb61bfae9e646cb0ec6b86c95544493f6fe048385a2c7641d64adca8f45815546fb1e663c858
-esutil-checksum = c1cf8e7912b16dc675fdc7195ecc2bbffcd156c1257b2df45afdf09cc487c49f2e18a66cd5520ab30c77e60abb8ca802e327075bdd0f23eb634cc4f5a7e974f3
+esutil-checksum = 7f1f22b0a05855819e3bd9bc2eee14942c2f536fb99f4af03575eb41d3d3fd1d2e6091ad076bfab4d51a8f30e213b607af35ed00d063fb2895b58f7a57cde5ae
+extension-helpers-checksum = ea62d537d0f76af9f975ad577d3a679e027fe0dd21f8a125ccb34bff1e4cfb69ecc15c8bceb406744033e91827fc8b78477e30e8f3cc45396bb6ed8102159b08
flake8-checksum = a25076bb7f978ca23af0f2016b30bf5fe4680371b4c0939361a6f8c496f3c846cf98ecadee35fc2dedd588116c8cd982529972dd2411ab139621912b69132a1e
future-checksum = c70565a660cf87c5e7e994bae0d4eb0b2b8b607ecb5ce65521c027bf0a39ce5699f0578413bd3c7edd5d01aeb1617de48dcea098a9e9021d8487a73007573030
-galsim-checksum = 11ecbfb9628cf85a4c25024c6f3a34bbdadf522f64e4e93e6428b545c65a48906a05eb7774d0faaacf52a48dddfd599bfc7cc7252ca854a18c3930f116533a3c
+galsim-checksum = d7e354778de87cf5c8964a7f252025eeeabc04ef2fb673c35adc536b449442aca261e2bd96befaaa872065376ac79cbe23d1ee7d034bcab5916f74b98112b8b8
+gast-checksum = 8311f48788daf060bee54ad8986eae054c2a426adcf97f1503cde37c4da2827cd0f664188e51f5c2f932a0a4ded78166a8466465595a640106060c8b6c7072a5
h5py-checksum = c49b04f7dcddf03f36d4f063d79ecbe544e0b1daee432d4a76cfa83dac3a1f2cb144f40a74fb85ea17cb5b778f57f709969ea5d1a2afc5bdd5aecbc9d732898d
healpy-checksum = installed-with-healpix
html5lib-checksum = 35939b4450893864da04e735ee5e0addacf1dd34bae6a6909c76572abf6bfded446a78a713dfde91c1485ba45867d7abeb6a45cf0545c16ea968707be7de5dd2
idna-checksum = 8ca5cfe6350c51250bafdac7c6e4ddd54c4a5d6bf7acbcef896760a759868c8e9df1fdf550121d8512fa3eb316dcf031ec6058e03b4f66eadee21b63e2187d33
jeepney-checksum = 43083994a7c6af84a5a68d3ff8f6dc4d9129ce9fa55517838fb62d9f62bb78bdf52067649d0b95d08d689b7d7475cb9b2a956662e265a776ad42dcf4ccc0ab63
+jinja2-checksum = db19498dbc9bd6419b474fbb465ce8c689b96f72d33ed1a31cee3c42e2c545dfd0fd5bfca610075e2e126fbdd27300a73efd76fb7ea449727ed61a6127806dcd
keyring-checksum = 3863f2cd89131d7e927e55691b4eb4b7c2599af189525293d0948aef5215efbbbd74d45db3cd4885ba5b18a79f0fa0b0edc3783a020a6702f6a6fb7ed86b2cf2
-kiwisolver-checksum = 80ed3d7429a2ed4944aa09766bebac2709a3a0e0a184ff414f04bf3c7ec3034ea7d0f70dff319922b870dedaf13d64fc87eff86a82e720ec93a2510885ad738d
+kiwisolver-checksum = d5aec6da0a5038f969227eb6320bf6ccdae2668b5730fe3e94ea1be5a2e64d121516abea16764c4659e05d821d044e76d97054385c0bc62780c3b0cd82468aed
lmfit-checksum = 8435328fd8baf92b424299c341aafb995dc45245e17b1774e9eaabdd994dd6ec3b2e70f7506d67b587631cebd2dc584fcd1a9902286925a61e2135b7e11ece21
lsstdesccoord-checksum = a66b8b702cd8122f0c8aee05abe0fc0c6299f8bdb99b151f54bd7ad7430d6bc9f843f7294a7f08f3ce9f8606ea0ced0796a63b79cc8a53ae73068b691e2e6871
-matplotlib-checksum = 2ffd9e79e300a37092ab4b5a7156c2f57e408975818abd413c74477ad622030c7636695467aab821c9996564cddec57e26ba6a8004f6639070964fcb7a510a75
-mpi4py-checksum = 041768f753c8188b2560fe92711861780f0d77eda3281433520c98bb1e9b4da6a89c364f2d1c4623868ffbbcfde34ef556198b1bef6fc1c4a9c19cd5e71b546c
+markupsafe-checksum = 4bc1171f1dc86516d6002d6dc3df2ccf78481480812025114b3a1511779b152a605904f0dc3aa0a6c8c282111a74564891a2b02650f90c44b667e5e0622c62c0
+matplotlib-checksum = f6fe468a0bed15b89cb158f9b09fe7339b925e9ba1d6c39387fb15ae30217e32a9be68ca3983c1284d5b4ab35375e7971edda97ab82960556775ce9730234384
+mpi4py-checksum = 68b1d4ffaab8425ed3eb6c4adf2c008a489d4086eba1dd042c29cba29c95f790d49db9c4bf2870c677f5d27e6e5103781a8113ade6e28fe6cea655487e5e07b5
mpmath-checksum = 58c69a801f65d73cc0eeb0d2c79277ed638568c656e7213d06ab4709c218aac908b2752377139010bed0e91bbfff01d129c60835ff0a928ba1185aded6de7c0a
-numpy-checksum = ddc5f5e262d6f7f6e1a8879165860427baa237b6aad5b09d5b943a36bb0d5922424aac4c0e32a2cc3e14087bf6eb671988b6ff6df0d9ab8c89724e093220eead
+numpy-checksum = 505b8e4aa403b04920584737b7f47f8bf238822de49024ba7defdb542e60c3ac37e35279e4dde4f2dc35ee128d1f21df3d202a7eb49adef64b256ad3bcaccccb
+packaging-checksum = 5a6552ea9f074dc737f0dd20d11901a98900770fa868c1e2fae6ffa9efb66156dbb8d3047feea28805f569d07d17f4286f5ef34f0f978d5b8449ca40cc7794a9
pexpect-checksum = 4cea4229332c1f3de26dfcad596877665b3c02e91d51ca3c45c1f9b44462adb7c82abc7b76eb09a73822c2d1ccc9d812574cf79bf6bc8fb0b7d2f1093962cc3f
+pillow-checksum = 3f43bdb65be1af6ac3a359bfb7ce74bc48b51ada4e197d61626c5aee242643e60f5f2370df99b12e6dd7afde521bb5dcaa5e44ecc4c94ac8bf070cb9aa8a86fb
pip-checksum = c6e13da3a57462371d32982c80575c5181592f5c6a8e70d60ec879e689442f4ad468e7aef97eb58c9da50a5a770385aa35e701eefd713a8e9fafeb12e11d956b
+ply-checksum = 29d3c4de42a74497083dc6780fbfccd319bd3be8098a2a59ea733988a159e760bdd0eead3482fda33a4e36d2b9b88916f9f9408db2fd3a6b7a0c51269b024485
pybind11-checksum = a863b92a03a23395ba67f6e4916c479ba800060e89a6d80e586533a23603df111b5cba9fd03b3ed500956169edfd8cb1b32a0bc05c42f928740beeea5bd63352
pycodestyle-checksum = 84e751a7d00048393b02ca743de5d71d1641e948ee1b4daebbdf2d07e0cd8f087ca4e81f826061114b40ef41920bbcd680c9f479e7cc1a159a70188425717208
pycparser-checksum = 7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5
+pyerfa-checksum = f11dc54f4c3a69dfeab601e8c7bf2be989a1df20280a2e39a8eae77d300b4bd0d9b5bf6d4a75b7774b6a6c4b994126defbcba7e63e3b5e60eb923d9446ca54bd
pyflakes-checksum = 7ebf5843b38146305c1063e070480fea8ec3b47fa1be546b1fafaeb242a688a5a001f978e7257fd71d5905b9a338b466ef17c7330725191587e9c40ba632c3f8
-pyparsing-checksum = fc8342780c38fd08c07d0b885c4d6c77d59f38098aa13c3abd05a790ee6caf2065a0bdab33bc290a6e2da123f4ac7e38c049e389b740546499dc48eba7ae431f
+pyparsing-checksum = 8cb58406eea1fd93d1c012577055d4cb806eaca90b7e94c11bf35acb0293f14fc9d8cbef658cb92368ad6f8689d2568097d9bc456af70e826db96c55e5858dd2
pypkgconfig-checksum = 1fd9aa973bd20a8fab864722598f1d19b94c23c7f2b522556b3182b19fe016bda7aa2be5e48a1b8fefa70a069611007d6d790e24defcb462e4594a382de85b00
python-dateutil-checksum = ec7da86203572582f883a4686acf8a732a2de4f396d809057eb51b2c60dbca5623a7fa90c2c0618c281a2282c60841739bd837731a51cc876f4ff369297f2f81
+pythran-checksum = 5d2d14480149b044ffc159990d2a9b02a236c54c348a899929f11637d0545fee7bb16806660aab6e096a22e1f2e220e344084e5d9c96b2318e51c16767b26c59
pyyaml-checksum = 8f27f92bdfa310a99dd6d83947332cc033fa18f0011998bb585ad5c4340a2da20d8c20bfdb53beaae15651198d1240c986818379b0a05b230f74d1f30f53e7fd
requests-checksum = 385e1d80993a21c09e7c4682500ca8c24155962ba41ecd8e73612722b2ff6618b736e827fc48ad1683b0d2bc7a420cfe680f5107860aca52656ef777f1d60104
-scipy-checksum = 45463df30a0f6270d9f4cf52235f31607904a6ae1375e12600e7f1ab2d27b1dc25a6211b49dceb71506be22c756890adaf9f81d9e6be7455def86c9caf0dc923
+scipy-checksum = ed7956f5686b13689d9a8246a8c3afa64a33f993b38fe0e9ed9e880d49298fea6014f10bc3e1a280f1474155cd691ee0409d133254c5f8cd133805d41b2f1ed4
secretstorage-checksum = 295e0f75c772edf153480730dc20051f06e60e040a18f169620cb4aaa37b8c1a254a496464d3794a38cf49fa884a7b561fe364816f0d12a5b2622c77259f03cf
-setuptools-checksum = 1bed75e7173ea0399f078c96cf5c1818dada7a4b86a93e61ff72da6a25694e321b9047e024617aa775b71a25fe8cb0848323902e1fb9c8c8a1f39434f0de6434
+setuptools-checksum = 92040b907eb5d15ae6a95f73c2f2229e7d4c1941c82890ce3920fcb5549240223433db6e1388b1131ba574540a55a0487ae9b802c9e43d0c1cfde699167ef3e6
setuptools_scm-checksum = 196d4785a1802875d89b9e54ae788e791a9c5cb685109784059955b691242984e42b96d77075116790935f56be82259bc2588d95d65ecbb101261d76daddb83c
+setuptools-rust-checksum = cdebf61a2d55846104a3f2ff5eb0c1e268285d2e1f074dc0080b99faf74e3bfa55af9dd5ecd12aba97b37888b8bf5d7388d7b42f2b0b9e906c9860309ea5aceb
sip_tpv-checksum = 5e03279cb3650dd506332dfcb31aa4a20f23f55b8a29fd18da5c6d422d1b7dc49e12362ceae2ff7417c874401b5e87a73ca1ac0f3c8747c8984e4269cad56c3f
-six-checksum = 937728372edf1e0ac13bbd706723d0de35e015c30d0ae41f789c5ed2e3669bb0db70cdc6e036ec2d437a6c4aa0d5d1e727b6d09ac34cca7e4e92e5d3b4775151
+six-checksum = 903334a9dcde08d654e5bf8f2bc06aa4d72449bd2b667505b9c191c38eb5b8259233fc791215584393cc70aea92ea29035460880a6a151f5defce5db084da1af
soupsieve-checksum = abdcbb6a13563e7afadd3056141587fdc3d7d644e346f789bca0a16242d860219e462491b0c624b287300af960fb8e3f85c79f5137580939a9fc8c3d6961478c
sympy-checksum = 6ae09be7260b1624b4f92d39c68d5cdf54e6e33010d9215f46d62d989c04cdbee6f9f9c8b11ebeda53257d154954fb926b3ab7335b738e33ad248764875b6ddb
-uncertainties-checksum = 420fa4f58ac8dff17875029bc3dcd6539c638e8c8ffa5bcc273b486d05f0d1cc71b1db140a5098c1cd6472f93b6869303f57f48675296e859814197d899327bd
+uncertainties-checksum = 5f09e75db2e4b2838858f2640a8427cd01cf691a6bec66e58f75125cb80515e2a7c164ef6d414dab2cb175a2cc696f42259729d0ebe4b4603f98e3f3ef9e262d
urllib3-checksum = 4c12d08076b0f260727d5aac780f5e9a24e0164755ff05b02a1f5a697876741ff13ba278fdd6e46ef678e8e1146bc39de1fc49ee10ee839229a70540a9424a99
virtualenv-checksum = 3306f59bbcb48ceec225c07083e0b6831379b3e632e4a23c376849559449de1a04db66e0e7ceabb40ddcd3ae984a2a18dfdc4f1c38777d4bc04537f85a0137ac
webencodings-checksum = b727b01bac6ec79bca517960d27b4c0668b295f25559471b9641c2c33dab55db6dac9c990952177964c6418382c22831b14d57df5e632d51d7abf97b61f24326
+wheel-checksum = 0a900806b97c368a6fdb763353c14a54e1fb629443af11432cbbaba5f2d7fa7532c6e4af3fbb3312f9fc2f4bfe285253d13b7c703439e5f3a2dd263850efbabf
+
+# R-CRAN packages
+# ---------------
+#
+# Similar to optional programs and libraries above.
+#
+# The sha512sum hash strings for the R modules/libraries below should
+# correspond to the version numbers in
+# 'reproduce/software/config/versions.conf'. If you update the version and are
+# confident that the new version is safe to use, then you can update the
+# hash here.
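+#
+# As a minimal sketch (the tarball name below is only an illustration and
+# depends on how the package was downloaded), a new hash can be computed
+# and copied into this file with:
+#
+#     sha512sum cli_2.5.0.tar.gz | awk '{print $1}'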
+r-cran-cli-checksum = 29782b4cb00a94536c871276835a5f762f3deca8355c35b9366434cd2873604ea20f2746ef624eb4e6ec1514cb9079e8c101e50ace85fb1336f3befcf77be244
+r-cran-colorspace-checksum = b6c24e1dd104d444e09049df61fcaac680aafc1f530bcff07fc8ce5f2bcfc9a3521aac032a506b61dd10de885a914ed82c4095ee95113e407ffd0cd43662f098
+r-cran-cowplot-checksum = 39e3b29078ffa67e03911c21b4cc09a8dcfe0bf38fb180691e82f2ff87c993e78f89173a750dcf08424fa1e0ab39ae5d07fc59af792ac0333ef02cd9ea8afc03
+r-cran-crayon-checksum = 7034a7ac446361c90088d53f2079144a561e2d62095884edb85700512decf4dab9fb4a85fa6d0fcdddf3d5d8a3d219d56384d1d1af9c6351d81abb5778147cde
+r-cran-digest-checksum = 44a2978be00b7a90f88f05ad8ec114fb07a8e3816fa263749d44aa61f7706c3f802feaa7b7e9267cce1d0dd5c49e7f7ae3ccd9f968a7025aeef16181f7c0d70f
+r-cran-ellipsis-checksum = b82fab404c3e1bb9df2f7ee5ae6e62b237c15c661a417adabbd86f984a2128970e0ad5581e9a8b7541b9f9b07418de469398e6bc06775bd4a3c8bc1472947f62
+r-cran-fansi-checksum = 50a7a8e597b89b49fa33fa93861a906c38508a42980072027a5f2746f047b82333074248e5c7724faf4559eb6a4cdfe7a8ccfd2dc5b637a8f3d5a8584afdbc75
+r-cran-farver-checksum = 66918ddd268b3044625d9b691fa0a2251301400026bdd12524927ccd9f3976ae55189a11ecbbc042ceef0c61e7692577cbf630f3a45fee455a4f3bd5e19e6937
+r-cran-ggplot2-checksum = f515a4710c13f69d3ed8a8d59195aa3d2b52c345ed94a5f36ebe04e1bb621e07d0b5b79290ea42c81a764fc94b17573041a27c1fb06e11b08834ea9e3e89b39a
+r-cran-glue-checksum = 5e679884f45a00a011e284f59370cabc2d470515351324ba2748b06ec780d65b6f64901eb5589713e9b122b103e58cd040eb342c88d7d4a3734a6e23064552d1
+r-cran-gridExtra-checksum = c92c9cb46312300ad9477cfe849c11a9d79fcce1f909c5bdc76e46a6741a636f598a457883fd278e0b00a47eb385bce4489ad04d3cf93ce7def9ae8771dffc71
+r-cran-gtable-checksum = e3cc4328bbb42a59ff1b315f0b030a9fe240bc6b6844f460835c25497179c3a5d9ea10fa2af1aec4b7384e78a564eb99eb4c51067f98a57ef919f2e11f6fbafe
+r-cran-isoband-checksum = 253d0b768492b64d5c32ab20f3b896cfe206305b8a53b31055813bc6957069eb76ea94a403b931fdada06ab33ee91f7105ef080fcd88af0c05860865950d99d1
+r-cran-labeling-checksum = 0ba0cb33863f79868ec6f2289f29ddc742cc620b7ed85364099448d39d71a51cce45028097100aeeaf8a8a7e4ad0982b459ef20a15faed9c5c7db79e9f9b01fe
+r-cran-lifecycle-checksum = 1ac7aafb41925d19abfd78f454d1acd5102a72ee7017840f14cf54d035cf3999d5b6dc2979b00214358e3c26dfac3407c8b525b434e8882c123a92a132bb07df
+r-cran-magrittr-checksum = b0bca6000602b90c996efc3681799a8a965a5bb90eb5e3436b7a26be15e5a219ef6300f4086f0b6efda734272775688a8ca77a4094eda429926a9179125b87c7
+r-cran-MASS-checksum = 0d22ff69cff1414bf52d11aefd75b442c9c8fecb343fe3733c7b28a6b881f3c87ac8b6e4b0d76709c36e612523ef386d207d403c2292881c083e03d21b3709ec
+r-cran-mgcv-checksum = 83df262152d9ee5081b3d16711b5cae3860f7dd879f97db014b987edd6b110b11219bac0e07f8ca04169181291aea76399d465b9bf0e96a1b6aaf83692fdf5bc
+r-cran-munsell-checksum = 59536096c637d84b80f097bf400bcbb9fcb7ddf6267ce6b026e3c7b3cbcf548caef118439a1928dd8833359a18b14e49f38fe90ea7919c28c215b3b297c036a3
+r-cran-pillar-checksum = dd2bc53539ec6838cb8d5bb5ddf30cff9c43c5de0f8d148374d5a394385429a06243db48e7cf6bdb463bad7a130fb47b172ceaac8d04ca3205b4c4210ab672ff
+r-cran-pkgconfig-checksum = 0b636d9b1233a00974ac93eb2a1e0ad6b552fa043bfe9743ec74ef884c1d01a73c372586516abca8772a836c50dcd9eed4d10c50f6883fc68ba2494e79872aaf
+r-cran-R6-checksum = 5986510ff19c1e0129bb0b88d72020d30255e26d6da9a40c123b7b0c42c3e121188c8de34b79ad5eb0e0390eb4d59f2b7f0224b58679459bec0c4d677dec6ea9
+r-cran-RColorBrewer-checksum = 62aae85a0cf349a58ff51922886f71e2a9dc7b9548b061390f3b370f240bcec72340b3ebc03077a0374ce18ccf16121247e0500eca875ef2ab2f755c809e8f28
+r-cran-rlang-checksum = 8dc647e59dc767471345f9cfda7b77316215b0bf5a96e28f63e216d80f239ba9ed260b90926d175080e256d645f9a1b621d7602bfe27c32d0151758753b6cac8
+r-cran-scales-checksum = a1c693f993010613a33b9a165d5f853ad5f4fbbd790deae2bd5b19adc6ea0adbf27962e0f56807297ec56a71a6a5f57e3aa0ef78c266e74dc600da34c24d16e8
+r-cran-tibble-checksum = 4a4953b9083064ba10e240e304617f0b2c65039eb188f56761cfac228e99186054b543aef045607b63e5ebf9da2d20255f0ae724fd974c497507706d21a92864
+r-cran-utf8-checksum = 51b98a3fd07dd82fba249ff78f6e8b1318592151be49f1fe98c061051dca4d4506f7c7c8f1ebccadab4312cef7e01f700b39f4d5a6ba0b7227acd27e028f515d
+r-cran-vctrs-checksum = 23cde893f027f876f37b704a5074fafbbc376b85a053ccdc2862a3e6dc0b68ae2ac3511e2c61ad7a0a0d55524d3f3b724f39a4eeaf1a855ace62634ddea04b4f
+r-cran-viridisLite-checksum = f59a771b4cdca74df641c936812a008bdec99578cd1c82ee8f3119087dbdc72e307b2f3093df971ee7166c010a246fb5ef6c05cec4e51f7cfe721f3343e0fd6a
+r-cran-withr-checksum = 97877fa1ec21244285366818ad51dbe9c5086b377c1217d4ffc250a1f83cd142cc28d35f9aac0527f26bb2e3f0b399537b386a27de2fe546c301029f59614f65
diff --git a/reproduce/software/config/numpy-scipy.cfg b/reproduce/software/config/numpy-scipy.cfg
index 3a3171e..09c1627 100644
--- a/reproduce/software/config/numpy-scipy.cfg
+++ b/reproduce/software/config/numpy-scipy.cfg
@@ -7,8 +7,8 @@
# appropriate sections. Not all packages will use all sections so you
# should leave out sections that your package does not use.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -18,7 +18,7 @@
# IMPORTANT NOTE
# --------------
#
-# The `ALL' grouping does't apply to ATLAS!!!!!
+# The 'ALL' grouping doesn't apply to ATLAS!!!!!
[ALL]
library_dirs = @LIBDIR@
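#
# For instance (a hedged sketch, not part of the configuration above: the
# '[atlas]' section name follows NumPy's 'site.cfg' convention and the
# include path is only a placeholder), ATLAS would need its own explicit
# section instead of relying on '[ALL]':
#
#     [atlas]
#     library_dirs = @LIBDIR@
#     include_dirs = /path/to/atlas/include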
diff --git a/reproduce/software/config/servers-backup.conf b/reproduce/software/config/servers-backup.conf
index 8db0ce1..64de818 100644
--- a/reproduce/software/config/servers-backup.conf
+++ b/reproduce/software/config/servers-backup.conf
@@ -3,7 +3,7 @@
# is irrelevant). Note that this is not to be read as a variable but will
# be parsed as a list.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
diff --git a/reproduce/software/config/software_acknowledge_context.sh b/reproduce/software/config/software_acknowledge_context.sh
index 788ab94..f719d5e 100755
--- a/reproduce/software/config/software_acknowledge_context.sh
+++ b/reproduce/software/config/software_acknowledge_context.sh
@@ -10,8 +10,8 @@
# your project to make a smoothly readable English text. Afterwards, please
# feel free to modify them as you wish.
#
-# Copyright (C) 2020-2021 Boud Roukema <boud@cosmo.torun.pl>
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Boud Roukema <boud@cosmo.torun.pl>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This script is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
diff --git a/reproduce/software/config/texlive-packages.conf b/reproduce/software/config/texlive-packages.conf
index d4900f1..05dd0e2 100644
--- a/reproduce/software/config/texlive-packages.conf
+++ b/reproduce/software/config/texlive-packages.conf
@@ -1,10 +1,10 @@
# Necessary packages to install in TeX Live.
#
# If any extra TeX package is necessary to build your paper, just add its
-# name to this variable (you can check in `ctan.org' to find the official
+# name to this variable (you can check in 'ctan.org' to find the official
# name).
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -12,11 +12,11 @@
# warranty.
-# Note on `tex' and `fancyhdr': These two packages are installed along with
+# Note on 'tex' and 'fancyhdr': These two packages are installed along with
# the basic installation scheme that we used to install tlmgr; they will be
-# ignored in the `tlmgr install' command, but will be used later when we
+# ignored in the 'tlmgr install' command, but will be used later when we
# want their versions.
texlive-typewriter-pkgs = courier inconsolata xkeyval upquote
-texlive-packages = times IEEEtran cite xcolor pgfplots ps2eps \
+texlive-packages = times ieeetran cite xcolor pgfplots ps2eps \
listings ulem etoolbox multibib \
$(texlive-typewriter-pkgs)
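#
# For example (a sketch; 'booktabs' is just an arbitrary CTAN package used
# for illustration), adding one more package only means appending its name
# to the variable above:
#
#     texlive-packages = times ieeetran cite xcolor pgfplots ps2eps \
#                        listings ulem etoolbox multibib booktabs \
#                        $(texlive-typewriter-pkgs)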
diff --git a/reproduce/software/config/texlive.conf b/reproduce/software/config/texlive.conf
index 94a4c89..950dd4f 100644
--- a/reproduce/software/config/texlive.conf
+++ b/reproduce/software/config/texlive.conf
@@ -2,7 +2,7 @@
#
# installdir: Install directory
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
diff --git a/reproduce/software/config/urls.conf b/reproduce/software/config/urls.conf
index a311524..7177411 100644
--- a/reproduce/software/config/urls.conf
+++ b/reproduce/software/config/urls.conf
@@ -3,8 +3,8 @@
# If un-commented, the URLs of this file will be used. Otherwise, default
# servers (primarily on 'zenodo.org') will be checked for the tarball.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2020-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Raul Infante-Sainz <infantesainz@gmail.com>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -19,15 +19,15 @@
# -------------------------------------------------------------
#bash-url = http://akhlaghi.org/src
#binutils-url = http://ftp.gnu.org/gnu/binutils
-#bzip2-url = http://akhlaghi.org/maneage-software
+#bzip2-url = https://sourceware.org/pub/bzip2
#cert-url = http://akhlaghi.org/maneage-software
#coreutils-url = http://ftp.gnu.org/gnu/coreutils
#curl-url = https://curl.haxx.se/download
-#dash-url = http://akhlaghi.org/maneage-software
+#dash-url = http://gondor.apana.org.au/~herbert/dash/files
#diffutils-url = http://ftp.gnu.org/gnu/diffutils
#file-url = ftp://ftp.astron.com/pub/file
#findutils-url = http://ftp.gnu.org/gnu/findutils
-#flock-url = http://akhlaghi.org/src
+#flock-url = https://github.com/discoteq/flock/releases
#gawk-url = http://ftp.gnu.org/gnu/gawk
#gcc-url = http://ftp.gnu.org/gnu/gcc/gcc-$(gcc-version)
#gettext-url = https://ftp.gnu.org/gnu/gettext
@@ -36,8 +36,7 @@
#grep-url = http://ftp.gnu.org/gnu/grep
#gzip-url = http://akhlaghi.org/src
#isl-url = ftp://gcc.gnu.org/pub/gcc/infrastructure
-less-url = http://www.greenwoodsoftware.com/less/
-#libbsd-url = http://libbsd.freedesktop.org/releases
+#less-url = http://www.greenwoodsoftware.com/less/
#libiconv-url = https://ftp.gnu.org/pub/gnu/libiconv
#libtool-url = http://ftp.gnu.org/gnu/libtool
#libunistring-url = http://ftp.gnu.org/gnu/libunistring
@@ -45,15 +44,15 @@ less-url = http://www.greenwoodsoftware.com/less/
#lzip-url = http://akhlaghi.org/src
#m4-url = http://akhlaghi.org/maneage-software
#make-url = http://akhlaghi.org/src
-#metastore-url = http://akhlaghi.org/maneage-software
#mpc-url = http://ftp.gnu.org/gnu/mpc
#mpfr-url = http://www.mpfr.org/mpfr-current
#nano-url = https://www.nano-editor.org/dist/v$(word 1, $(subst ., ,$(nano-version)))
#ncurses-url = http://ftp.gnu.org/gnu/ncurses
#openssl-url = http://www.openssl.org/source
-#patchelf-url = http://nixos.org/releases/patchelf/patchelf-$(patchelf-version)
+#patchelf-url = https://github.com/NixOS/patchelf/releases/download/$(patchelf-version)
#perl-url = $(shell echo https://www.cpan.org/src/$$(echo $(perl-version) | sed -e's/\./ /g' | awk '{printf("%d.0", $$1)}'))
#pkgconfig-url = http://pkg-config.freedesktop.org/releases
+#podlators-url = https://www.eyrie.org/~eagle/software/podlators/
#readline-url = http://ftp.gnu.org/gnu/readline
#sed-url = http://ftp.gnu.org/gnu/sed
#tar-url = http://ftp.gnu.org/gnu/tar
@@ -73,30 +72,31 @@ less-url = http://www.greenwoodsoftware.com/less/
# ------------------------------------------
#
# These are programs and libraries that are optional. The ones in
-# `reproduce/software/config/TARGETS.conf' will be built as
+# 'reproduce/software/config/TARGETS.conf' will be built as
# part of a project. To specify a software package there, just remove the
-# `-url' suffix from the list below.
+# '-url' suffix from the list below.
+
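+# For example (an illustrative sketch; 'example.org' is a placeholder, not
+# a real mirror): the 'cfitsio-url' entry below corresponds to 'cfitsio'
+# in 'TARGETS.conf'; to fetch its tarball from a custom server instead of
+# the default ones, un-comment the line and point it to that server:
+#
+#     cfitsio-url = http://example.org/mirror/cfitsio
+#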
#apachelog4cxx-url = http://akhlaghi.org/maneage-software
-#apr-url = https://www-us.apache.org/dist/apr
-#apr-util-url = https://www-us.apache.org/dist/apr
+#apr-url = https://archive.apache.org/dist/apr/
+#apr-util-url = https://archive.apache.org/dist/apr/
#astrometrynet-url = http://astrometry.net/downloads
#atlas-url = https://sourceforge.net/projects/math-atlas/files/Stable/$(atlas-version)/atlas$(atlas-version).tar.bz2/download
#autoconf-url = http://akhlaghi.org/maneage-software
#automake-url = http://ftp.gnu.org/gnu/automake
#bison-url = http://ftp.gnu.org/gnu/bison
-#boost-url = $(shell vstr=$$(echo $(boost-version) | sed -e's/\./_/g'); echo https://dl.bintray.com/boostorg/release/$(boost-version)/source)
+#boost-url = https://www.boost.org/users/download/
#cairo-url = https://www.cairographics.org/releases
#cdsclient-url = http://cdsarc.u-strasbg.fr/ftp/pub/sw
#cfitsio-url = https://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c
#cmake-url = $(shell majv=$$(echo $(cmake-version) | sed -e's/\./ /' | awk '{printf("%d.%d", $$1, $$2)}'); echo https://cmake.org/files/v$$majv)
-#eigen-url = http://bitbucket.org/eigen/eigen/get/$(eigen-version).tar.gz
+#eigen-url = https://eigen.tuxfamily.org
#emacs-url = http://ftp.gnu.org/gnu/emacs
-#expat-url = $(shell vstr=$$(echo $(expat-version) | sed -e's/\./_/g'); echo https://github.com/libexpat/libexpat/releases/download/R_$$vstr/expat-$(expat-version).tar.lz)
-#fftw-url = ftp://ftp.fftw.org/pub/fftw
+#expat-url = https://github.com/libexpat/libexpat/releases
+#fftw-url = https://fftw.org
#flex-url = https://github.com/westes/flex/files/981163
#freetype-url = https://download.savannah.gnu.org/releases/freetype
#gdb-url = http://ftp.gnu.org/gnu/gdb
-#ghostscript-url = $(shell v=$$(echo $(ghostscript-version) | sed -e's/\.//'); echo https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs$$v)
+#ghostscript-url = https://www.ghostscript.com/releases/gsdnld.html
#gnuastro-url = http://ftp.gnu.org/gnu/gnuastro
#gperf-url = http://ftp.gnu.org/pub/gnu/gperf
#gs-fonts-gnu-url = https://downloads.sourceforge.net/gs-fonts
@@ -105,28 +105,33 @@ less-url = http://www.greenwoodsoftware.com/less/
#hdf5-url = $(shell majorver=$$(echo $(hdf5-version) | sed -e 's/\./ /g' | awk '{printf("%d.%d", $$1, $$2)}'); echo https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-$$majorver/hdf5-$(hdf5-version)/src)
#healpix-url = http://akhlaghi.org/maneage-software
#help2man-url = http://ftp.gnu.org/gnu/help2man
-#ibpaper-url=http://ftp.debian.org/debian/pool/main/libp/libpaper/
-#imagemagick-url = http://akhlaghi.org/maneage-software
+#libpaper-url = http://ftp.debian.org/debian/pool/main/libp/libpaper/
+#icu-url = https://github.com/unicode-org/icu/releases
+#imagemagick-url = https://download.imagemagick.org/ImageMagick/download
#imfit-url = http://www.mpe.mpg.de/~erwin/resources/imfit
#lapack-url = http://www.netlib.org/lapack
-#libffi-url = ftp://sourceware.org/pub/libffi
+#libbsd-url = http://libbsd.freedesktop.org/releases
+#libffi-url = https://sourceware.org/libffi/
#libgit2-url = http://akhlaghi.org/src
#libidn-url = https://ftp.gnu.org/gnu/libidn
#libjpeg-url = http://ijg.org/files
+#libmd-url = https://archive.hadrons.org/software/libmd/
#libnsl-url = http://akhlaghi.org/maneage-software
+#libpaper-url=http://ftp.debian.org/debian/pool/main/libp/libpaper/
#libpng-url = https://download.sourceforge.net/libpng
#libtiff-url = https://download.osgeo.org/libtiff
#libtirpc-url = https://downloads.sourceforge.net/libtirpc
+#metastore-url = http://akhlaghi.org/maneage-software
#minizip-url =
#missfits-url = https://www.astromatic.net/download/missfits
-#netpbm-url = http://akhlaghi.org/maneage-software
+#netpbm-url = https://sourceforge.net/projects/netpbm/files/super_stable/
#openblas-url = https://github.com/xianyi/OpenBLAS/archive
#openmpi-url = $(shell majorver=$$(echo $(openmpi-version) | sed -e 's/\./ /g' | awk '{printf("%d.%d", $$1, $$2)}'); echo https://download.open-mpi.org/release/open-mpi/v$$majorver)
#openssh-url = https://artfiles.org/openbsd/OpenSSH/portable
#patch-url = http://ftp.gnu.org/gnu/patch
#pixman-url = https://www.cairographics.org/releases
-#python-url = http://akhlaghi.org/src
-#R-url = $(shell majver=$$(echo $(R-version) | sed -e's/\./ /g' | awk '{print $$1}'); echo https://cran.r-project.org/src/base/R-$$majver)
+#python-url = https://www.python.org/downloads/
+#r-cran-url = $(shell majver=$$(echo $(r-cran-version) | sed -e's/\./ /g' | awk '{print $$1}'); echo https://cran.r-project.org/src/base/R-$$majver)
#rpcsvc-proto-url = https://github.com/thkukuk/rpcsvc-proto/releases/download
#scamp-url = http://akhlaghi.org/maneage-software
#scons-url = https://sourceforge.net/projects/scons/files/scons/$(scons-version)/scons-$(scons-version).tar.gz/download
@@ -149,8 +154,10 @@ less-url = http://www.greenwoodsoftware.com/less/
#libxau-url = https://www.x.org/pub/individual/lib
#libxdmcp-url = https://www.x.org/pub/individual/lib
#libxext-url = https://www.x.org/pub/individual/lib
+#libice-url = https://www.x.org/archive/individual/lib/
#util-macros-url = https://www.x.org/pub/individual/util
#xcb-proto-url = https://xorg.freedesktop.org/archive/individual/proto
+#libxcb-url = https://xcb.freedesktop.org/dist/
#xorgproto-url = https://xorg.freedesktop.org/archive/individual/proto
#xtrans-url = https://www.x.org/pub/individual/lib
diff --git a/reproduce/software/config/versions.conf b/reproduce/software/config/versions.conf
index 102ebbd..34d074c 100644
--- a/reproduce/software/config/versions.conf
+++ b/reproduce/software/config/versions.conf
@@ -1,7 +1,8 @@
# Versions of the various dependencies
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2022 Pedram Ashofteh Ardakani <pedramardakani@pm.me>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
@@ -15,47 +16,47 @@
# Basic/low-level programs and libraries (installed in any case)
# --------------------------------------------------------------
#
-# CLASS:BASIC (important identifier, don't modify this line)
-bash-version = 5.0.18
-binutils-version = 2.35
-coreutils-version = 8.32
-curl-version = 7.71.1
-dash-version = 0.5.10.2
-diffutils-version = 3.7
-file-version = 5.39
-findutils-version = 4.7.0
-flock-version = 0.2.3
+# CLASS:BASIC (important identifier for 'awk'; don't modify this line)
+bash-version = 5.1.8
+binutils-version = 2.37
+coreutils-version = 9.1
+curl-version = 7.79.1
+dash-version = 0.5.11.5
+diffutils-version = 3.8
+file-version = 5.41
+findutils-version = 4.8.0
+flock-version = 0.4.0
gawk-version = 5.1.0
-gcc-version = 10.2.0
+gcc-version = 11.2.0
gettext-version = 0.21
-git-version = 2.28.0
-gmp-version = 6.2.0
-grep-version = 3.4
-gzip-version = 1.10
+git-version = 2.36.0
+gmp-version = 6.2.1
+grep-version = 3.7
+gzip-version = 1.11
isl-version = 0.18
-less-version = 563
-libbsd-version = 0.10.0
+less-version = 590
libiconv-version = 1.16
libtool-version = 2.4.6
-libunistring-version = 0.9.10
-libxml2-version = 2.9.9
-lzip-version = 1.22-rc2
-m4-version = 1.4.18-patched
+libunistring-version = 1.0
+libxml2-version = 2.9.12
+lzip-version = 1.22
+m4-version = 1.4.19
make-version = 4.3
-metastore-version = 1.1.2-23-fa9170b
-mpc-version = 1.1.0
-mpfr-version = 4.0.2
-nano-version = 5.2
-ncurses-version = 6.2
-patchelf-version = 0.10
-perl-version = 5.32.0
+mpc-version = 1.2.1
+mpfr-version = 4.1.0
+nano-version = 6.0
+ncurses-version = 6.3
+openssl-version = 3.0.0
+patchelf-version = 0.13
+perl-version = 5.34.0
pkgconfig-version = 0.29.2
-readline-version = 8.0
+podlators-version = 4.14
+readline-version = 8.1.1
sed-version = 4.8
-tar-version = 1.32
-texinfo-version = 6.7
+tar-version = 1.34
+texinfo-version = 6.8
unzip-version = 6.0
-wget-version = 1.20.3
+wget-version = 1.21.2
which-version = 2.21
xz-version = 5.2.5
zip-version = 3.0
@@ -67,7 +68,16 @@ zlib-version = 1.2.11
# When updating the version of these programs/libraries, please look into
# the build rule first: In one way or another, the version string becomes
# necessary during their build and must be accounted for.
-bzip2-version = 1.0.6
+bzip2-version = 1.0.8
+
+# PEM Certificate File
+# --------------------
+#
+# This file is shipped with most operating systems and can be found in
+# '/etc/ssl/cert.pem' (which may be a symbolic link to its actual
+# location). The current file was taken from an up-to-date ArchLinux
+# system on the date given in the version string below.
+certpem-version = 2021-10-15
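+#
+# As a minimal sketch (the exact path may differ between operating
+# systems), the real location of the host's certificate bundle can be
+# checked with:
+#
+#     readlink -f /etc/ssl/cert.pem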
@@ -77,108 +87,104 @@ bzip2-version = 1.0.6
# ------------------------------------------
#
# These are programs and libraries that are optional. The ones in
-# `reproduce/software/config/TARGETS.conf' will be built as
+# 'reproduce/software/config/TARGETS.conf' will be built as
# part of a project. To specify a software package there, just remove the
-# `-version' suffix from the list below.
+# '-version' suffix from the list below.
#
-# CLASS:HIGHLEVEL (important identifier, don't modify this line.)
-apachelog4cxx-version = 0.10.0-603-014954db
+# CLASS:HIGHLEVEL (important identifier for 'awk'; don't modify this line)
+apachelog4cxx-version = 0.12.1
apr-version = 1.7.0
apr-util-version = 1.6.1
-astrometrynet-version = 0.80
+astrometrynet-version = 0.85
atlas-version = 3.10.3
-autoconf-version = 2.69.200-babc
-automake-version = 1.16.2
-bison-version = 3.7
-boost-version = 1.73.0
+autoconf-version = 2.71
+automake-version = 1.16.5
+bison-version = 3.8.2
+boost-version = 1.77.0
cairo-version = 1.16.0
cdsclient-version = 3.84
-cfitsio-version = 3.48
-cmake-version = 3.18.1
-eigen-version = 3.3.7
-emacs-version = 27.1
-expat-version = 2.2.9
-fftw-version = 3.3.8
-flex-version = 2.6.4-91-ga631f5d
-freetype-version = 2.10.2
-gdb-version = 9.2
-ghostscript-version = 9.52
+cfitsio-version = 4.0.0
+cmake-version = 3.21.4
+eigen-version = 3.4.0
+emacs-version = 28.1
+expat-version = 2.4.1
+fftw-version = 3.3.10
+flex-version = 2.6.4
+freetype-version = 2.11.0
+gdb-version = 11.1
+ghostscript-version = 9.55.0
ghostscript-fonts-gnu-version = 6.0
ghostscript-fonts-std-version = 8.11
-gnuastro-version = 0.12
+gnuastro-version = 0.16.1-e0f1
gperf-version = 3.1
-gsl-version = 2.6
-healpix-version = 3.50
-help2man-version = 1.47.11
-imagemagick-version = 7.0.8-67
+gsl-version = 2.7
+help2man-version = 1.48.5
+icu-version = 70.1
+imagemagick-version = 7.1.0-13
imfit-version = 1.6.1
-libffi-version = 3.2.1
-libidn-version = 1.36
-libjpeg-version = v9b
+libbsd-version = 0.11.3
+libffi-version = 3.4.2
+libidn-version = 1.38
+libjpeg-version = 9d
+libmd-version = 1.0.4
libnsl-version = 1.2.0-4a062cf
libpaper-version = 1.1.28
libpng-version = 1.6.37
-libtiff-version = 4.0.10
+libtiff-version = 4.3.0
libtirpc-version = 1.2.6
+metastore-version = 1.1.2-23-fa9170b
missfits-version = 2.8.0
-openblas-version = 0.3.10
-openmpi-version = 4.0.4
+openblas-version = 0.3.18
+openmpi-version = 4.1.1
openssh-version = 8.0p1
patch-version = 2.7.6
pcre-version = 8.44
-pixman-version = 0.38.0
-python-version = 3.8.5
-R-version = 4.0.2
+pixman-version = 0.40.0
+python-version = 3.10.0
+r-cran-version = 4.1.2
rpcsvc-proto-version = 1.4
scamp-version = 2.6.7
scons-version = 3.0.5
sextractor-version = 2.25.0
swarp-version = 2.38.0
-swig-version = 3.0.12
+swig-version = 4.0.2
tides-version = 2.0
-util-linux-version = 2.35
+util-linux-version = 2.37.2
+valgrind-version = 3.18.1
vim-version = 8.2
xlsxio-version = 0.2.21
yaml-version = 0.2.5
# Xorg packages
-util-macros-version = 1.19.2
-xorgproto-version = 2020.1
+util-macros-version = 1.19.3
+xorgproto-version = 2021.5
libxau-version = 1.0.9
libxdmcp-version = 1.1.3
-xcb-proto-version = 1.14
+xcb-proto-version = 1.14.1
libxcb-version = 1.14
-fontconfig-version = 2.13.1
+fontconfig-version = 2.13.94
xtrans-version = 1.4.0
-libx11-version = 1.6.9
+libx11-version = 1.7.2
libxext-version = 1.3.4
libice-version = 1.0.10
libsm-version = 1.2.3
-libxt-version = 1.2.0
+libxt-version = 1.2.1
libpthread-stubs-version = 0.4
# Version-dependent build
# -----------------------
lapack-version = 3.8.0
-libgit2-version = 1.0.1
-wcslib-version = 7.3
+libgit2-version = 1.3.0
+wcslib-version = 7.7
# Netpbm's questions in the configure steps may change with different or
# new versions.
-netpbm-version = 10.86.99
+netpbm-version = 10.73.38
-# Minizip is installed with the same `zlib' tarball, and they have the same
+# Minizip is installed with the same 'zlib' tarball, and they have the same
# version.
minizip-version = $(zlib-version)
-# From version 1.2 OpenSSL may not need a manual addition, as described in
-# its comments and `https://savannah.nongnu.org/bugs/?58263'. If it doesn't
-# cause problems, put it back in the list of "Basic/low-level" tools.
-openssl-version = 1.1.1a
-
-# Version 3.15.0 needs two patches, please check if they are necessary on
-# any future release.
-valgrind-version = 3.15.0
# Be careful with updating hdf5 because h5py 2.10.0 doesn't work with
# version 1.12.0.
@@ -187,6 +193,7 @@ hdf5-version = 1.10.5
+
# Python packages
# ---------------
#
@@ -194,65 +201,147 @@ hdf5-version = 1.10.5
#
# IMPORTANT: If you intend to change the version of any of the Python
# modules/libraries below, please fix the hash strings of the respective
-# URL in `reproduce/software/make/python.mk'.
+# URL in 'reproduce/software/make/python.mk'.
#
-# CLASS:PYTHON (important identifier, don't modify this line.)
+# CLASS:PYTHON-START (important identifier for 'awk'; don't modify this line)
asn1crypto-version = 0.24.0
asteval-version = 0.9.16
-astropy-version = 4.0
-astroquery-version = 0.4
-beautifulsoup4-version = 4.7.1
+astropy-version = 5.0
+beautifulsoup4-version = 4.10.0
+beniget-version = 0.4.1
certifi-version = 2018.11.29
-cffi-version = 1.12.2
+cffi-version = 1.15.0
chardet-version = 3.0.4
corner-version = 2.0.1
-cryptography-version = 2.6.1
-cycler-version = 0.10.0
-cython-version = 0.29.21
+cycler-version = 0.11.0
+cython-version = 0.29.24
eigency-version = 1.77
emcee-version = 3.0.1
entrypoints-version = 0.3
-esutil-version = 0.6.4
+esutil-version = 0.6.9
+extension-helpers-version = 0.1
flake8-version = 3.7.8
future-version = 0.18.1
-galsim-version = 2.2.1
+galsim-version = 2.3.3
+gast-version = 0.5.3
h5py-version = 2.10.0
html5lib-version = 1.0.1
idna-version = 2.8
jeepney-version = 0.4
-keyring-version = 18.0.0
+jinja2-version = 3.0.3
kiwisolver-version = 1.0.1
lmfit-version = 0.9.14
lsstdesccoord-version = 1.2.0
+markupsafe-version = 2.0.1
matplotlib-version = 3.3.0
-mpi4py-version = 3.0.3
+mpi4py-version = 3.1.3
mpmath-version = 1.1.0
-numpy-version = 1.19.1
+numpy-version = 1.21.3
+packaging-version = 21.3
pexpect-version = 4.7.0
+pillow-version = 8.4.0
pip-version = 19.0.2
+ply-version = 3.11
pycodestyle-version = 2.5.0
pycparser-version = 2.19
+pyerfa-version = 2.0.0.1
pyflakes-version = 2.1.1
pybind11-version = 2.5.0
-pyparsing-version = 2.3.1
+pyparsing-version = 3.0.4
pypkgconfig-version = 1.5.1
python-dateutil-version = 2.8.0
+pythran-version = 0.11.0
pyyaml-version = 5.1
requests-version = 2.21.0
-scipy-version = 1.5.2
-secretstorage-version = 3.1.1
-setuptools-version = 41.6.0
+scipy-version = 1.7.3
+setuptools-version = 58.3.0
setuptools_scm-version = 3.3.3
sip_tpv-version = 1.1
-six-version = 1.12.0
+six-version = 1.16.0
soupsieve-version = 1.8
sympy-version = 1.4
-uncertainties-version = 3.1.2
+uncertainties-version = 3.1.6
urllib3-version = 1.24.1
webencodings-version = 0.5.1
+wheel-version = 0.37.0
+# CLASS:PYTHON-END (important identifier for 'awk'; don't modify this line)
# Special Python modules:
#
# Healpy: When any Python module is requested, healpix will also build its
# Python module Healpy.
-healpy-version = xxxxx
+#
+# Bug: Healpix 3.50 doesn't yet support Python 3.10, so we are commenting
+# it out here. When future versions fix the problem, we'll un-comment it again.
+#healpy-version = xxxxx
+
+
+
+
+
+# Problematic software that is currently ignored:
+
+# Healpix 3.80 has a different installation scheme that is not yet
+# implemented here, and unfortunately healpix 3.50 has conflicts with
+# CFITSIO's new version macro and Python 3.10. So for now, we are ignoring
+# it.
+#healpix-version = 3.50
+
+# Setuptools-rust crashes (https://savannah.nongnu.org/bugs/index.php?61731),
+# so it and its dependents are being ignored: 'cryptography', and thus
+# 'secretstorage' and thus 'keyring' and thus 'astroquery'.
+#setuptools-rust-version = 1.1.2
+#cryptography-version = 36.0.1
+#secretstorage-version = 3.1.1
+#keyring-version = 18.0.0
+#astroquery-version = 0.4
+
+
+
+
+
+# R-CRAN (i.e. 'R') packages
+# --------------------------
+#
+# Similar to optional programs and libraries above.
+#
+# Notation: The R package and variables associated with it are by default
+# called 'R-CRAN' (or 'r-cran') in Maneage, because searching on a single
+# letter string 'R' is extremely inefficient.
+#
+# IMPORTANT: If you intend to change the version of any of the R-CRAN
+# modules/libraries below, please fix the hash strings of the respective
+# packages in 'reproduce/software/config/checksums.conf'.
+#
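+# As an illustrative sketch (the package name 'foo' is hypothetical),
+# adding a new R-CRAN package would mean declaring both of the following,
+# and listing 'r-cran-foo' in 'TARGETS.conf' so it actually gets built:
+#
+#     r-cran-foo-version  = 1.0.0        (in this file)
+#     r-cran-foo-checksum = <sha512sum>  (in checksums.conf)
+#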
+# CLASS:R-CRAN-START (important identifier for 'awk'; don't modify this line)
+r-cran-cli-version = 2.5.0
+r-cran-colorspace-version = 2.0-1
+r-cran-cowplot-version = 1.1.1
+r-cran-crayon-version = 1.4.1
+r-cran-digest-version = 0.6.27
+r-cran-ellipsis-version = 0.3.2
+r-cran-fansi-version = 0.5.0
+r-cran-farver-version = 2.1.0
+r-cran-ggplot2-version = 3.3.4
+r-cran-glue-version = 1.4.2
+r-cran-gridExtra-version = 2.3
+r-cran-gtable-version = 0.3.0
+r-cran-isoband-version = 0.2.4
+r-cran-labeling-version = 0.4.2
+r-cran-lifecycle-version = 1.0.0
+r-cran-magrittr-version = 2.0.1
+r-cran-MASS-version = 7.3-54
+r-cran-mgcv-version = 1.8-36
+r-cran-munsell-version = 0.5.0
+r-cran-pillar-version = 1.6.1
+r-cran-pkgconfig-version = 2.0.3
+r-cran-R6-version = 2.5.0
+r-cran-RColorBrewer-version = 1.1-2
+r-cran-rlang-version = 0.4.11
+r-cran-scales-version = 1.1.1
+r-cran-tibble-version = 3.1.2
+r-cran-utf8-version = 1.2.1
+r-cran-vctrs-version = 0.3.8
+r-cran-viridisLite-version = 0.4.0
+r-cran-withr-version = 2.4.2
+# CLASS:R-CRAN-END (important identifier for 'awk'; don't modify this line)
diff --git a/reproduce/software/make/README.md b/reproduce/software/make/README.md
index 8d12d7a..76ab5c1 100644
--- a/reproduce/software/make/README.md
+++ b/reproduce/software/make/README.md
@@ -1,7 +1,7 @@
Software building instructions
------------------------------
-Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
+Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
See the end of the file for license conditions.
This directory contains Makefiles that are called by the high-level
diff --git a/reproduce/software/make/atlas-multiple.mk b/reproduce/software/make/atlas-multiple.mk
index e782578..b92fbfc 100644
--- a/reproduce/software/make/atlas-multiple.mk
+++ b/reproduce/software/make/atlas-multiple.mk
@@ -3,12 +3,12 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run during the initial `./project configure'
+# This Makefile will be run during the initial './project configure'
# script. It is not included into the reproduction pipe after that.
#
# ------------------------------------------------------------------------
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
diff --git a/reproduce/software/make/atlas-single.mk b/reproduce/software/make/atlas-single.mk
index 0934b66..720bad6 100644
--- a/reproduce/software/make/atlas-single.mk
+++ b/reproduce/software/make/atlas-single.mk
@@ -3,12 +3,12 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run during the initial `./project configure'
+# This Makefile will be run during the initial './project configure'
# script. It is not included into the reproduction pipe after that.
#
# ------------------------------------------------------------------------
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
diff --git a/reproduce/software/make/basic.mk b/reproduce/software/make/basic.mk
index fb378b8..5967a1d 100644
--- a/reproduce/software/make/basic.mk
+++ b/reproduce/software/make/basic.mk
@@ -4,7 +4,7 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run by the initial `./project configure' script. It
+# This Makefile will be run by the initial './project configure' script. It
# is not included into the project afterwards.
#
# This Makefile builds low-level and basic tools that are necessary in any
@@ -21,8 +21,9 @@
#
# ------------------------------------------------------------------------
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2022 Pedram Ashofteh Ardakani <pedramardakani@pm.me>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -59,7 +60,7 @@ ibidir = $(BDIR)/software/installed/version-info/proglib
# Ultimate Makefile target. GNU Nano (a simple and very light-weight text
# editor) is installed by default; it is recommended to have it in the
# 'basic.mk', so Maneaged projects can be edited on any system (even when
-# there is no command-line text editor is available).
+# there is no command-line text editor available).
targets-proglib = low-level-links \
gcc-$(gcc-version) \
nano-$(nano-version)
@@ -90,17 +91,20 @@ export CPPFLAGS := -I$(idir)/include $(CPPFLAGS) \
-Wno-nullability-completeness
export LDFLAGS := $(rpath_command) -L$(ildir) $(LDFLAGS)
+# Disable built-in rules (which are not needed here!)
+.SUFFIXES:
+
# This is the "basic" tools where we are relying on the host operating
# system, but are slowly populating our basic software environment. To run
-# (system or template) programs, `LD_LIBRARY_PATH' is necessary, so here,
+# (system or template) programs, 'LD_LIBRARY_PATH' is necessary, so here,
# we'll first tell the programs to look into any possible pre-defined
-# `LD_LIBRARY_PATH', then we'll add our own newly installed libraries. We
+# 'LD_LIBRARY_PATH', then we'll add our own newly installed libraries. We
# will also make sure that there is no "current directory" in it (by
-# removing a starting or trailing `:' and any occurance of `::'.
+# removing a starting or trailing ':' and any occurrence of '::').
export LD_LIBRARY_PATH := $(shell echo $(LD_LIBRARY_PATH):$(ildir) \
| sed -e's/::/:/g' -e's/^://' -e's/:$$//')
-# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is
+# RPATH is automatically written in macOS, so 'DYLD_LIBRARY_PATH' is
# ultimately redundant. But on some systems, even having a single value
# causes crashes (see bug #56682). So we'll just give it no value at all.
export DYLD_LIBRARY_PATH :=
@@ -150,10 +154,10 @@ backupservers = $(filter-out $(topbackupserver),$(backupservers_all))
#
# About ccache: ccache acts like a wrapper over the C compiler and is made
# to avoid/speed-up compiling of identical files in a system (it is
-# commonly used on large servers). It actually makes `gcc' or `g++' a
+# commonly used on large servers). It actually makes 'gcc' or 'g++' a
# symbolic link to itself so it can control them internally. So, for our
# purpose here, it is very annoying and can cause many complications. We
-# thus remove any part of PATH of that has `ccache' in it before making
+# thus remove any part of PATH that has 'ccache' in it before making
# symbolic links to the programs we are not building ourselves.
#
# The double quotations after the starting 'export PATH' are necessary in
@@ -172,9 +176,9 @@ makelink = origpath="$$PATH"; \
| tr '\n' :)"; \
if type $(1) > /dev/null 2> /dev/null; then \
if [ x$(3) = x ]; then \
- ln -sf "$$(realpath $$(which $(1)))" $(ibdir)/$(1); \
+ ln -sf "$$(realpath $$(command -v $(1)))" $(ibdir)/$(1); \
else \
- ln -sf "$$(realpath $$(which $(1)))" $(ibdir)/$(3); \
+ ln -sf "$$(realpath $$(command -v $(1)))" $(ibdir)/$(3); \
fi; \
else \
if [ "x$(strip $(2))" = xmandatory ]; then \
@@ -189,11 +193,11 @@ $(ibdir) $(ildir):; mkdir $@
$(ibidir)/low-level-links: $(ibidir)/grep-$(grep-version) \
| $(ibdir) $(ildir)
- # Hardware specific
+# Hardware specific
$(call makelink,lp) # For printing, necessary for R.
$(call makelink,lpr) # For printing, necessary for R.
- # Mac OS specific
+# Mac OS specific
$(call makelink,mig)
$(call makelink,xcrun)
$(call makelink,sysctl)
@@ -201,14 +205,14 @@ $(ibidir)/low-level-links: $(ibidir)/grep-$(grep-version) \
$(call makelink,dsymutil)
$(call makelink,install_name_tool)
- # On Mac OS, libtool is different compared to GNU Libtool. The
- # libtool we'll build in the high-level dependencies has the
- # executable name `glibtool'.
+# On Mac OS, libtool is different compared to GNU Libtool. The
+# libtool we'll build in the high-level dependencies has the
+# executable name 'glibtool'.
$(call makelink,libtool)
- # Necessary libraries:
- # Libdl (for dynamic loading libraries at runtime)
- # POSIX Threads library for multi-threaded programs.
+# Necessary libraries:
+# Libdl (for dynamic loading libraries at runtime)
+# POSIX Threads library for multi-threaded programs.
for l in dl pthread; do
if [ -f /usr/lib/lib$$l.a ]; then
for f in /usr/lib/lib$$l.*; do
@@ -218,8 +222,8 @@ $(ibidir)/low-level-links: $(ibidir)/grep-$(grep-version) \
fi
done
- # We want this to be empty (so it doesn't interefere with the other
- # files in `ibidir'.
+# We want this to be empty (so it doesn't interfere with the other
+# files in 'ibidir').
touch $@
@@ -249,26 +253,26 @@ $(ibidir)/gzip-$(gzip-version): | $(ibdir) $(ildir) $(lockdir)
echo "GNU Gzip $(gzip-version)" > $@
$(ibidir)/xz-$(xz-version): $(ibidir)/gzip-$(gzip-version)
- tarball=xz-$(xz-version).tar.gz
+ tarball=xz-$(xz-version).tar.lz
$(call import-source, $(xz-url), $(xz-checksum))
$(call gbuild, xz-$(xz-version), static)
echo "XZ Utils $(xz-version)" > $@
$(ibidir)/bzip2-$(bzip2-version): $(ibidir)/gzip-$(gzip-version)
- # Download the tarball.
- tarball=bzip2-$(bzip2-version).tar.gz
+# Download the tarball.
+ tarball=bzip2-$(bzip2-version).tar.lz
$(call import-source, $(bzip2-url), $(bzip2-checksum))
- # Bzip2 doesn't have a `./configure' script, and its Makefile
- # doesn't build a shared library. So we can't use the `gbuild'
- # function here and we need to take some extra steps (inspired
- # from the GNU/Linux from Scratch (LFS) guide for Bzip2):
- # 1) The `sed' call is for relative installed symbolic links.
- # 2) The special Makefile-libbz2_so builds shared libraries.
- #
- # NOTE: the major version number appears in the final symbolic
- # link.
+# Bzip2 doesn't have a './configure' script, and its Makefile doesn't
+# build a shared library. So we can't use the 'gbuild' function here
+# and we need to take some extra steps (inspired by the GNU/Linux
+# from Scratch (LFS) guide for Bzip2):
+#
+# 1) The 'sed' call is for relative installed symbolic links.
+# 2) The special Makefile-libbz2_so builds shared libraries.
+#
+# NOTE: the major version number appears in the final symbolic link.
tdir=bzip2-$(bzip2-version)
if [ $(static_build) = yes ]; then
makecommand="make LDFLAGS=-static"
@@ -283,7 +287,7 @@ $(ibidir)/bzip2-$(bzip2-version): $(ibidir)/gzip-$(gzip-version)
fi
cd $(ddir)
rm -rf $$tdir
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd $$tdir
sed -e 's@\(ln -s -f \)$$(PREFIX)/bin/@\1@' Makefile \
> Makefile.sed
@@ -296,14 +300,13 @@ $(ibidir)/bzip2-$(bzip2-version): $(ibidir)/gzip-$(gzip-version)
cd ..
rm -rf $$tdir
cd $(ildir)
- ln -fs libbz2.so.1.0 libbz2.so
+ ln -fs libbz2.so.$(bzip2-version) libbz2.so
echo "Bzip2 $(bzip2-version)" > $@
$(ibidir)/unzip-$(unzip-version): $(ibidir)/gzip-$(gzip-version)
- tarball=unzip-$(unzip-version).tar.gz
- v=$$(echo $(unzip-version) | sed -e's/\.//')
+ tarball=unzip-$(unzip-version).tar.lz
$(call import-source, $(unzip-url), $(unzip-checksum))
- $(call gbuild, unzip$$v, static,, \
+ $(call gbuild, unzip-$(unzip-version), static,, \
-f unix/Makefile generic \
CFLAGS="-DBIG_MEM -DMMAP",,pwd, \
-f unix/Makefile generic \
@@ -311,10 +314,9 @@ $(ibidir)/unzip-$(unzip-version): $(ibidir)/gzip-$(gzip-version)
echo "Unzip $(unzip-version)" > $@
$(ibidir)/zip-$(zip-version): $(ibidir)/gzip-$(gzip-version)
- tarball=zip-$(zip-version).tar.gz
- v=$$(echo $(zip-version) | sed -e's/\.//')
+ tarball=zip-$(zip-version).tar.lz
$(call import-source, $(zip-url), $(zip-checksum))
- $(call gbuild, zip$$v, static,, \
+ $(call gbuild, zip-$(zip-version), static,, \
-f unix/Makefile generic \
CFLAGS="-DBIG_MEM -DMMAP",,pwd, \
-f unix/Makefile generic \
@@ -324,10 +326,10 @@ $(ibidir)/zip-$(zip-version): $(ibidir)/gzip-$(gzip-version)
# Some programs (like Wget and CMake) that use zlib need it to be dynamic
# so they use our custom build. So we won't force a static-only build.
#
-# Note for a static-only build: Zlib's `./configure' doesn't use Autoconf's
-# configure script, it just accepts a direct `--static' option.
+# Note for a static-only build: Zlib's './configure' doesn't use Autoconf's
+# configure script, it just accepts a direct '--static' option.
$(ibidir)/zlib-$(zlib-version): $(ibidir)/gzip-$(gzip-version)
- tarball=zlib-$(zlib-version).tar.gz
+ tarball=zlib-$(zlib-version).tar.lz
$(call import-source, $(zlib-url), $(zlib-checksum))
$(call gbuild, zlib-$(zlib-version))
echo "Zlib $(zlib-version)" > $@
@@ -344,11 +346,12 @@ $(ibidir)/tar-$(tar-version): \
$(ibidir)/zlib-$(zlib-version) \
$(ibidir)/bzip2-$(bzip2-version) \
$(ibidir)/unzip-$(unzip-version)
- # Since all later programs depend on Tar, the configuration will be
- # stuck here, only making Tar. So its more efficient to built it on
- # multiple threads (when the user's Make doesn't pass down the
- # number of threads).
- tarball=tar-$(tar-version).tar.gz
+
+# Since all later programs depend on Tar, the configuration will hit
+# a bottleneck here: only making Tar. So its more efficient to built
+# it on multiple threads (even when the user's Make doesn't pass down
+# the number of threads).
+ tarball=tar-$(tar-version).tar.lz
$(call import-source, $(tar-url), $(tar-checksum))
$(call gbuild, tar-$(tar-version), , , -j$(numthreads) V=1)
echo "GNU Tar $(tar-version)" > $@
@@ -370,7 +373,7 @@ $(ibidir)/tar-$(tar-version): \
# a prerequisite (and forgetting in others causing bugs), we'll put it as a
# dependency of 'tar'.
$(ibidir)/patchelf-$(patchelf-version): $(ibidir)/tar-$(tar-version)
- tarball=patchelf-$(patchelf-version).tar.gz
+ tarball=patchelf-$(patchelf-version).tar.lz
$(call import-source, $(patchelf-url), $(patchelf-checksum))
if [ x$(on_mac_os) = xyes ]; then
echo "" > $@
@@ -395,75 +398,76 @@ $(ibidir)/patchelf-$(patchelf-version): $(ibidir)/tar-$(tar-version)
# basic dependencies.
#
# Unfortunately Make needs dynamic linking in two instances: when loading
-# objects (dynamically linked libraries), or when using the `getpwnam'
+# objects (dynamically linked libraries), or when using the 'getpwnam'
# function (for tilde expansion). The first can be disabled with
-# `--disable-load', but unfortunately I don't know any way to fix the
+# '--disable-load', but unfortunately I don't know any way to fix the
# second. So, we'll have to build it dynamically for now.
$(ibidir)/ncurses-$(ncurses-version): $(ibidir)/patchelf-$(patchelf-version)
- tarball=ncurses-$(ncurses-version).tar.gz
+ tarball=ncurses-$(ncurses-version).tar.lz
$(call import-source, $(ncurses-url), $(ncurses-checksum))
- # Delete the library that will be installed (so we can make sure
- # the build process completed afterwards and reset the links).
+# Delete the library that will be installed (so we can make sure the
+# build process completed afterwards and reset the links).
rm -f $(ildir)/libncursesw*
- # Delete the (possibly existing) low-level programs that depend on
- # `readline', and thus `ncurses'. Since these programs are actually
- # used during the building of `ncurses', we need to delete them so
- # the build process doesn't use the project's Bash and AWK, but the
- # host's.
+# Delete the (possibly existing) low-level programs that depend on
+# 'readline', and thus 'ncurses'. Since these programs are actually
+# used during the building of 'ncurses', we need to delete them so
+# the build process doesn't use the project's Bash and AWK, but the
+# host's.
rm -f $(ibdir)/bash* $(ibdir)/awk* $(ibdir)/gawk*
- # Standard build process.
+# Standard build process.
$(call gbuild, ncurses-$(ncurses-version), static, \
--with-shared --enable-rpath --without-normal \
--without-debug --with-cxx-binding \
--with-cxx-shared --enable-widec --enable-pc-files \
--with-pkg-config=$(ildir)/pkgconfig, -j$(numthreads))
- # Unfortunately there are many problems with `ncurses' using
- # "normal" (or 8-bit) characters. The standard way that will work
- # is to build it with wide character mode as you see above in the
- # configuration (or the `w' prefix you see below). Also, most
- # programs (and in particular Bash and AWK), first look for other
- # (mostly obsolete) libraries like tinfo, which define the same
- # symbols. The links below address both situations: we need to fool
- # higher-level packages to find this library even if they aren't
- # explicitly mentioning its name correctly (as a value to `-l' at
- # link time in their configure scripts).
- #
- # This part is taken from the Arch GNU/Linux build script[1], then
- # extended to Mac thanks to Homebrew's script [2].
- #
- # [1] https://git.archlinux.org/svntogit/packages.git/tree/trunk/PKGBUILD?h=packages/ncurses
- # [2] https://github.com/Homebrew/homebrew-core/blob/master/Formula/ncurses.rb
- #
- # Since we can't have comments, in the connected script, here is a
- # summary:
- #
- # 1. We find the actual suffix of the library, from the file that
- # is not a symbolic link (starting with `-' in the output of
- # `ls -l').
- #
- # 2. We make symbolic links to all the "ncurses", "ncurses++",
- # "form", "panel" and "menu" libraries to point to their
- # "wide" (character) library.
- #
- # 3. We make symbolic links to the "tic" and "tinfo" libraries to
- # point to the same `libncursesw' library.
- #
- # 4. Some programs link with "curses" (not "ncurses", notice the
- # starting "n"), so we'll also make links for these to point
- # to the `libncursesw' library.
- #
- # 5. A link is made to also be able to include files from the
- # `ncurses' headers.
+# Unfortunately there are many problems with 'ncurses' using "normal"
+# (or 8-bit) characters. The standard way that will work is to build
+# it with wide character mode as you see above in the configuration
+# (or the 'w' prefix you see below). Also, most programs (and in
+# particular Bash and AWK), first look for other (mostly obsolete)
+# libraries like tinfo, which define the same symbols. The links
+# below address both situations: we need to fool higher-level
+# packages to find this library even if they aren't explicitly
+# mentioning its name correctly (as a value to '-l' at link time in
+# their configure scripts).
+#
+# This part is taken from the Arch GNU/Linux build script[1], then
+# extended to Mac thanks to Homebrew's script [2].
+#
+# [1] https://git.archlinux.org/svntogit/packages.git/tree/trunk/PKGBUILD?h=packages/ncurses
+# [2] https://github.com/Homebrew/homebrew-core/blob/master/Formula/ncurses.rb
+#
+# Since we can't have comments in the connected script, here is a
+# summary:
+#
+# 1. We find the actual suffix of the library, from the file that
+# is not a symbolic link (starting with '-' in the output of 'ls
+# -l').
+#
+# 2. We make symbolic links to all the "ncurses", "ncurses++",
+# "form", "panel" and "menu" libraries to point to their "wide"
+# (character) library.
+#
+# 3. We make symbolic links to the "tic" and "tinfo" libraries to
+# point to the same 'libncursesw' library.
+#
+# 4. Some programs link with "curses" (not "ncurses", notice the
+# starting "n"), so we'll also make links for these to point to
+# the 'libncursesw' library.
+#
+# 5. A link is made to also be able to include files from the
+# 'ncurses' headers.
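+#
+# For illustration only (a sketch: the actual version suffix and the
+# shared-library extension are found by the shell code below, so the
+# exact names differ per system), the links end up looking like:
+#
+#     libncurses.so -> libncursesw.so   (similarly for form, panel,
+#     libtinfo.so   -> libncursesw.so    menu and ncurses++)
+#     libcurses.so  -> libncursesw.so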
if [ x$(on_mac_os) = xyes ]; then so="dylib"; else so="so"; fi
if [ -f $(ildir)/libncursesw.$$so ]; then
+ unalias ls || true # avoid decorated 'ls' commands with extra characters
sov=$$(ls -l $(ildir)/libncursesw* \
| awk '/^-/{print $$NF}' \
- | sed -e's|'$(ildir)/libncursesw.'||')
+ | sed -e "s;$(ildir)/libncursesw\.;;")
cd "$(ildir)"
for lib in ncurses ncurses++ form panel menu; do
@@ -488,7 +492,7 @@ $(ibidir)/ncurses-$(ncurses-version): $(ibidir)/patchelf-$(patchelf-version)
$(ibidir)/readline-$(readline-version): \
$(ibidir)/ncurses-$(ncurses-version)
- tarball=readline-$(readline-version).tar.gz
+ tarball=readline-$(readline-version).tar.lz
$(call import-source, $(readline-url), $(readline-checksum))
$(call gbuild, readline-$(readline-version), static, \
--with-curses --disable-install-examples, \
@@ -496,12 +500,12 @@ $(ibidir)/readline-$(readline-version): \
echo "GNU Readline $(readline-version)" > $@
-# IMPORTANT: Even though we have enabled `rpath', Bash doesn't write the
+# IMPORTANT: Even though we have enabled 'rpath', Bash doesn't write the
# absolute address of the libraries it depends on! Therefore, if we
-# configure Bash with `--with-installed-readline' (so the installed version
+# configure Bash with '--with-installed-readline' (so the installed version
# of Readline, that we build below as a prerequisite of AWK, is used) and
-# you run `ldd $(ibdir)/bash' on the resulting binary, it will say that it
-# is linking with the system's `readline'. But if you run that same command
+# you run 'ldd $(ibdir)/bash' on the resulting binary, it will say that it
+# is linking with the system's 'readline'. But if you run that same command
# within a rule in this project, you'll see that it is indeed linking with
# our own built readline.
#
@@ -509,11 +513,11 @@ $(ibidir)/readline-$(readline-version): \
# released as patches. Therefore we'll need to make our own fully-working
# and updated tarball to build the proper version of Bash. You download and
# apply them to the original tarball and make a new one with the following
-# series of commands (just replace `NUMBER' with the total number of
+# series of commands (just replace 'NUMBER' with the total number of
# patches that you want to apply).
#
# $ number=NUMBER
-# $ tar xf bash-5.0.tar.gz
+# $ tar -xf bash-5.0.tar.gz
# $ cd bash-5.0
# $ for i in $(seq 1 $number); do \
# pname=bash50-$(printf "%03d" $i); \
@@ -529,18 +533,18 @@ $(ibidir)/bash-$(bash-version): \
$(ibidir)/gettext-$(gettext-version) \
$(ibidir)/readline-$(readline-version)
- # Download the tarball.
+# Download the tarball.
tarball=bash-$(bash-version).tar.lz
$(call import-source, $(bash-url), $(bash-checksum))
- # Delete the (possibly) existing Bash executable in the project,
- # let it use the default shell of the host.
+# Delete the (possibly) existing Bash executable in the project,
+# let it use the default shell of the host.
rm -f $(ibdir)/bash
- # Bash has many `--enable' features which are already enabled by
- # default. As described in the manual, they are mainly useful when
- # you disable them all with `--enable-minimal-config' and enable a
- # subset using the `--enable' options.
+# Bash has many '--enable' features which are already enabled by
+# default. As described in the manual, they are mainly useful when
+# you disable them all with '--enable-minimal-config' and enable a
+# subset using the '--enable' options.
if [ "x$(static_build)" = xyes ]; then stopt="--enable-static-link"
else stopt=""
fi;
@@ -553,22 +557,20 @@ $(ibidir)/bash-$(bash-version): \
--with-curses=yes, \
-j$(numthreads))
- # Atleast on GNU/Linux systems, Bash doesn't include RPATH by
- # default. So, we have to manually include it, currently we are
- # only doing this on GNU/Linux systems (using the `patchelf'
- # program).
+# At least on GNU/Linux systems, Bash doesn't include RPATH by
+# default. So we have to add it manually; currently we only do this
+# on GNU/Linux systems (using the 'patchelf' program).
if [ -f $(ibdir)/patchelf ]; then
$(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/bash;
fi
- # To be generic, some systems use the `sh' command to call the
- # shell. By convention, `sh' is just a symbolic link to the
- # preferred shell executable. So we'll define `$(ibdir)/sh' as a
- # symbolic link to the Bash that we just built and installed.
- #
- # Just to be sure that the installation step above went well,
- # before making the link, we'll see if the file actually exists
- # there.
+# To be generic, some systems use the 'sh' command to call the
+# shell. By convention, 'sh' is just a symbolic link to the preferred
+# shell executable. So we'll define '$(ibdir)/sh' as a symbolic link
+# to the Bash that we just built and installed.
+#
+# Just to be sure that the installation step above went well, before
+# making the link, we'll see if the file actually exists there.
ln -fs $(ibdir)/bash $(ibdir)/sh
echo "GNU Bash $(bash-version)" > $@
@@ -584,9 +586,9 @@ $(ibidir)/bash-$(bash-version): \
# Level 4: Most other programs
# ----------------------------
-# In Perl, The `-shared' flag will cause problems while building on macOS,
+# In Perl, the '-shared' flag will cause problems while building on macOS,
# so we'll only use this configuration option when we are on GNU/Linux
-# systems. However, since the whole option must be used (which includes `='
+# systems. However, since the whole option must be used (which includes '='
# and a space), it's easier to define the variable as a Make variable
# outside the recipe, not as a shell variable inside it.
ifeq ($(on_mac_os),yes)
@@ -595,7 +597,7 @@ else
perl-conflddlflags = -Dlddlflags="-shared $$LDFLAGS"
endif
$(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version)
- tarball=perl-$(perl-version).tar.gz
+ tarball=perl-$(perl-version).tar.lz
$(call import-source, $(perl-url), $(perl-checksum))
major_version=$$(echo $(perl-version) \
| sed -e's/\./ /g' \
@@ -605,7 +607,7 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version)
| awk '{printf("%d.%d", $$1, $$2)}')
cd $(ddir)
rm -rf perl-$(perl-version)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd perl-$(perl-version)
./Configure -des \
-Dusethreads \
@@ -615,7 +617,7 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version)
-Dprivlib=$(idir)/share/perl$$major_version/core_perl \
-Darchlib=$(idir)/lib/perl$$major_version/$$base_version/core_perl \
-Dsitelib=$(idir)/share/perl$$major_version/site_perl \
- -Dsitearch=$(idir)/lib/perl$$major_version/$$basever/site_perl \
+ -Dsitearch=$(idir)/lib/perl$$major_version/$$base_version/site_perl \
-Dvendorlib=$(idir)/share/perl$$major_version/vendor_perl \
-Dvendorarch=$(idir)/lib/perl$$major_version/$$base_version/vendor_perl \
-Dscriptdir=$(idir)/bin/core_perl \
@@ -641,17 +643,17 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version)
# Coreutils
# ---------
#
-# For some reason, Coreutils doesn't include `rpath' in its installed
+# For some reason, Coreutils doesn't include 'rpath' in its installed
# executables (even though it says that by default it's included and that
-# even when calling `--enable-rpath=yes'). So we have to manually add
-# `rpath' to Coreutils' executables after the standard build is
+# even when calling '--enable-rpath=yes'). So we have to manually add
+# 'rpath' to Coreutils' executables after the standard build is
# complete.
#
# One problem is that Coreutils installs many very basic executables which
# might be in use by other programs. So we must make sure that when
# Coreutils is being built, no other program is being built in
# parallel. The solution to the many executables it installs is to make a
-# fake installation (with `DESTDIR'), and get a list of the contents of the
+# fake installation (with 'DESTDIR'), and get a list of the contents of the
# directory to find the names.
#
# The echo after the PatchELF loop is to avoid a crash if the last
@@ -664,32 +666,33 @@ $(ibidir)/coreutils-$(coreutils-version): \
$(ibidir)/perl-$(perl-version) \
$(ibidir)/openssl-$(openssl-version)
- # Import, unpack and enter the source directory.
- tarball=coreutils-$(coreutils-version).tar.xz
+# Import, unpack and enter the source directory.
+ tarball=coreutils-$(coreutils-version).tar.lz
$(call import-source, $(coreutils-url), $(coreutils-checksum))
cd $(ddir)
rm -rf coreutils-$(coreutils-version)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd coreutils-$(coreutils-version)
- # Set the configure script to use our shell, note that we can't
- # assume GNU SED here yet (it installs after Coreutils).
+# Set the configure script to use our shell; note that we can't
+# assume GNU SED here yet (it installs after Coreutils).
sed -e's|\#\! /bin/sh|\#\! $(ibdir)/bash|' \
-e's|\#\!/bin/sh|\#\! $(ibdir)/bash|' \
configure > configure-tmp
mv configure-tmp configure
chmod +x configure
- # Configure, build and install Coreutils.
+# Configure, build and install Coreutils.
./configure --prefix=$(idir) SHELL=$(ibdir)/bash \
LDFLAGS="$(LDFLAGS)" CPPFLAGS="$(CPPFLAGS)" \
--disable-silent-rules --with-openssl=yes
make SHELL=$(ibdir)/bash -j$(numthreads)
make SHELL=$(ibdir)/bash install
- # Fix RPATH if necessary.
+# Fix RPATH if necessary.
if [ -f $(ibdir)/patchelf ]; then
make SHELL=$(ibdir)/bash install DESTDIR=junkinst
+ unalias ls || true # avoid decorated 'ls' commands with extra characters
instprogs=$$(ls junkinst/$(ibdir))
for f in $$instprogs; do
$(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/$$f
@@ -697,12 +700,34 @@ $(ibidir)/coreutils-$(coreutils-version): \
echo "PatchELF applied to all programs."
fi
- # Come back up to the unpacking directory, delete the source
- # directory and write the final target.
+# Come back up to the unpacking directory, delete the source
+# directory and write the final target.
cd ..
rm -rf coreutils-$(coreutils-version)
echo "GNU Coreutils $(coreutils-version)" > $@
+# Podlators
+#
+# POD is short for "Plain Old Documentation"; it is the format used in
+# Perl's documentation. Podlators provides two executables, 'pod2man'
+# and 'pod2text', to convert POD into the roff format (used in man
+# pages) or into plain text. It is used by some software like OpenSSL
+# to create their man pages.
+$(ibidir)/podlators-$(podlators-version): $(ibidir)/perl-$(perl-version)
+ tarball=podlators-$(podlators-version).tar.lz
+ $(call import-source, $(podlators-url), $(podlators-checksum))
+ cd $(ddir)
+ rm -rf podlators-$(podlators-version)
+ tar -xf $(tdir)/$$tarball
+ cd podlators-$(podlators-version)
+ perl Makefile.PL
+ make
+ make install
+ ln -sf $(ibdir)/site_perl/pod2man $(ibdir)/pod2man
+ ln -sf $(ibdir)/site_perl/pod2text $(ibdir)/pod2text
+ cd ..
+ rm -rf podlators-$(podlators-version)
+ echo "podlators $(podlators-version)" > $@
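+
+# As a usage illustration only (not part of the build; the file name
+# below is hypothetical), the installed converters can be called like:
+#
+#     $ pod2man --section=1 mytool.pod > mytool.1
+#     $ pod2text mytool.pod > mytool.txt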
+
# OpenSSL
#
# Until we find a nice and generic way to create an updated CA file in the
@@ -710,26 +735,26 @@ $(ibidir)/coreutils-$(coreutils-version): \
# along with the other tarballs.
$(idir)/etc:; mkdir $@
$(idir)/etc/ssl: | $(idir)/etc; mkdir $@
-$(ibidir)/openssl-$(openssl-version): $(ibidir)/perl-$(perl-version) \
+$(ibidir)/openssl-$(openssl-version): $(ibidir)/podlators-$(podlators-version) \
| $(idir)/etc/ssl
- # First download the certificates and copy them into the
- # installation directory.
- tarball=cert.pem
+# First download the certificates and copy them into the
+# installation directory.
+ tarball=cert.pem-$(certpem-version)
$(call import-source, $(cert-url), $(cert-checksum))
- cp $(tdir)/cert.pem $(idir)/etc/ssl/cert.pem
+ cp $(tdir)/cert.pem-$(certpem-version) $(idir)/etc/ssl/cert.pem
- # Now download the OpenSSL tarball.
- tarball=openssl-$(openssl-version).tar.gz
+# Now download the OpenSSL tarball.
+ tarball=openssl-$(openssl-version).tar.lz
$(call import-source, $(openssl-url), $(openssl-checksum))
- # According to OpenSSL's Wiki (link bellow), it can't automatically
- # detect Mac OS's structure. It will need some help. So we'll use
- # the `on_mac_os' Make variable that we defined in the configure
- # script and help it with some extra configuration options and an
- # environment variable.
- #
- # https://wiki.openssl.org/index.php/Compilation_and_Installation
+# According to OpenSSL's Wiki (link below), it can't automatically
+# detect Mac OS's structure. It will need some help. So we'll use the
+# 'on_mac_os' Make variable that we defined in the configure script
+# and help it with some extra configuration options and an
+# environment variable.
+#
+# https://wiki.openssl.org/index.php/Compilation_and_Installation
if [ x$(on_mac_os) = xyes ]; then
export KERNEL_BITS=64
copt="shared no-ssl2 no-ssl3 enable-ec_nistp_64_gcc_128"
@@ -743,28 +768,19 @@ $(ibidir)/openssl-$(openssl-version): $(ibidir)/perl-$(perl-version) \
--with-zlib-include=$(idir)/include, \
-j$(numthreads), , ./config )
- # Manually insert RPATH inside the OpenSSL library.
+# Manually insert RPATH inside the two created libraries.
if [ -f $(ibdir)/patchelf ]; then
- patchelf --set-rpath $(ildir) $(ildir)/libssl.so; \
+ patchelf --set-rpath $(ildir) $(ildir)/libssl.so
+ patchelf --set-rpath $(ildir) $(ildir)/libcrypto.so
fi
- # Bug 58263 (https://savannah.nongnu.org/bugs/?58263): In OpenSSL
- # Version 1.1.1a (also checked in 1.1.1g), `openssl/ec.h' fails to
- # include `openssl/openconf.h' on some OSs. The SED hack below
- # inserts a hardwired element of `openssl/openconf.h' that is
- # needed to include sections of code `f` that are deprecated in
- # 1.2.0, but not yet in 1.1.1. This problem may be solved in
- # version 1.2.x, so please check again in that bug.
- mv -v $(idir)/include/openssl/ec.h $(idir)/include/openssl/ec.h.orig
- sed -e 's,\(# include .openssl/opensslconf\.h.\),\1\n#ifndef DEPRECATEDIN_1_2_0\n#define DEPRECATEDIN_1_2_0(f) f;\n#endif\n,' \
- $(idir)/include/openssl/ec.h.orig > $(idir)/include/openssl/ec.h
-
- # Build the final target.
+# Build the final target.
echo "OpenSSL $(openssl-version)" > $@
+
# Downloaders
# -----------
@@ -773,13 +789,13 @@ $(ibidir)/openssl-$(openssl-version): $(ibidir)/perl-$(perl-version) \
# cURL can optionally link with many different network-related libraries on
# the host system that we are not yet building in the template. Many of
# these are not relevant to most science projects, so we are explicitly
-# using `--without-XXX' or `--disable-XXX' so cURL doesn't link with
+# using '--without-XXX' or '--disable-XXX' so cURL doesn't link with
# them. Note that if it does link with them, the configuration will crash
# when the library is updated/changed by the host, and the whole purpose of
# this project is to avoid dependency on the host as much as possible.
$(ibidir)/curl-$(curl-version): $(ibidir)/coreutils-$(coreutils-version)
- tarball=curl-$(curl-version).tar.gz
+ tarball=curl-$(curl-version).tar.lz
$(call import-source, $(curl-url), $(curl-checksum))
$(call gbuild, curl-$(curl-version), , \
@@ -809,8 +825,8 @@ $(ibidir)/curl-$(curl-version): $(ibidir)/coreutils-$(coreutils-version)
# GNU Wget
#
# Note that on some systems (for example GNU/Linux) Wget needs to explicitly
-# link with `libdl', but on others (for example Mac OS) it doesn't. We
-# check this at configure time and define the `needs_ldl' variable.
+# link with 'libdl', but on others (for example Mac OS) it doesn't. We
+# check this at configure time and define the 'needs_ldl' variable.
#
# Also note that since Wget needs to load outside libraries dynamically, it
# gives a segmentation fault when built statically.
@@ -823,12 +839,12 @@ $(ibidir)/wget-$(wget-version): \
$(ibidir)/libiconv-$(libiconv-version) \
$(ibidir)/coreutils-$(coreutils-version)
- # Download the tarball.
+# Download the tarball.
tarball=wget-$(wget-version).tar.lz
$(call import-source, $(wget-url), $(wget-checksum))
- # We need to explicitly disable `libiconv', because of the
- # `pkg-config' and `libiconv' problem.
+# We need to explicitly disable 'libiconv', because of the
+# 'pkg-config' and 'libiconv' problem.
libs="-pthread"
if [ x$(needs_ldl) = xyes ]; then libs="$$libs -ldl"; fi
$(call gbuild, wget-$(wget-version), , \
@@ -861,13 +877,13 @@ $(ibidir)/wget-$(wget-version): \
# there is no access to the system's PATH.
$(ibidir)/diffutils-$(diffutils-version): \
$(ibidir)/coreutils-$(coreutils-version)
- tarball=diffutils-$(diffutils-version).tar.xz
+ tarball=diffutils-$(diffutils-version).tar.lz
$(call import-source, $(diffutils-url), $(diffutils-checksum))
$(call gbuild, diffutils-$(diffutils-version), static,,V=1)
echo "GNU Diffutils $(diffutils-version)" > $@
$(ibidir)/file-$(file-version): $(ibidir)/coreutils-$(coreutils-version)
- tarball=file-$(file-version).tar.gz
+ tarball=file-$(file-version).tar.lz
$(call import-source, $(file-url), $(file-checksum))
$(call gbuild, file-$(file-version), static, \
--disable-libseccomp, V=1)
@@ -875,7 +891,7 @@ $(ibidir)/file-$(file-version): $(ibidir)/coreutils-$(coreutils-version)
$(ibidir)/findutils-$(findutils-version): \
$(ibidir)/coreutils-$(coreutils-version)
- tarball=findutils-$(findutils-version).tar.xz
+ tarball=findutils-$(findutils-version).tar.lz
$(call import-source, $(findutils-url), $(findutils-checksum))
$(call gbuild, findutils-$(findutils-version), static,,V=1)
echo "GNU Findutils $(findutils-version)" > $@
@@ -885,19 +901,19 @@ $(ibidir)/gawk-$(gawk-version): \
$(ibidir)/mpfr-$(mpfr-version) \
$(ibidir)/coreutils-$(coreutils-version)
- # Download the tarball.
+# Download the tarball.
tarball=gawk-$(gawk-version).tar.lz
$(call import-source, $(gawk-url), $(gawk-checksum))
- # AWK doesn't include RPATH by default, so we'll have to manually
- # include it using the `patchelf' program (which was a dependency
- # of Bash). Just note that AWK produces two executables (for
- # example `gawk-4.2.1' and `gawk') and a symbolic link `awk' to one
- # of those executables.
+# AWK doesn't include RPATH by default, so we'll have to manually
+# include it using the 'patchelf' program (which was a dependency of
+# Bash). Just note that AWK produces two executables (for example
+# 'gawk-4.2.1' and 'gawk') and a symbolic link 'awk' to one of those
+# executables.
$(call gbuild, gawk-$(gawk-version), static, \
--with-readline=$(idir))
- # Correct the RPATH on systems that have installed patchelf.
+# Correct the RPATH on systems that have installed patchelf.
if [ -f $(ibdir)/patchelf ]; then
if [ -f $(ibdir)/gawk ]; then
$(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/gawk
@@ -908,31 +924,31 @@ $(ibidir)/gawk-$(gawk-version): \
fi
fi
- # Build final target.
+# Build final target.
echo "GNU AWK $(gawk-version)" > $@
$(ibidir)/libiconv-$(libiconv-version): \
$(ibidir)/pkg-config-$(pkgconfig-version)
- tarball=libiconv-$(libiconv-version).tar.gz
+ tarball=libiconv-$(libiconv-version).tar.lz
$(call import-source, $(libiconv-url), $(libiconv-checksum))
$(call gbuild, libiconv-$(libiconv-version), static)
echo "GNU libiconv $(libiconv-version)" > $@
$(ibidir)/libunistring-$(libunistring-version): \
$(ibidir)/libiconv-$(libiconv-version)
- tarball=libunistring-$(libunistring-version).tar.xz
+ tarball=libunistring-$(libunistring-version).tar.lz
$(call import-source, $(libunistring-url), $(libunistring-checksum))
$(call gbuild, libunistring-$(libunistring-version), static,, \
-j$(numthreads))
echo "GNU libunistring $(libunistring-version)" > $@
$(ibidir)/libxml2-$(libxml2-version): $(ibidir)/patchelf-$(patchelf-version)
- # The libxml2 tarball also contains Python bindings which are built
- # and installed to a system directory by default. If you don't need
- # the Python bindings, the easiest solution is to compile without
- # Python support: `./configure --without-python'. If you really need
- # the Python bindings, use `--with-python-install-dir=DIR' instead.
- tarball=libxml2-$(libxml2-version).tar.gz
+# The libxml2 tarball also contains Python bindings which are built
+# and installed to a system directory by default. If you don't need
+# the Python bindings, the easiest solution is to compile without
+# Python support: './configure --without-python'. If you really need
+# the Python bindings, use '--with-python-install-dir=DIR' instead.
+ tarball=libxml2-$(libxml2-version).tar.lz
$(call import-source, $(libxml2-url), $(libxml2-checksum))
$(call gbuild, libxml2-$(libxml2-version), static, \
--without-python, V=1)
@@ -946,8 +962,8 @@ $(ibidir)/gettext-$(gettext-version): \
$(ibidir)/libunistring-$(libunistring-version)
tarball=gettext-$(gettext-version).tar.lz
$(call import-source, $(gettext-url), $(gettext-checksum))
- $(call gbuild, gettext-$(gettext-version), static,, \
- V=1 -j$(numthreads))
+ $(call gbuild, gettext-$(gettext-version), static, \
+ --without-emacs, V=1 -j$(numthreads))
echo "GNU gettext $(gettext-version)" > $@
$(ibidir)/git-$(git-version): \
@@ -955,7 +971,7 @@ $(ibidir)/git-$(git-version): \
$(ibidir)/curl-$(curl-version) \
$(ibidir)/gettext-$(gettext-version) \
$(ibidir)/libiconv-$(libiconv-version)
- tarball=git-$(git-version).tar.xz
+ tarball=git-$(git-version).tar.lz
if [ x$(on_mac_os) = xyes ]; then
export LDFLAGS="$$LDFLAGS -lcharset"
fi
@@ -979,7 +995,7 @@ $(ibidir)/gmp-$(gmp-version): \
# and generally to view large files easily when the project is built in a
# container with a minimal OS.
$(ibidir)/less-$(less-version): $(ibidir)/ncurses-$(ncurses-version)
- tarball=less-$(less-version).tar.gz
+ tarball=less-$(less-version).tar.lz
$(call import-source, $(less-url), $(less-checksum))
$(call gbuild, less-$(less-version), static,,-j$(numthreads))
if [ -f $(ibdir)/patchelf ]; then
@@ -988,9 +1004,9 @@ $(ibidir)/less-$(less-version): $(ibidir)/ncurses-$(ncurses-version)
echo "Less $(less-version)" > $@
# On Mac OS, libtool does different things, so to avoid confusion, we'll
-# prefix GNU's libtool executables with `glibtool'.
+# prefix GNU's libtool executables with 'glibtool'.
$(ibidir)/libtool-$(libtool-version): $(ibidir)/m4-$(m4-version)
- tarball=libtool-$(libtool-version).tar.xz
+ tarball=libtool-$(libtool-version).tar.lz
$(call import-source, $(libtool-url), $(libtool-checksum))
$(call gbuild, libtool-$(libtool-version), static, \
--program-prefix=g, V=1 -j$(numthreads))
@@ -998,162 +1014,44 @@ $(ibidir)/libtool-$(libtool-version): $(ibidir)/m4-$(m4-version)
echo "GNU Libtool $(libtool-version)" > $@
$(ibidir)/grep-$(grep-version): $(ibidir)/coreutils-$(coreutils-version)
- tarball=grep-$(grep-version).tar.xz
+ tarball=grep-$(grep-version).tar.lz
$(call import-source, $(grep-url), $(grep-checksum))
$(call gbuild, grep-$(grep-version), static,,V=1)
echo "GNU Grep $(grep-version)" > $@
-$(ibidir)/libbsd-$(libbsd-version): $(ibidir)/coreutils-$(coreutils-version)
- tarball=libbsd-$(libbsd-version).tar.xz
- $(call import-source, $(libbsd-url), $(libbsd-checksum))
- if [ x$(on_mac_os) = xyes ]; then
- echo "" > $@
- else
- $(call gbuild, libbsd-$(libbsd-version), static,,V=1)
- echo "Libbsd $(libbsd-version)" > $@
- fi
-
-# We need to apply a patch to the M4 source to be used properly on macOS.
-# The patch [1] was inspired by Homebrew's build instructions [1].
-#
-# [1] https://raw.githubusercontent.com/macports/macports-ports/edf0ee1e2cf/devel/m4/files/secure_snprintf.patch
-# [2] https://github.com/Homebrew/homebrew-core/blob/master/Formula/m4.rb
-#
# M4 doesn't depend on PatchELF, but just to be consistent with the
# levels/phases introduced here (where the compressors are level 1,
# PatchELF is level 2, and ...), we'll set it as a dependency.
$(ibidir)/m4-$(m4-version): $(ibidir)/patchelf-$(patchelf-version)
- tarball=m4-$(m4-version).tar.gz
+ tarball=m4-$(m4-version).tar.lz
$(call import-source, $(m4-url), $(m4-checksum))
- cd $(ddir)
- unpackdir=m4-$(m4-version)
- rm -rf $$unpackdir
- tar xf $(tdir)/$$tarball
- mv m4-* $$unpackdir
- cd $$unpackdir
- if [ x$(on_mac_os) = xyes ]; then
- sed 's|if !(((__GLIBC__ > 2|if !defined(__APPLE__) \&\& !(((__GLIBC__ > 2|' \
- lib/vasnprintf.c > lib/vasnprintf_edited.c
- mv lib/vasnprintf_edited.c lib/vasnprintf.c
- fi
- ./configure --prefix=$(idir) LDFLAGS="$(LDFLAGS)" \
- CPPFLAGS="$(CPPFLAGS)"
- make V=1 -j$(numthreads)
- make V=1 install
- cd ..
- rm -rf $$unpackdir
+ $(call gbuild, m4-$(m4-version), static,,V=1)
echo "GNU M4 $(m4-version)" > $@
-# Metastore is used (through a Git hook) to restore the source modification
-# dates of files after a Git checkout. Another Git hook saves all file
-# metadata just before a commit (to allow restoration after a
-# checkout). Since this project is managed in Makefiles, file modification
-# dates are critical to not having to redo the whole analysis after
-# checking out between branches.
-#
-# Note that we aren't using the standard version of Metastore, but a fork
-# of it that is maintained in this repository:
-# https://gitlab.com/makhlaghi/metastore-fork
-#
-# Note that the prerequisites `coreutils', `gawk' and `sed' are not
-# metastore oficial dependencies, but they are necessaries to run our steps
-# before and after the installation.
-#
-# Libbsd is not necessary on macOS systems, because macOS is already a
-# BSD-based distribution. But on GNU/Linux systems, it is necessary.
-$(ibidir)/metastore-$(metastore-version): \
- $(ibidir)/sed-$(sed-version) \
- $(ibidir)/git-$(git-version) \
- $(ibidir)/gawk-$(gawk-version) \
- $(ibidir)/libbsd-$(libbsd-version) \
- $(ibidir)/coreutils-$(coreutils-version)
-
- # Download the tarball.
- tarball=metastore-$(metastore-version).tar.gz
- $(call import-source, $(metastore-url), $(metastore-checksum))
-
- # Metastore doesn't have any `./configure' script. So we'll just
- # call `pwd' as a place-holder for the `./configure' command.
- #
- # File attributes are also not available on some systems, since the
- # main purpose here is modification dates (and not attributes),
- # we'll also set the `NO_XATTR' flag.
- #
- # After installing Metastore, write the relevant hooks into this
- # system's Git hooks, while setting the system-specific
- # directories/files.
- #
- # Note that the metastore -O and -G options used in this template
- # are currently only available in a fork of `metastore' hosted at:
- # https://github.com/mohammad-akhlaghi/metastore
- #
- # Checking for presence of `.git'. When the project source is
- # downloaded from a non-Git source (for example from arXiv), there
- # is no `.git' directory to work with. So until we find a better
- # solution, avoid the step to to add the Git hooks.
- current_dir=$$(pwd); \
- $(call gbuild, metastore-$(metastore-version), static,, \
- NO_XATTR=1 V=1,,pwd,PREFIX=$(idir))
-
- # Correct RPATH when necessary.
- if [ -f $(ibdir)/patchelf ]; then
- $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/metastore
- fi
-
- # If this project is being built in a directory version controlled
- # by Git, copy the hooks into the Git configuation.
- if [ -f $(ibdir)/metastore ]; then
- if [ -d .git ]; then
- user=$$(whoami)
- group=$$(groups | awk '{print $$1}')
- cd $$current_dir
- for f in pre-commit post-checkout; do
- sed -e's|@USER[@]|'$$user'|g' \
- -e's|@GROUP[@]|'$$group'|g' \
- -e's|@BINDIR[@]|$(ibdir)|g' \
- -e's|@TOP_PROJECT_DIR[@]|'$$current_dir'|g' \
- reproduce/software/shell/git-$$f > .git/hooks/$$f
- chmod +x .git/hooks/$$f
- done
- fi
- echo "Metastore (forked) $(metastore-version)" > $@
- else
- echo; echo; echo
- echo "*****************"
- echo "metastore couldn't be installed!"
- echo
- echo "Its used for preserving timestamps on Git commits."
- echo "Its useful for development, not simple running of "
- echo "the project. So we won't stop the configuration "
- echo "because it wasn't built."
- echo "*****************"
- echo "" > $@
- fi
-
$(ibidir)/mpfr-$(mpfr-version): $(ibidir)/gmp-$(gmp-version)
- tarball=mpfr-$(mpfr-version).tar.xz
+ tarball=mpfr-$(mpfr-version).tar.lz
$(call import-source, $(mpfr-url), $(mpfr-checksum))
$(call gbuild, mpfr-$(mpfr-version), static, , , make check)
echo "GNU Multiple Precision Floating-Point Reliably $(mpfr-version)" > $@
$(ibidir)/pkg-config-$(pkgconfig-version): $(ibidir)/patchelf-$(patchelf-version)
- # Download the tarball.
- tarball=pkg-config-$(pkgconfig-version).tar.gz
+# Download the tarball.
+ tarball=pkg-config-$(pkgconfig-version).tar.lz
$(call import-source, $(pkgconfig-url), $(pkgconfig-checksum))
- # An existing `libiconv' can cause a conflict with `pkg-config',
- # this is why `libiconv' depends on `pkg-config'. On a clean build,
- # `pkg-config' is built first. But when we don't have a clean build
- # (and `libiconv' exists) there will be a problem. So before
- # re-building `pkg-config', we'll remove any installation of
- # `libiconv'.
+# An existing 'libiconv' can cause a conflict with 'pkg-config'; this
+# is why 'libiconv' depends on 'pkg-config'. On a clean build,
+# 'pkg-config' is built first. But when we don't have a clean build
+# (and 'libiconv' exists) there will be a problem. So before
+# re-building 'pkg-config', we'll remove any installation of
+# 'libiconv'.
rm -f $(ildir)/libiconv* $(idir)/include/iconv.h
- # Some Mac OS systems may have a version of the GNU C Compiler
- # (GCC) installed that doesn't support some necessary features of
- # building Glib (as part of pkg-config). So to be safe, for Mac
- # systems, we'll make sure it will use LLVM's Clang.
+# Some Mac OS systems may have a version of the GNU C Compiler (GCC)
+# installed that doesn't support some necessary features of building
+# Glib (as part of pkg-config). So to be safe, for Mac systems, we'll
+# make sure it will use LLVM's Clang.
if [ x$(on_mac_os) = xyes ]; then export compiler="CC=clang"
else export compiler=""
fi
@@ -1163,7 +1061,7 @@ $(ibidir)/pkg-config-$(pkgconfig-version): $(ibidir)/patchelf-$(patchelf-version
echo "pkg-config $(pkgconfig-version)" > $@
$(ibidir)/sed-$(sed-version): $(ibidir)/coreutils-$(coreutils-version)
- tarball=sed-$(sed-version).tar.xz
+ tarball=sed-$(sed-version).tar.lz
$(call import-source, $(sed-url), $(sed-checksum))
$(call gbuild, sed-$(sed-version), static,,V=1)
echo "GNU Sed $(sed-version)" > $@
@@ -1171,7 +1069,7 @@ $(ibidir)/sed-$(sed-version): $(ibidir)/coreutils-$(coreutils-version)
$(ibidir)/texinfo-$(texinfo-version): \
$(ibidir)/perl-$(perl-version) \
$(ibidir)/gettext-$(gettext-version)
- tarball=texinfo-$(texinfo-version).tar.xz
+ tarball=texinfo-$(texinfo-version).tar.lz
$(call import-source, $(texinfo-url), $(texinfo-checksum))
$(call gbuild, texinfo-$(texinfo-version), static)
if [ -f $(ibdir)/patchelf ]; then
@@ -1181,14 +1079,14 @@ $(ibidir)/texinfo-$(texinfo-version): \
echo "GNU Texinfo $(texinfo-version)" > $@
$(ibidir)/which-$(which-version): $(ibidir)/coreutils-$(coreutils-version)
- tarball=which-$(which-version).tar.gz
+ tarball=which-$(which-version).tar.lz
$(call import-source, $(which-url), $(which-checksum))
$(call gbuild, which-$(which-version), static)
echo "GNU Which $(which-version)" > $@
# GNU ISL is necessary to build GCC.
$(ibidir)/isl-$(isl-version): $(ibidir)/gmp-$(gmp-version)
- tarball=isl-$(isl-version).tar.bz2
+ tarball=isl-$(isl-version).tar.lz
$(call import-source, $(isl-url), $(isl-checksum))
if [ $(host_cc) = 1 ]; then
echo "" > $@
@@ -1200,7 +1098,7 @@ $(ibidir)/isl-$(isl-version): $(ibidir)/gmp-$(gmp-version)
# GNU MPC is necessary to build GCC.
$(ibidir)/mpc-$(mpc-version): $(ibidir)/mpfr-$(mpfr-version)
- tarball=mpc-$(mpc-version).tar.gz
+ tarball=mpc-$(mpc-version).tar.lz
$(call import-source, $(mpc-url), $(mpc-checksum))
if [ $(host_cc) = 1 ]; then
echo "" > $@
@@ -1223,33 +1121,34 @@ $(ibidir)/mpc-$(mpc-version): $(ibidir)/mpfr-$(mpfr-version)
# -----------------------
#
# The installation of Binutils can cause problems during the build of other
-# programs (http://savannah.nongnu.org/bugs/?56294), but its necessary for
+# programs since it provides the linker that is used to build them
+# (http://savannah.nongnu.org/bugs/?56294). However, it is necessary for
# GCC. Therefore, we'll set all other basic programs as Binutils
-# prerequisite and GCC (the final basic target) ultimately just depends on
-# Binutils.
+# prerequisites, so GCC (the almost-final basic target) ultimately just
+# depends on Binutils.
$(ibidir)/binutils-$(binutils-version): \
- $(ibidir)/sed-$(sed-version) \
+ $(ibidir)/git-$(git-version) \
$(ibidir)/isl-$(isl-version) \
$(ibidir)/mpc-$(mpc-version) \
- $(ibidir)/wget-$(wget-version) \
- $(ibidir)/grep-$(grep-version) \
+ $(ibidir)/sed-$(sed-version) \
$(ibidir)/file-$(file-version) \
$(ibidir)/gawk-$(gawk-version) \
+ $(ibidir)/grep-$(grep-version) \
+ $(ibidir)/wget-$(wget-version) \
$(ibidir)/which-$(which-version) \
- $(ibidir)/texinfo-$(texinfo-version) \
$(ibidir)/libtool-$(libtool-version) \
- $(ibidir)/metastore-$(metastore-version) \
- $(ibidir)/findutils-$(findutils-version) \
+ $(ibidir)/texinfo-$(texinfo-version) \
+ $(ibidir)/coreutils-$(coreutils-version) \
$(ibidir)/diffutils-$(diffutils-version) \
- $(ibidir)/coreutils-$(coreutils-version)
+ $(ibidir)/findutils-$(findutils-version)
- # Download the tarball.
+# Download the tarball.
tarball=binutils-$(binutils-version).tar.lz
$(call import-source, $(binutils-url), $(binutils-checksum))
- # Binutils' assembler (`as') and linker (`ld') will conflict with
- # other compilers. So if we don't build our own compiler, we'll use
- # the host opertating system's equivalents by just making links.
+# Binutils' assembler ('as') and linker ('ld') will conflict with
+# other compilers. So if we don't build our own compiler, we'll use
+# the host operating system's equivalents by just making links.
if [ x$(on_mac_os) = xyes ]; then
$(call makelink,as)
$(call makelink,ar)
@@ -1261,20 +1160,20 @@ $(ibidir)/binutils-$(binutils-version): \
echo "" > $@
else
- # Build binutils with the standard 'gbuild' function.
+# Build binutils with the standard 'gbuild' function.
$(call gbuild, binutils-$(binutils-version), static, \
--with-lib-path=$(sys_library_path), \
-j$(numthreads) )
- # The `ld' linker of Binutils needs several `*crt*.o' files from
- # the host's GNU C Library to run. On some systems these object
- # files aren't installed in standard places. We defined
- # `LIBRARY_PATH' and that fixed the problem for many
- # systems. However, some software (for example ImageMagick)
- # over-write `LIBRARY_PATH', therefore there is no other way than
- # to put a link to these necessary files in our local build
- # directory. IMPORTANT NOTE: later, when we build the GNU C
- # Library in the project, we should remove this step.
+# The 'ld' linker of Binutils needs several '*crt*.o' files from
+# the host's GNU C Library to run. On some systems these object
+# files aren't installed in standard places. We defined
+# 'LIBRARY_PATH' and that fixed the problem for many
+# systems. However, some software (for example ImageMagick)
+# over-write 'LIBRARY_PATH', therefore there is no other way than
+# to put a link to these necessary files in our local build
+# directory. IMPORTANT NOTE: later, when we build the GNU C Library
+# in the project, we should remove this step.
if ! [ x"$(sys_library_path)" = x ]; then
for f in $(sys_library_path)/*crt*.o; do
b=$$($(ibdir)/basename $$f)
@@ -1282,11 +1181,11 @@ $(ibidir)/binutils-$(binutils-version): \
done
fi
- # Write the final target.
+# Write the final target.
echo "GNU Binutils $(binutils-version)" > $@
fi
-# We are having issues with `libiberty' (part of GCC) on Mac. So for now,
+# We are having issues with 'libiberty' (part of GCC) on Mac. So for now,
# GCC won't be built there. Since almost no natural science paper's
# processing depends so strongly on the compiler used, for now, this isn't
# a bad assumption, but we are indeed searching for a solution.
@@ -1295,7 +1194,7 @@ $(ibidir)/binutils-$(binutils-version): \
# environment. So, we'll build GCC after building all the basic tools that
# are often used in a configure and build scripts of GCC components.
#
-# Objective C and Objective C++ is necessary for installing `matplotlib'.
+# Objective C and Objective C++ are necessary for installing 'matplotlib'.
#
# We are currently having problems installing GCC on macOS, so for the time
# being, if the project is being run on a macOS, we'll just set a link.
@@ -1322,45 +1221,41 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version)
echo; exit 1
}
- # Download the tarball.
- tarball=gcc-$(gcc-version).tar.xz
+# Download the tarball.
+ tarball=gcc-$(gcc-version).tar.lz
$(call import-source, $(gcc-url), $(gcc-checksum))
- # To avoid any previous build in '.local/bin' causing problems in
- # this build/links of this GCC, we'll first delete all the possibly
- # built/existing compilers in this project. Note that GCC also
- # installs several executables like this 'x86_64-pc-linux-gnu-gcc',
- # 'x86_64-pc-linux-gnu-gcc-ar' or 'x86_64-pc-linux-gnu-g++'.
+# To avoid any previous build in '.local/bin' causing problems in
+# this build/links of this GCC, we'll first delete all the possibly
+# built/existing compilers in this project. Note that GCC also
+# installs several executables like this 'x86_64-pc-linux-gnu-gcc',
+# 'x86_64-pc-linux-gnu-gcc-ar' or 'x86_64-pc-linux-gnu-g++'.
rm -f $(ibdir)/*g++ $(ibdir)/cpp $(ibdir)/gfortran
rm -rf $(ildir)/gcc $(ildir)/libcc* $(ildir)/libgcc*
rm -f $(ibdir)/*gcc* $(ibdir)/gcov* $(ibdir)/cc $(ibdir)/c++
rm -rf $(ildir)/libgfortran* $(ildir)/libstdc* rm $(idir)/x86_64*
- # GCC builds is own libraries in '$(idir)/lib64'. But all other
- # libraries are in '$(idir)/lib'. Since this project is only for a
- # single architecture, we can trick GCC into building its libraries
- # in '$(idir)/lib' by defining the '$(idir)/lib64' as a symbolic
- # link to '$(idir)/lib'.
+# Build (or set links) to GCC.
if [ $(host_cc) = 1 ]; then
- # Put links to the host's tools in '.local/bin'. Note that some
- # macOS systems have both a native clang *and* a GNU C Compiler
- # (note that this is different from the "normal" macOS situation
- # where 'gcc' actually points to clang, here we mean when 'gcc'
- # is actually the GNU C Compiler).
- #
- # In such cases, the GCC isn't complete and using it will cause
- # problems when building high-level tools (for example openBLAS,
- # rpcsvc-proto, CMake, xlsxio, Python or Matplotlib among
- # others). To avoid such situations macOSs are configured like
- # this: we'll simply set 'gcc' to point to 'clang' and won't set
- # 'gcc' to point to the system's 'gcc'.
- #
- # Also, note that LLVM's clang doesn't have a C Pre-Processor. So
- # we will only put a link to the host's 'cpp' if the system is
- # not macOS. On macOS systems that have a real GCC installed,
- # having GNU CPP in the project build directory is known to cause
- # problems with 'libX11'.
+# Put links to the host's tools in '.local/bin'. Note that some
+# macOS systems have both a native clang *and* a GNU C Compiler
+# (note that this is different from the "normal" macOS situation
+# where 'gcc' actually points to clang; here we mean when 'gcc' is
+# actually the GNU C Compiler).
+#
+# In such cases, the GCC isn't complete and using it will cause
+# problems when building high-level tools (for example openBLAS,
+# rpcsvc-proto, CMake, xlsxio, Python or Matplotlib among
+# others). To avoid such situations macOSs are configured like
+# this: we'll simply set 'gcc' to point to 'clang' and won't set
+# 'gcc' to point to the system's 'gcc'.
+#
+# Also, note that LLVM's clang doesn't have a C Pre-Processor. So
+# we will only put a link to the host's 'cpp' if the system is not
+# macOS. On macOS systems that have a real GCC installed, having
+# GNU CPP in the project build directory is known to cause problems
+# with 'libX11'.
$(call makelink,gfortran)
if [ x$(on_mac_os) = xyes ]; then
$(call makelink,clang)
@@ -1373,52 +1268,61 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version)
$(call makelink,g++)
fi
- # We also want to have the two 'cc' and 'c++' in the build
- # directory that point to the selected compiler. With the checks
- # above, 'gcc' and 'g++' will point to the proper compiler, so
- # we'll use them to define 'cc' and 'c++'.
+# We also want to have the two 'cc' and 'c++' in the build
+# directory that point to the selected compiler. With the checks
+# above, 'gcc' and 'g++' will point to the proper compiler, so
+# we'll use them to define 'cc' and 'c++'.
$(call makelink,gcc,,cc)
$(call makelink,g++,,c++)
- # Get the first line of the compiler's '--version' output and put
- # that into the target (so we know want compiler was used).
+# Get the first line of the compiler's '--version' output and put
+# that into the target (so we know what compiler was used).
ccinfo=$$(gcc --version | awk 'NR==1')
echo "C compiler (""$$ccinfo"")" > $@
else
- # Mark the current directory.
+# Mark the current directory.
current_dir=$$(pwd)
- # We don't want '.local/lib' and '.local/lib64' to be separate.
- ln -fs $(ildir) $(idir)/lib64
-
- # By default we'll build GCC in the RAM to avoid building so many
- # files and possibly harming the hard-drive or SSD. But if the
- # RAM doesn't have enough space, we can't use it.
+# By default 'ddir' (where GCC is decompressed and built) is in the
+# RAM (on systems that support '/dev/shm'). This is done to avoid
+# building so many small/temporary files and possibly harming the
+# hard-drive or SSD. But if the RAM doesn't have enough space, we
+# should use the hard-drive or SSD. During its build GCC's build
+# directory will become about 7GiB (for GCC 11.2.0). So at this step,
+# we are making sure that more than 7,500,000 KiB (roughly 7.15GiB,
+# or 7.68GB, since 'df' reports 1KiB blocks) are available before
+# GCC starts to build. Note that the 4th column of 'df' is
+# the "available" space at the time of running, not the full
+# space. So the background RAM that the OS will be using during
+# Maneage is accounted for. Also consider that GCC is built alone
+# (no other Maneage software is built at the same time as GCC).
in_ram=$$(df $(ddir) \
- | awk 'NR==2{print ($$4>10000000) ? "yes" : "no"}'); \
+ | awk 'NR==2{print ($$4>7500000) ? "yes" : "no"}'); \
if [ $$in_ram = "yes" ]; then odir=$(ddir)
else
- odir=$(BDIR)/software/build-tmp-gcc
+ odir=$(BDIR)/software/build-tmp-gcc-due-to-lack-of-space
if [ -d $$odir ]; then rm -rf $$odir; fi
mkdir $$odir
fi
- # Go into the proper directory, unpack GCC and prepare the
- # 'build' directory inside it for all the built files.
+# Go into the proper directory, unpack GCC and prepare the 'build'
+# directory inside it for all the built files.
cd $$odir
rm -rf gcc-$(gcc-version)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
if [ $$odir != $(ddir) ]; then
ln -s $$odir/gcc-$(gcc-version) $(ddir)/gcc-$(gcc-version)
fi
cd gcc-$(gcc-version)
+
+# Set the build directory for the processing.
mkdir build
cd build
- # Configure, build and install GCC, if any of three steps fails,
- # the error message will be printed.
+# Configure, build and install GCC; if any of the three steps fails,
+# the error message will be printed.
if ! ../configure SHELL=$(ibdir)/bash \
--prefix=$(idir) \
--with-mpc=$(idir) \
@@ -1437,26 +1341,25 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version)
--enable-languages=c,c++,fortran,objc,obj-c++ \
--disable-nls \
--disable-libada \
- --disable-multilib \
- --disable-multiarch; then error_message; fi
+ --disable-multilib; then error_message; fi
if ! make SHELL=$(ibdir)/bash -j$(numthreads); then error_message; fi
if ! make SHELL=$(ibdir)/bash install; then error_message; fi
- # We need to manually fix the RPATH inside GCC's libraries, the
- # programs built by GCC already have RPATH.
+# We need to manually fix the RPATH inside GCC's libraries; the
+# programs built by GCC already have RPATH.
tempname=$$odir/gcc-$(gcc-version)/build/rpath-temp-copy
if [ -f $(ibdir)/patchelf ]; then
- # Go over all the installed GCC libraries (its executables are
- # fine!).
+# Go over all the installed GCC libraries (its executables are
+# fine!).
for f in $$(find $(idir)/libexec/gcc -type f) $(ildir)/libstdc++*; do
- # Make sure this is a static library, copy it to a temporary
- # name (to avoid any possible usage of the file while it is
- # being corrected), and add RPATH inside of it and put the
- # corrected file back in its place. In the case of the
- # standard C++ library, we also need to manually insert a
- # linking to libiconv.
+# Make sure this is a dynamically linked library, copy it to a temporary
+# name (to avoid any possible usage of the file while it is
+# being corrected), and add RPATH inside of it and put the
+# corrected file back in its place. In the case of the standard
+# C++ library, we also need to manually insert a linking to
+# libiconv.
if file $$f | grep -q "dynamically linked"; then
cp $$f $$tempname
patchelf --set-rpath $(ildir) $$tempname
@@ -1470,8 +1373,8 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version)
done
fi
- # Come back up to the un-packing directory and delete the GCC
- # source directory.
+# Come back up to the unpacking directory and delete the GCC
+# source directory.
cd ../..
rm -rf gcc-$(gcc-version)
cd $$current_dir
@@ -1480,11 +1383,11 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version)
rm $(ddir)/gcc-$(gcc-version);
fi
- # Set 'cc' to point to 'gcc'.
+# Set 'cc' to point to 'gcc'.
ln -sf $(ibdir)/gcc $(ibdir)/cc
ln -sf $(ibdir)/g++ $(ibdir)/c++
- # Write the final target.
+# Write the final target.
echo "GNU Compiler Collection (GCC) $(gcc-version)" > $@
fi
@@ -1503,18 +1406,19 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version)
# If the project is built in a minimal environment, there is no text
# editor, making it hard to work on the project. By default a minimal
# (relatively user-friendly: GNU Nano) text editor will thus also be built
-# at the end of the "basic" tools. More advanced editors are available as
-# optional high-level programs. GNU Nano is a very light-weight and small
-# command-line text editor (around 3.5 Mb after installation!).
+# at the end of the "basic" tools. More advanced editors (for example Emacs
+# and Vim) are available as optional high-level programs. GNU Nano is a
+# very lightweight and small command-line text editor (around 3.5 MB after
+# installation!).
#
# The editor is a top-level target in the basic tools (given to
# 'targets-proglib' above). Hence nothing depends on it, and it just
# depends on GCC. This is done because some projects may choose to not have
-# nano (and use their own optional high-level text editor). To do this,
-# they just have to manually remove 'nano' from 'targets-proglib' above and
+# nano (and use their own optional high-level text editor). To do this, you
+# just have to manually remove 'nano' from 'targets-proglib' above and
# add your optional text editor in 'TARGETS.conf'.
$(ibidir)/nano-$(nano-version): $(ibidir)/gcc-$(gcc-version)
- tarball=nano-$(nano-version).tar.xz
+ tarball=nano-$(nano-version).tar.lz
$(call import-source, $(nano-url), $(nano-checksum))
$(call gbuild, nano-$(nano-version), static)
echo "GNU Nano $(nano-version)" > $@
diff --git a/reproduce/software/make/build-rules.mk b/reproduce/software/make/build-rules.mk
index 66c77bc..c25dfb1 100644
--- a/reproduce/software/make/build-rules.mk
+++ b/reproduce/software/make/build-rules.mk
@@ -3,7 +3,7 @@
# imported into 'basic.mk' and 'high-level.mk'. They should be activated
# with Make's 'Call' function.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -29,8 +29,13 @@
# its checksum and if it is correct, remove the extra suffix.
#
# Arguments:
-# 1: The optional URL to use for this tarball.
-# 2: The expeced checksum of the tarball.
+# 1: The optional base URL (directory) to use for this tarball.
+# 2: The expected checksum of the tarball.
+# 3: The upstream name of the tarball file, if not automatically derived
+# from the version number.
+# 4: [Optional]: Alternative upstream base URL (directory) for the
+# tarball, to be used in preference to user or Maneage backup
+# servers.
#
# Necessary shell variables
# 'tarball': This is the name of the actual tarball file without a
@@ -56,7 +61,14 @@ import-source = final=$(tdir)/$$tarball; \
tarballurl=$(topbackupserver)/$$tarball; \
else \
bservers="$(backupservers_all)"; \
- tarballurl=$$url/$$tarball; \
+ if [ "x$(strip $(3))" = "x" ]; then \
+ tarballurl=$$url/$$tarball; \
+ else \
+ tarballurl=$$url/$(strip $(3)); \
+ fi; \
+ fi; \
+ if [ x"$(4)" != x ]; then \
+ bservers="$(strip $(4)) $$bservers"; \
fi; \
if [ -f $(ibdir)/wget ]; then \
downloader="wget --no-use-server-timestamps -O"; \
@@ -89,6 +101,48 @@ import-source = final=$(tdir)/$$tarball; \
+# Double-check an already downloaded R source
+# -------------------------------------------
+#
+# It is probably too late to protect the system if you have already
+# installed an insecure or wrong R package. However, it's still useful
+# to check that the source package is the one that you think it is.
+#
+# Calculate the checksum and exit with a non-zero error code if
+# there's a mismatch, after informing the user.
+#
+# Arguments:
+# 1: The expected checksum of the tarball.
+#
+# Necessary shell variables
+# 'tarball': This is the name of the actual tarball file without a
+# directory.
+double-check-R-source = final=$(tdir)/R-project/$$tarball; \
+ exp_checksum="$(strip $(1))"; \
+ if [ x"$$exp_checksum" = x"NO-CHECK-SUM" ]; then \
+ result=0; \
+ else \
+ if type sha512sum > /dev/null 2>/dev/null; then \
+ checksum=$$(sha512sum "$$final" | awk '{print $$1}'); \
+ if [ x"$$checksum" = x"$$exp_checksum" ]; then \
+ result=0; \
+ else \
+ echo "ERROR: Non-matching checksum: $$final"; \
+ echo "Checksum should be: $$exp_checksum"; \
+ echo "Checksum is: $$checksum"; \
+ result=1; \
+ exit 1; \
+ fi; \
+ else \
+ echo "ERROR: sha512sum is unavailable."; \
+ exit 1; \
+ fi; \
+ fi
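+
+# A minimal call sketch (the package and variable names here are
+# hypothetical; real checksums live in 'checksums.conf'):
+#
+#     tarball=foo_1.0.tar.gz
+#     $(call double-check-R-source, $(r-cran-foo-checksum))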
+
+
+
+
+
# Unpack a tarball
# ----------------
#
@@ -106,7 +160,7 @@ uncompress = csuffix=$$(echo $$utarball \
intarrm=0; \
intar=$$utarball; \
fi; \
- if tar xf $$intar; then \
+ if tar -xf $$intar; then \
if [ x$$intarrm = x1 ]; then rm $$intar; fi; \
else \
echo; echo "Tar error"; exit 1; \
@@ -166,10 +220,13 @@ gbuild = if [ x$(static_build) = xyes ] && [ "x$(2)" = xstatic ]; then \
else shellop="SHELL=/bin/sh"; \
fi; \
\
+ if [ x$$gbuild_prefix = x ]; then prefixdir="$(idir)"; \
+ else prefixdir="$$gbuild_prefix"; fi; \
+ \
if [ -f "$$confscript" ]; then \
if [ x"$(strip $(1))" = x"zlib-$(zlib-version)" ]; then \
- configop="--prefix=$(idir)"; \
- else configop="$$shellop --prefix=$(idir)"; \
+ configop="--prefix=$$prefixdir"; \
+ else configop="$$shellop --prefix=$$prefixdir"; \
fi; \
fi; \
\
@@ -190,7 +247,7 @@ gbuild = if [ x$(static_build) = xyes ] && [ "x$(2)" = xstatic ]; then \
make "$$shellop" install $(7); \
cd ..; \
fi; \
- rm -rf $(1)
+ rm -rf $(1);
diff --git a/reproduce/software/make/high-level.mk b/reproduce/software/make/high-level.mk
index 6ea782c..34c829b 100644
--- a/reproduce/software/make/high-level.mk
+++ b/reproduce/software/make/high-level.mk
@@ -3,7 +3,7 @@
# ------------------------------------------------------------------------
# !!!!! IMPORTANT NOTES !!!!!
#
-# This Makefile will be run by the initial `./project configure' script. It
+# This Makefile will be run by the initial './project configure' script. It
# is not included into the project afterwards.
#
# This Makefile builds the high-level (optional) software in Maneage that
@@ -12,8 +12,8 @@
#
# ------------------------------------------------------------------------
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -60,6 +60,9 @@ patchdir = "$(shell pwd)"/reproduce/software/patches
itidir = $(BDIR)/software/installed/version-info/tex
ictdir = $(BDIR)/software/installed/version-info/cite
ipydir = $(BDIR)/software/installed/version-info/python
+ircrandir = $(BDIR)/software/installed/version-info/r-cran
+ilibrcrandir = $(BDIR)/software/installed/lib/R/library
+
# Targets to build.
ifeq ($(strip $(all_highlevel)),1)
@@ -75,7 +78,7 @@ ifeq ($(strip $(all_highlevel)),1)
# included here because there is no explicit target for them: they will
# be built as part of the other package.
targets-proglib := $(filter-out minizip-% lapack-% ghostscript-fonts-%, \
- $(shell awk '/^# CLASS:PYTHON/{good=0} \
+ $(shell awk '/^# CLASS:(PYTHON|R-CRAN)/{good=0} \
good==1 && !/^#/ && $$1 ~ /-version$$/ { \
printf("%s %s ", $$1, $$3)} \
/^# CLASS:HIGHLEVEL/{good=1}' \
@@ -85,14 +88,25 @@ ifeq ($(strip $(all_highlevel)),1)
# List all existing Python packages.
targets-python := $(shell \
- awk '/^# CLASS:PYTHON/{good=1} \
- good==1 && !/^#/ && $$1 ~ /-version$$/ {printf("%s %s ",$$1,$$3)}' \
- reproduce/software/config/versions.conf | sed 's/version //g')
+ awk '/^# CLASS:PYTHON-START/{good=1} good; \
+ /^# CLASS:PYTHON-END/{good=0}' \
+ reproduce/software/config/versions.conf \
+ | awk '!/^#/' \
+ | sed 's/-version = /-/g')
+
+ # List all existing R-CRAN packages.
+ targets-r-cran := $(shell \
+ awk '/^# CLASS:R-CRAN-START/{good=1} good; \
+ /^# CLASS:R-CRAN-END/{good=0}' \
+ reproduce/software/config/versions.conf \
+ | awk '!/^#/' \
+ | sed 's/-version = /-/g')
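+
+  # For example (an illustrative sketch of the assumed layout; the
+  # package name is hypothetical), an entry between the class markers
+  # in 'versions.conf' such as:
+  #
+  #     # CLASS:R-CRAN-START
+  #     r-cran-foo-version = 1.2.3
+  #     # CLASS:R-CRAN-END
+  #
+  # is turned by the pipeline above into 'r-cran-foo-1.2.3'.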
else
- # Append the version of each software to its name. We are using a Make
+ # Append the version of each software package to its name. We are using a Make
# feature where a variable name is defined with another variable.
targets-python := $(foreach p,$(top-level-python),$(p)-$($(p)-version))
+ targets-r-cran := $(foreach p,$(top-level-r-cran),$(p)-$($(p)-version))
targets-proglib := $(foreach p,$(top-level-programs),$(p)-$($(p)-version))
endif
@@ -100,6 +114,7 @@ endif
# Ultimate Makefile target.
all: $(foreach p, $(targets-proglib), $(ibidir)/$(p)) \
$(foreach p, $(targets-python), $(ipydir)/$(p)) \
+ $(foreach p, $(targets-r-cran), $(ircrandir)/$(p)) \
$(itidir)/texlive
# Define the shell environment
@@ -112,14 +127,18 @@ all: $(foreach p, $(targets-proglib), $(ibidir)/$(p)) \
#
# To investigate:
#
-# 1) Set SHELL to `$(ibdir)/env - NAME=VALUE $(ibdir)/bash' and set all
-# the parameters defined bellow as `NAME=VALUE' statements before
+# 1) Set SHELL to '$(ibdir)/env - NAME=VALUE $(ibdir)/bash' and set all
+# the parameters defined below as 'NAME=VALUE' statements before
# calling Bash. This will enable us to completely ignore the user's
# native environment.
#
-# 2) Add `--noprofile --norc' to `.SHELLFLAGS' so doesn't load the
+# 2) Add '--noprofile --norc' to '.SHELLFLAGS' so the shell doesn't load the
# user's environment.
#
+# 3) Add the '-u' flag so that an error occurs if an environment
+# variable is unset; this reduces the chance of catastrophic
+# file removal with 'rm -fr ../../$${FORGOT_TO_DEFINE_THIS}'.
+#
# Shell settings similar to 'basic.mk':
.ONESHELL:
export PATH := $(ibdir)
@@ -131,6 +150,9 @@ export PKG_CONFIG_LIBDIR := $(ildir)/pkgconfig
export CPPFLAGS := -I$(idir)/include -Wno-nullability-completeness
export PKG_CONFIG_PATH := $(ildir)/pkgconfig:$(idir)/share/pkgconfig
+# Disable built-in rules (which are not needed here!)
+.SUFFIXES:
+
# Settings specific to this Makefile.
export CC := $(ibdir)/gcc
export CXX := $(ibdir)/g++
@@ -147,16 +169,16 @@ export C_INCLUDE_PATH := $(iidir)
export CPLUS_INCLUDE_PATH := $(iidir)
endif
-# Recipe startup script, see `reproduce/software/shell/bashrc.sh'.
+# Recipe startup script, see 'reproduce/software/shell/bashrc.sh'.
export PROJECT_STATUS := configure_highlevel
export BASH_ENV := $(shell pwd)/reproduce/software/shell/bashrc.sh
# Until we build our own C library, without this, our GCC won't be able to
# compile anything! Note that on most systems (in particular
-# non-Debian-based), `sys_cpath' will be empty.
+# non-Debian-based), 'sys_cpath' will be empty.
export CPATH := $(sys_cpath)
-# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is
+# RPATH is automatically written in macOS, so 'DYLD_LIBRARY_PATH' is
# ultimately redundant. But on some systems, even having a single value
# causes crashes (see bug #56682). So we'll just give it no value at all.
export DYLD_LIBRARY_PATH :=
@@ -164,8 +186,8 @@ export DYLD_LIBRARY_PATH :=
# On Debian-based OSs, the basic C libraries are in a target-specific
# location, not in standard places. Until we merge the building of the C
# library, it is thus necessary to include this location here. On systems
-# that don't need it, `sys_library_path' is just empty. This is necessary
-# for `ld'.
+# that don't need it, 'sys_library_path' is just empty. This is necessary
+# for 'ld'.
#
# If this variable is not defined, it will be interpreted as the current
# directory. In this case, when the program source has a 'specs' directory,
@@ -179,12 +201,18 @@ endif
# Building flags:
#
# C++ flags: when we build GCC, the C++ standard library needs to link with
-# libiconv. So it is necessary to generically include `-liconv' for all C++
+# libiconv. So it is necessary to generically include '-liconv' for all C++
# builds.
ifeq ($(host_cc),0)
export CXXFLAGS := -liconv
endif
+# Custom installation prefix for software that can cause conflicts
+# with others: to avoid crowding the main Maneage-installed software
+# directory, we'll put them all in a 'custom' directory.
+idircustom = $(idir)/custom
+$(idircustom):; mkdir $@
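+# For example (only a sketch with a hypothetical package name 'foo';
+# see the 'libmd' and 'util-linux' rules below for the real usage),
+# such a package would be installed under its own sub-directory like
+# this:
+#
+#     idirfoo = $(idircustom)/foo
+#     $(ibidir)/foo-$(foo-version): | $(idircustom)
+#             export gbuild_prefix=$(idirfoo)
+#             $(call gbuild, foo-$(foo-version), static)
+#             echo "Foo $(foo-version)" > $@
+#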
+
# Servers to use as backup. Maneage already has some fixed servers that can
# be used to download software tarballs. They are in a configuration
# file. But we give precedence to the "user" backup servers.
@@ -211,6 +239,8 @@ backupservers = $(filter-out $(topbackupserver),$(backupservers_all))
# Import rules to build specialized software
include reproduce/software/make/xorg.mk
include reproduce/software/make/python.mk
+include reproduce/software/make/r-cran.mk
+
@@ -226,61 +256,27 @@ include reproduce/software/make/python.mk
#
# We would prefer to build static libraries, but some compilers like LLVM
# don't have static capabilities, so they'll only build dynamic/shared
-# libraries. Therefore, we can't use the easy `.a' suffix for static
+# libraries. Therefore, we can't use the easy '.a' suffix for static
# libraries as targets and there are different conventions for shared
# library names.
-
-# Until version 0.11.0 is released, we are using the version corresponding
-# to commit 014954db (603 commits after version 0.10.0, most recent when
-# first importing log4cxx into this project).
-#
-# Note that after cloning the project, the following changes are necessary
-# in `configure.ac'.
-# - Update the final name of the tarball and its version (from `git
-# - describe') by modifying the `AC_INIT' line:
-# AC_INIT([apachelog4cxx], [0.10.0-603-014954db])
-# - Because of the long file names in the project, some files will not be
-# packaged by default, so pass the `tar-ustar' option to Automake (the
-# `AM_INIT_AUTOMAKE' line of `configure.ac':
-# AM_INIT_AUTOMAKE([foreign subdir-objects -Wall tar-ustar])
-#
-# You can then simply bootstrap the project and make the distribution
-# tarball like this:
-# ./autogen.sh && ./configure && make -j8 && make dist-lzip
-#
-# Unfortunately we have to re-run the `autogen.sh' script on the tarball to
-# build it because it will complain about the version of libtool, so until
-# the version 0.11.0 of log4cxx, we'll have to run `autogen.sh' on the
-# unpacked source also.
$(ibidir)/apachelog4cxx-$(apachelog4cxx-version): \
+ $(ibidir)/cmake-$(cmake-version) \
$(ibidir)/expat-$(expat-version) \
$(ibidir)/apr-util-$(apr-util-version) \
$(ibidir)/automake-$(automake-version)
- tarball=apachelog4cxx-$(apachelog4cxx-version).tar.lz
+ tarball=apache-log4cxx-$(apachelog4cxx-version).tar.lz
$(call import-source, $(apachelog4cxx-url), $(apachelog4cxx-checksum))
- pdir=apachelog4cxx-$(apachelog4cxx-version)
- rm -rf $(ddir)/$$pdir
- topdir=$(pwd)
- cd $(ddir)
- tar xf $(tdir)/$$tarball
- cd $$pdir
- ./autogen.sh
- ./configure SHELL=$(ibdir)/bash --prefix=$(idir)
- make -j$(numthreads) SHELL=$(ibdir)/bash
- make install
- cd ..
- rm -rf $$pdir
- cd $$topdir
+ $(call cbuild, apache-log4cxx-$(apachelog4cxx-version), static)
echo "Apache log4cxx $(apachelog4cxx-version)" > $@
$(ibidir)/apr-$(apr-version):
- tarball=apr-$(apr-version).tar.gz
+ tarball=apr-$(apr-version).tar.lz
$(call import-source, $(apr-url), $(apr-checksum))
$(call gbuild, apr-$(apr-version), ,--disable-static)
echo "Apache Portable Runtime $(apr-version)" > $@
$(ibidir)/apr-util-$(apr-util-version): $(ibidir)/apr-$(apr-version)
- tarball=apr-util-$(apr-util-version).tar.gz
+ tarball=apr-util-$(apr-util-version).tar.lz
$(call import-source, $(apr-util-url), $(apr-util-checksum))
$(call gbuild, apr-util-$(apr-util-version), , \
--disable-static \
@@ -291,20 +287,19 @@ $(ibidir)/apr-util-$(apr-util-version): $(ibidir)/apr-$(apr-version)
$(ibidir)/atlas-$(atlas-version):
- tarball=lapack-$(lapack-version).tar.gz
+ tarball=lapack-$(lapack-version).tar.lz
$(call import-source, $(lapack-url), $(lapack-checksum))
- tarball=atlas-$(atlas-version).tar.bz2
+ tarball=atlas-$(atlas-version).tar.lz
$(call import-source, $(atlas-url), $(atlas-checksum))
- # Get the operating system specific features (how to get
- # CPU frequency and the library suffixes). To make the steps
- # more readable, the different library version suffixes are
- # named with a single character: `s' for no version in the
- # name, `m' for the major version suffix, and `f' for the
- # full version suffix.
- # GCC in Mac OS doesn't work. To work around this issue, on Mac
- # systems we force ATLAS to use `clang' instead of `gcc'.
+# Get the operating system specific features (how to get CPU
+# frequency and the library suffixes). To make the steps more
+# readable, the different library version suffixes are named with a
+# single character: 's' for no version in the name, 'm' for the major
+# version suffix, and 'f' for the full version suffix. GCC in Mac OS
+# doesn't work. To work around this issue, on Mac systems we force
+# ATLAS to use 'clang' instead of 'gcc'.
if [ x$(on_mac_os) = xyes ]; then
s=dylib
m=3.dylib
@@ -321,8 +316,8 @@ $(ibidir)/atlas-$(atlas-version):
| sed "s/.*: \([0-9.]*\).*/\1/")
fi
- # See if the shared libraries should be build for a single CPU
- # thread or multiple threads.
+# See if the shared libraries should be built for a single CPU thread
+# or multiple threads.
N=$$(nproc)
srcdir=$$(pwd)/reproduce/software/make
if [ $$N = 1 ]; then
@@ -331,25 +326,25 @@ $(ibidir)/atlas-$(atlas-version):
sharedmk=$$srcdir/atlas-multiple.mk
fi
- # The linking step here doesn't recognize the `-Wl' in the
- # `rpath_command'.
+# The linking step here doesn't recognize the '-Wl' in the
+# 'rpath_command'.
export LDFLAGS=-L$(ildir)
cd $(ddir)
- tar xf $(tdir)/atlas-$(atlas-version).tar.bz2
+ tar -xf $(tdir)/atlas-$(atlas-version).tar.lz
cd ATLAS
rm -rf build
mkdir build
cd build
../configure -b 64 -D c -DPentiumCPS=$$core \
- --with-netlib-lapack-tarfile=$(tdir)/lapack-$(lapack-version).tar.gz \
+ --with-netlib-lapack-tarfile=$(tdir)/lapack-$(lapack-version).tar.lz \
--cripple-atlas-performance \
-Fa alg -fPIC --shared $$clangflag \
--prefix=$(idir)
- # Static build.
+# Static build.
make
- # Currently the shared libraries have problems on macOS.
+# Currently the shared libraries have problems on macOS.
if [ "x$(on_mac_os)" != xyes ]; then
cd lib
make -f $$sharedmk
@@ -362,21 +357,21 @@ $(ibidir)/atlas-$(atlas-version):
ln -fs $(ildir)/liblapack.$$f $(ildir)/liblapack.$$m
fi
- # Install the libraries.
+# Install the libraries.
make install
- # We need to check the existance of `libptlapack.a', but we can't
- # do this in the `&&' steps above (it will conflict). So we'll do
- # the check after seeing if `libtatlas.so' is installed, then we'll
- # finalize the build (delete the untarred directory).
+# We need to check the existence of 'libptlapack.a', but we can't do
+# this in the '&&' steps above (it will conflict). So we'll do the
+# check after seeing if 'libtatlas.so' is installed, then we'll
+# finalize the build (delete the untarred directory).
if [ "x$(on_mac_os)" != xyes ]; then \
[ -e lib/libptlapack.a ] && cp lib/libptlapack.a $(ildir); \
cd $(ddir); \
rm -rf ATLAS; \
fi
- # We'll check the full installation with the static library (not
- # currently building shared library on Mac.
+# We'll check the full installation with the static library (we're
+# not currently building the shared library on Mac).
if [ -f $(ildir)/libatlas.a ]; then \
echo "ATLAS $(atlas-version)" > $@; \
fi
@@ -391,7 +386,7 @@ $(ibidir)/boost-$(boost-version): \
rm -rf $(ddir)/$$unpackdir
topdir=$(pwd)
cd $(ddir)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd $$unpackdir
./bootstrap.sh --prefix=$(idir) --with-libraries=all \
--with-python=python3
@@ -404,15 +399,15 @@ $(ibidir)/boost-$(boost-version): \
$(ibidir)/cfitsio-$(cfitsio-version):
- # Download the tarball
- tarball=cfitsio-$(cfitsio-version).tar.gz
+# Download the tarball
+ tarball=cfitsio-$(cfitsio-version).tar.lz
$(call import-source, $(cfitsio-url), $(cfitsio-checksum))
- # CFITSIO hard-codes '@rpath' inside the shared library on
- # Mac systems. So we need to change it to our library
- # installation path. It doesn't affect GNU/Linux, so we'll
- # just do it in any case to keep things clean.
- topdir=$(pwd); cd $(ddir); tar xf $(tdir)/$$tarball
+# CFITSIO hard-codes '@rpath' inside the shared library on Mac
+# systems. So we need to change it to our library installation
+# path. It doesn't affect GNU/Linux, so we'll just do it in any case
+# to keep things clean.
+ topdir=$(pwd); cd $(ddir); tar -xf $(tdir)/$$tarball
customtar=cfitsio-$(cfitsio-version)-custom.tar.gz
cd cfitsio-$(cfitsio-version)
sed configure -e's|@rpath|$(ildir)|g' > configure_tmp
@@ -422,9 +417,9 @@ $(ibidir)/cfitsio-$(cfitsio-version):
tar cf $$customtar cfitsio-$(cfitsio-version)
cd $$topdir
- # Continue the standard build on the customized tarball. Note that
- # with the installation of CFITSIO, `fpack' and `funpack' are not
- # installed by default. Because of that, they are added explicity.
+# Continue the standard build on the customized tarball. Note that
+# with the installation of CFITSIO, 'fpack' and 'funpack' are not
+# installed by default. Because of that, they are added explicitly.
export gbuild_tar=$(ddir)/$$customtar
$(call gbuild, cfitsio-$(cfitsio-version), , \
--enable-sse2 --enable-reentrant \
@@ -437,24 +432,25 @@ $(ibidir)/cairo-$(cairo-version): \
$(ibidir)/pixman-$(pixman-version) \
$(ibidir)/libpng-$(libpng-version) \
$(ibidir)/freetype-$(freetype-version)
- tarball=cairo-$(cairo-version).tar.xz
+ tarball=cairo-$(cairo-version).tar.lz
$(call import-source, $(cairo-url), $(cairo-checksum))
$(call gbuild, cairo-$(cairo-version), static, \
--with-x=yes, -j$(numthreads) V=1)
echo "Cairo $(cairo-version)" > $@
# Eigen is just headers! So it doesn't need to be compiled. Once unpacked
-# it has a checksum after `eigen-eigen', so we'll just use a `*' to choose
+# it has a checksum after 'eigen-eigen', so we'll just use a '*' to choose
# the unpacked directory.
$(ibidir)/eigen-$(eigen-version):
- tarball=eigen-$(eigen-version).tar.gz
+ tarball=eigen-$(eigen-version).tar.lz
$(call import-source, $(eigen-url), $(eigen-checksum))
rm -rf $(ddir)/eigen-eigen-*
- topdir=$(pwd); cd $(ddir); tar xf $(tdir)/$$tarball
- cd eigen-eigen-*
- cp -r Eigen $(iidir)/eigen3
+ topdir=$(pwd); cd $(ddir); tar -xf $(tdir)/$$tarball
+ cd eigen-$(eigen-version)
+ if ! [ -d $(iidir)/eigen3 ]; then mkdir $(iidir)/eigen3; fi
+ cp -r Eigen/* $(iidir)/eigen3/
cd $$topdir
- rm -rf $(ddir)/eigen-eigen-*
+ rm -rf $(ddir)/eigen-$(eigen-version)
echo "Eigen $(eigen-version)" > $@
# GNU Emacs is an advanced text editor (among many other things!), so it
@@ -468,7 +464,7 @@ $(ibidir)/eigen-$(eigen-version):
# except the core Emacs functionality (using '--without-all') and we are
# also disabling all graphic user interface features (using '--without-x').
$(ibidir)/emacs-$(emacs-version):
- tarball=emacs-$(emacs-version).tar.xz
+ tarball=emacs-$(emacs-version).tar.lz
$(call import-source, $(emacs-url), $(emacs-checksum))
$(call gbuild, emacs-$(emacs-version), static, \
--without-all --without-x \
@@ -483,15 +479,28 @@ $(ibidir)/expat-$(expat-version):
echo "Expat $(expat-version)" > $@
$(ibidir)/fftw-$(fftw-version):
- # Prepare the source tarball.
- tarball=fftw-$(fftw-version).tar.gz
+
+# Prepare the source tarball.
+ tarball=fftw-$(fftw-version).tar.lz
$(call import-source, $(fftw-url), $(fftw-checksum))
- # FFTW's single and double precission libraries must be built
- # independently: for the the single-precision library, we need to
- # add the `--enable-float' option. We will build this first, then
- # the default double-precision library.
- confop="--enable-shared --enable-threads --enable-avx --enable-sse2"
+# FFTW's single and double precision libraries must be built
+# independently: for the single-precision library, we need to add
+# the '--enable-float' option. We will build this first, then the
+# default double-precision library.
+#
+# There are Intel-specific optimizations that can be enabled by
+# adding the following two options to 'confop'
+#
+# --enable-avx --enable-sse2
+#
+# However, they cause crashes on non-Intel processors (this has been
+# confirmed on ARM's aarch64). So in the generic scenario they are
+# removed. How these optimizations affect the numeric accuracy of the
+# result should be studied before they are optionally added for
+# Intel-based CPUs (and ignored for others).
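+#
+# As an untested sketch, that conditional enabling could eventually
+# look something like this (checking the machine type at configure
+# time):
+#
+#     if [ x"$$(uname -m)" = xx86_64 ]; then
+#         confop="$$confop --enable-avx --enable-sse2"
+#     fi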
+ confop="--enable-shared --enable-threads"
$(call gbuild, fftw-$(fftw-version), static, \
$$confop --enable-float)
$(call gbuild, fftw-$(fftw-version), static, \
@@ -500,19 +509,19 @@ $(ibidir)/fftw-$(fftw-version):
echo "FFTW $(fftw-version) \citep{fftw}" > $@
$(ibidir)/freetype-$(freetype-version): $(ibidir)/libpng-$(libpng-version)
- tarball=freetype-$(freetype-version).tar.gz
+ tarball=freetype-$(freetype-version).tar.lz
$(call import-source, $(freetype-url), $(freetype-checksum))
$(call gbuild, freetype-$(freetype-version), static)
echo "FreeType $(freetype-version)" > $@
$(ibidir)/gperf-$(gperf-version):
- tarball=gperf-$(gperf-version).tar.gz
+ tarball=gperf-$(gperf-version).tar.lz
$(call import-source, $(gperf-url), $(gperf-checksum))
$(call gbuild, gperf-$(gperf-version), static)
echo "GNU gperf $(gperf-version)" > $@
$(ibidir)/gsl-$(gsl-version):
- tarball=gsl-$(gsl-version).tar.gz
+ tarball=gsl-$(gsl-version).tar.lz
$(call import-source, $(gsl-url), $(gsl-checksum))
$(call gbuild, gsl-$(gsl-version), static)
echo "GNU Scientific Library $(gsl-version)" > $@
@@ -531,14 +540,14 @@ $(ibidir)/hdf5-$(hdf5-version): $(ibidir)/openmpi-$(openmpi-version)
# HEALPix includes the source of its C, C++, Python (and several other
# languages) libraries within one tarball. We will include the Python
# installation only when any other Python module is requested (in
-# `TARGETS.conf').
+# 'TARGETS.conf').
#
-# Note that the default `./configure' script is an interactive script which
-# is hard to automate. So we need to go into the `autotools' directory of
-# the `C' and `cxx' directories and configure the GNU Build System (with
-# `autoreconf', which uses `autoconf' and `automake') to easily build the
+# Note that the default './configure' script is an interactive script which
+# is hard to automate. So we need to go into the 'autotools' directory of
+# the 'C' and 'cxx' directories and configure the GNU Build System (with
+# 'autoreconf', which uses 'autoconf' and 'automake') to easily build the
# HEALPix C/C++ libraries in batch mode.
-ifeq ($(strip $(top-level-python)),)
+ifeq ($(strip $(targets-python)),)
healpix-python-dep =
else
healpix-python-dep = $(ipydir)/matplotlib-$(matplotlib-version) \
@@ -548,7 +557,7 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \
$(ibidir)/cfitsio-$(cfitsio-version) \
$(ibidir)/autoconf-$(autoconf-version) \
$(ibidir)/automake-$(automake-version)
- tarball=healpix-$(healpix-version).tar.gz
+ tarball=healpix-$(healpix-version).tar.lz
$(call import-source, $(healpix-url), $(healpix-checksum))
if [ x"$(healpix-python-dep)" = x ]; then
pycommand1="echo no-healpy-because-no-other-python"
@@ -559,7 +568,7 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \
fi
rm -rf $(ddir)/Healpix_$(healpix-version)
topdir=$(pwd); cd $(ddir);
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd Healpix_$(healpix-version)/src/C/autotools/
autoreconf --install
./configure --prefix=$(idir)
@@ -568,6 +577,13 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \
cd ../../cxx/autotools/
autoreconf --install
./configure --prefix=$(idir)
+
+# With CFITSIO 4.0, the 'CFITSIO_VERSION' macro has three
+# components. But this version of Healpix doesn't yet account for
+# this.
+ sed -i -e's/CFITSIO_VERSION/fitsversion/' cxxsupport/fitshandle.cc
+
+# Continue with the building.
make V=1 -j$(numthreads) SHELL=$(ibdir)/bash
make install
cd ../../healpy
@@ -578,19 +594,52 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \
cp $(dtexdir)/healpix.tex $(ictdir)/
echo "HEALPix $(healpix-version) \citep{healpix}" > $@
+$(ibidir)/libbsd-$(libbsd-version): $(ibidir)/libmd-$(libmd-version)
+ tarball=libbsd-$(libbsd-version).tar.lz
+ $(call import-source, $(libbsd-url), $(libbsd-checksum))
+ if [ x$(on_mac_os) = xyes ]; then
+ echo "" > $@
+ else
+ export LDFLAGS="-L$(idirlibmd)/lib $$LDFLAGS"
+ export CPPFLAGS="-I$(idirlibmd)/include $$CPPFLAGS"
+ $(call gbuild, libbsd-$(libbsd-version), static,,V=1)
+ echo "Libbsd $(libbsd-version)" > $@
+ fi
+
$(ibidir)/libidn-$(libidn-version):
- tarball=libidn-$(libidn-version).tar.gz
+ tarball=libidn-$(libidn-version).tar.lz
$(call import-source, $(libidn-url), $(libidn-checksum))
$(call gbuild, libidn-$(libidn-version), static, \
--disable-doc, -j$(numthreads) V=1)
echo "Libidn $(libidn-version)" > $@
$(ibidir)/libjpeg-$(libjpeg-version):
- tarball=jpegsrc.$(libjpeg-version).tar.gz
+ tarball=libjpeg-$(libjpeg-version).tar.lz
$(call import-source, $(libjpeg-url), $(libjpeg-checksum))
- $(call gbuild, jpeg-9b, static,,V=1)
+ $(call gbuild, libjpeg-$(libjpeg-version), static,,V=1)
echo "Libjpeg $(libjpeg-version)" > $@
+# libmd is a set of "message digest" functions that are available in the
+# C library of BSD-based systems, but not others (like GNU-based
+# systems). It includes hash functions like MD5 and SHAs.
+#
+# Libmd is being installed in a non-standard location because its headers
+# (like 'md5.h') will conflict with similarly named headers from the system
+# during the building of Binutils later! So any program that needs libmd's
+# headers or libraries (like 'libbsd') should add this special location to
+# its CPPFLAGS and LDFLAGS.
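+#
+# For instance, the 'libbsd' rule above does exactly this before
+# calling 'gbuild':
+#
+#     export LDFLAGS="-L$(idirlibmd)/lib $$LDFLAGS"
+#     export CPPFLAGS="-I$(idirlibmd)/include $$CPPFLAGS"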
+idirlibmd=$(idircustom)/libmd
+$(ibidir)/libmd-$(libmd-version): | $(idircustom)
+ tarball=libmd-$(libmd-version).tar.lz
+ $(call import-source, $(libmd-url), $(libmd-checksum))
+ if [ x$(on_mac_os) = xyes ]; then
+ echo "" > $@
+ else
+ export gbuild_prefix=$(idirlibmd)
+ $(call gbuild, libmd-$(libmd-version), static,,V=1)
+ echo "Libmd $(libmd-version)" > $@
+ fi
+
$(ibidir)/libnsl-$(libnsl-version): \
$(ibidir)/libtirpc-$(libtirpc-version) \
$(ibidir)/rpcsvc-proto-$(rpcsvc-proto-version)
@@ -603,11 +652,11 @@ $(ibidir)/libnsl-$(libnsl-version): \
$(ibidir)/libpaper-$(libpaper-version): \
$(ibidir)/automake-$(automake-version)
- # Download the tarball.
- tarball=libpaper-$(libpaper-version).tar.gz
+# Download the tarball.
+ tarball=libpaper-$(libpaper-version).tar.lz
$(call import-source, $(libpaper-url), $(libpaper-checksum))
- # Unpack, build the configure system, build and install.
+# Unpack, build the configure system, build and install.
cd $(ddir)
tar -xf $(tdir)/$$tarball
unpackdir=libpaper-$(libpaper-version)
@@ -620,10 +669,10 @@ $(ibidir)/libpaper-$(libpaper-version): \
cd ..
rm -rf $$unpackdir
- # Post-processing: according to Linux From Scratch, libpaper
- # expects that packages will install files into this directory and
- # 'paperconfig' is a script which will invoke 'run-parts' if
- # '/etc/libpaper.d' exists
+# Post-processing: according to Linux From Scratch, libpaper expects
+# that packages will install files into this directory and
+# 'paperconfig' is a script which will invoke 'run-parts' if
+# '/etc/libpaper.d' exists.
mkdir -vp $(idir)/etc/libpaper.d
sed -e's|MANEAGESHELL|$(SHELL)|' $(shsrcdir)/run-parts.in \
> $(ibdir)/run-parts
@@ -631,15 +680,19 @@ $(ibidir)/libpaper-$(libpaper-version): \
echo "Libpaper $(libpaper-version)" > $@
$(ibidir)/libpng-$(libpng-version):
- tarball=libpng-$(libpng-version).tar.xz
+
+# The option '-DPNG_ARM_NEON_OPT=0' prevents an arm64 'neon' library
+# from being required at compile time.
+ tarball=libpng-$(libpng-version).tar.lz
$(call import-source, $(libpng-url), $(libpng-checksum))
- $(call gbuild, libpng-$(libpng-version), static)
+ $(call gbuild, libpng-$(libpng-version), static, \
+ CFLAGS="-DPNG_ARM_NEON_OPT=0")
echo "Libpng $(libpng-version)" > $@
$(ibidir)/libtiff-$(libtiff-version): $(ibidir)/libjpeg-$(libjpeg-version)
- tarball=tiff-$(libtiff-version).tar.gz
+ tarball=libtiff-$(libtiff-version).tar.lz
$(call import-source, $(libtiff-url), $(libtiff-checksum))
- $(call gbuild, tiff-$(libtiff-version), static, \
+ $(call gbuild, libtiff-$(libtiff-version), static, \
--disable-jbig \
--disable-webp \
--disable-zstd)
@@ -652,12 +705,90 @@ $(ibidir)/libtirpc-$(libtirpc-version):
--disable-gssapi, V=1)
echo "libtirpc $(libtirpc-version)" > $@
+# Metastore is used (through a Git hook) to restore the source modification
+# dates of files after a Git checkout. Another Git hook saves all file
+# metadata just before a commit (to allow restoration after a
+# checkout). Since this project is managed in Makefiles, file modification
+# dates are critical to avoid redoing the whole analysis after
+# checking out between branches.
+#
+# Note that we aren't using the standard version of Metastore, but a fork
+# of it that is maintained in this repository:
+# https://gitlab.com/makhlaghi/metastore-fork
+#
+# Libbsd is not necessary on macOS systems, because macOS is already a
+# BSD-based distribution. But on GNU/Linux systems, it is necessary.
+$(ibidir)/metastore-$(metastore-version): \
+ $(ibidir)/libbsd-$(libbsd-version)
+
+# Download the tarball.
+ tarball=metastore-$(metastore-version).tar.lz
+ $(call import-source, $(metastore-url), $(metastore-checksum))
+
+# Metastore doesn't have any './configure' script. So we'll just call
+# 'pwd' as a place-holder for the './configure' command.
+#
+# File attributes are also not available on some systems; since the
+# main purpose here is modification dates (and not attributes), we'll
+# also set the 'NO_XATTR' flag.
+#
+# After installing Metastore, write the relevant hooks into this
+# system's Git hooks, while setting the system-specific
+# directories/files.
+#
+# Note that the metastore -O and -G options used in this template are
+# currently only available in a fork of 'metastore' hosted at:
+# https://github.com/mohammad-akhlaghi/metastore
+#
+# Check for the presence of '.git'. When the project source is
+# downloaded from a non-Git source (for example from arXiv), there is
+# no '.git' directory to work with. So until we find a better
+# solution, avoid the step to add the Git hooks.
+ current_dir=$$(pwd); \
+ $(call gbuild, metastore-$(metastore-version), static,, \
+ NO_XATTR=1 V=1,,pwd,PREFIX=$(idir))
+
+# Correct RPATH when necessary.
+ if [ -f $(ibdir)/patchelf ]; then
+ $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/metastore
+ fi
+
+# If this project is being built in a directory version controlled
+# by Git, copy the hooks into the Git configuration.
+ if [ -f $(ibdir)/metastore ]; then
+ if [ -d .git ]; then
+ user=$$(whoami)
+ group=$$(groups | awk '{print $$1}')
+ cd $$current_dir
+ for f in pre-commit post-checkout; do
+ sed -e's|@USER[@]|'$$user'|g' \
+ -e's|@GROUP[@]|'$$group'|g' \
+ -e's|@BINDIR[@]|$(ibdir)|g' \
+ -e's|@TOP_PROJECT_DIR[@]|'$$current_dir'|g' \
+ reproduce/software/shell/git-$$f > .git/hooks/$$f
+ chmod +x .git/hooks/$$f
+ done
+ fi
+ echo "Metastore (forked) $(metastore-version)" > $@
+ else
+ echo; echo; echo
+ echo "*****************"
+ echo "metastore couldn't be installed!"
+ echo
+	          echo "It's used for preserving timestamps on Git commits."
+	          echo "It's useful for development, not for simply running "
+ echo "the project. So we won't stop the configuration "
+ echo "because it wasn't built."
+ echo "*****************"
+ echo "" > $@
+ fi
+
$(ibidir)/openblas-$(openblas-version):
- tarball=OpenBLAS-$(openblas-version).tar.gz
+ tarball=OpenBLAS-$(openblas-version).tar.lz
$(call import-source, $(openblas-url), $(openblas-checksum))
if [ x$(on_mac_os) = xyes ]; then export CC=clang; fi
cd $(ddir)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd OpenBLAS-$(openblas-version)
make -j$(numthreads)
make PREFIX=$(idir) install
@@ -666,7 +797,7 @@ $(ibidir)/openblas-$(openblas-version):
echo "OpenBLAS $(openblas-version)" > $@
$(ibidir)/openmpi-$(openmpi-version):
- tarball=openmpi-$(openmpi-version).tar.gz
+ tarball=openmpi-$(openmpi-version).tar.lz
$(call import-source, $(openmpi-url), $(openmpi-checksum))
$(call gbuild, openmpi-$(openmpi-version), static, \
--with-pmix=internal \
@@ -692,22 +823,22 @@ $(ibidir)/openssh-$(openssh-version):
echo "OpenSSH $(openssh-version)" > $@
$(ibidir)/pixman-$(pixman-version):
- tarball=pixman-$(pixman-version).tar.gz
+ tarball=pixman-$(pixman-version).tar.lz
$(call import-source, $(pixman-url), $(pixman-checksum))
$(call gbuild, pixman-$(pixman-version), static, , \
-j$(numthreads) V=1)
echo "Pixman $(pixman-version)" > $@
$(ibidir)/rpcsvc-proto-$(rpcsvc-proto-version):
- # 'libintl' is installed as part of GNU Gettext in
- # 'basic.mk'. rpcsvc-proto needs to link with it on macOS.
+# 'libintl' is installed as part of GNU Gettext in
+# 'basic.mk'. rpcsvc-proto needs to link with it on macOS.
if [ x$(on_mac_os) = xyes ]; then
export CC=clang
export CXX=clang++
export LDFLAGS="-lintl $$LDFLAGS"
fi
- # Download the tarball and build rpcsvc-proto.
+# Download the tarball and build rpcsvc-proto.
tarball=rpcsvc-proto-$(rpcsvc-proto-version).tar.xz
$(call import-source, $(rpcsvc-proto-url), $(rpcsvc-proto-checksum))
$(call gbuild, rpcsvc-proto-$(rpcsvc-proto-version), static)
@@ -721,35 +852,10 @@ $(ibidir)/tides-$(tides-version):
cp $(dtexdir)/tides.tex $(ictdir)/
echo "TIDES $(tides-version) \citep{tides}" > $@
-$(ibidir)/valgrind-$(valgrind-version): \
- $(ibidir)/patch-$(patch-version) \
- $(ibidir)/autoconf-$(autoconf-version) \
- $(ibidir)/automake-$(automake-version)
- # Import the tarball
- tarball=valgrind-$(valgrind-version).tar.bz2
+$(ibidir)/valgrind-$(valgrind-version):
+ tarball=valgrind-$(valgrind-version).tar.lz
$(call import-source, $(valgrind-url), $(valgrind-checksum))
-
- # For valgrind-3.15.0, see
- # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=946329 for a
- # report on an MPI-related compile bug and the two patches
- # below. These two patches and `automake` should allow valgrind to
- # compile with gcc-9.2.0.
- cd $(ddir)
- tar -xf $(tdir)/$$tarball
- valgrinddir=valgrind-$(valgrind-version)
- cd $${valgrinddir}
- printf "valgrindir=$${valgrinddir} ; pwd = %s .\n" $$($(ibdir)/pwd)
- if [ "x$(valgrind-version)" = "x3.15.0" ]; then
- patch --verbose -p1 < $(patchdir)/valgrind-3.15.0-mpi-fix1.patch
- patch --verbose -p1 < $(patchdir)/valgrind-3.15.0-mpi-fix2.patch
- fi
- autoreconf
- ./configure --prefix=$(idir)
- make -j$(numthreads)
- if ! make check -j$(numthreads); then
- echo; echo "Valgrind's 'make check' failed!"; echo
- fi
- make install
+ $(call gbuild, valgrind-$(valgrind-version), static)
echo "Valgrind $(valgrind-version)" > $@
$(ibidir)/yaml-$(yaml-version):
@@ -773,47 +879,48 @@ $(ibidir)/yaml-$(yaml-version):
# manually.
#
# For example, Libgit2 page recommends doing a static build, especially for
-# Mac systems (with `-DBUILD_SHARED_LIBS=OFF'): "It’s highly recommended
+# Mac systems (with '-DBUILD_SHARED_LIBS=OFF'): "It’s highly recommended
# that you build libgit2 as a static library for Xcode projects. This
# simplifies distribution significantly, as the resolution of dynamic
# libraries at runtime can be extremely problematic.". This is a major
# problem we have been having so far with Mac systems:
# https://libgit2.org/docs/guides/build-and-link
-# On macOS system, `libgit2' complains about not finding `_iconv*'
-# functions! But apparently `libgit2' has its own implementation of libiconv
+# On macOS systems, 'libgit2' complains about not finding '_iconv*'
+# functions! But apparently 'libgit2' has its own implementation of libiconv
# that it uses if it can't find libiconv on macOS. So, to fix this problem
-# it is necessary to use the option `-DUSE_ICONV=OFF` in the configure step.
+# it is necessary to use the option '-DUSE_ICONV=OFF' in the configure step.
$(ibidir)/libgit2-$(libgit2-version): $(ibidir)/cmake-$(cmake-version)
- tarball=libgit2-$(libgit2-version).tar.gz
+ tarball=libgit2-$(libgit2-version).tar.lz
$(call import-source, $(libgit2-url), $(libgit2-checksum))
$(call cbuild, libgit2-$(libgit2-version), static, \
-DUSE_SSH=OFF -DBUILD_CLAR=OFF \
-DTHREADSAFE=ON -DUSE_ICONV=OFF )
if [ x$(on_mac_os) = xyes ]; then
- install_name_tool -id $(ildir)/libgit2.1.0.dylib \
- $(ildir)/libgit2.1.0.dylib
+ install_name_tool -id $(ildir)/libgit2.1.3.dylib \
+ $(ildir)/libgit2.1.3.dylib
fi
echo "Libgit2 $(libgit2-version)" > $@
$(ibidir)/wcslib-$(wcslib-version): $(ibidir)/cfitsio-$(cfitsio-version)
- # Import the tarball.
- tarball=wcslib-$(wcslib-version).tar.bz2
+
+# Import the tarball.
+ tarball=wcslib-$(wcslib-version).tar.lz
$(call import-source, $(wcslib-url), $(wcslib-checksum))
- # If Fortran isn't present, don't build WCSLIB with it.
+# If Fortran isn't present, don't build WCSLIB with it.
if type gfortran &> /dev/null; then fortranopt="";
else fortranopt="--disable-fortran"
fi
- # Build WCSLIB.
+# Build WCSLIB.
$(call gbuild, wcslib-$(wcslib-version), , \
LIBS="-pthread -lcurl -lm" \
--with-cfitsiolib=$(ildir) \
--with-cfitsioinc=$(idir)/include \
--without-pgplot $$fortranopt)
if [ x$(on_mac_os) = xyes ]; then
- install_name_tool -id $(ildir)/libwcs.7.3.dylib \
- $(ildir)/libwcs.7.3.dylib
+ install_name_tool -id $(ildir)/libwcs.7.7.dylib \
+ $(ildir)/libwcs.7.7.dylib
fi
echo "WCSLIB $(wcslib-version)" > $@
@@ -843,17 +950,17 @@ $(ibidir)/astrometrynet-$(astrometrynet-version): \
$(ibidir)/cfitsio-$(cfitsio-version) \
$(ibidir)/libjpeg-$(libjpeg-version)
- # Import the tarball
- tarball=astrometry.net-$(astrometrynet-version).tar.gz
+# Import the tarball
+ tarball=astrometry.net-$(astrometrynet-version).tar.lz
$(call import-source, $(astrometrynet-url), $(astrometrynet-checksum))
- # We are modifying the Makefile in two steps because on Mac OS
- # system we do not have `/proc/cpuinfo' nor `free'. Since this is
- # only for the `report.txt', this changes do not causes problems in
- # running `astrometrynet'
+# We are modifying the Makefile in two steps because on Mac OS
+# systems we do not have '/proc/cpuinfo' or 'free'. Since this is
+# only for 'report.txt', these changes do not cause problems in
+# running 'astrometrynet'.
cd $(ddir)
rm -rf astrometry.net-$(astrometrynet-version)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd astrometry.net-$(astrometrynet-version)
sed -e 's|cat /proc/cpuinfo|echo "Ignoring CPU info"|' \
-e 's|-free|echo "Ignoring RAM info"|' Makefile > Makefile.tmp
@@ -874,7 +981,7 @@ $(ibidir)/autoconf-$(autoconf-version):
echo "GNU Autoconf $(autoconf-version)" > $@
$(ibidir)/automake-$(automake-version): $(ibidir)/autoconf-$(autoconf-version)
- tarball=automake-$(automake-version).tar.gz
+ tarball=automake-$(automake-version).tar.lz
$(call import-source, $(automake-url), $(automake-checksum))
$(call gbuild, automake-$(automake-version), static, ,V=1)
echo "GNU Automake $(automake-version)" > $@
@@ -886,17 +993,18 @@ $(ibidir)/bison-$(bison-version): $(ibidir)/help2man-$(help2man-version)
echo "GNU Bison $(bison-version)" > $@
# cdsclient is a set of software written in C to interact with astronomical
-# database servers. It is a dependency of `scamp' to be able to download
+# database servers. It is a dependency of 'scamp' to be able to download
# reference catalogues.
-# NOTE: we do not use a convencional `gbuild' installation because the
+#
+# NOTE: we do not use a conventional 'gbuild' installation because the
# programs are scripts and we need to touch them before installing.
# Otherwise this software will be re-built each time the configure step is
# invoked.
$(ibidir)/cdsclient-$(cdsclient-version):
- tarball=cdsclient-$(cdsclient-version).tar.gz
+ tarball=cdsclient-$(cdsclient-version).tar.lz
$(call import-source, $(cdsclient-url), $(cdsclient-checksum))
cd $(ddir)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd cdsclient-$(cdsclient-version)
touch *
./configure --prefix=$(idir)
@@ -906,25 +1014,27 @@ $(ibidir)/cdsclient-$(cdsclient-version):
rm -rf cdsclient-$(cdsclient-version)
echo "cdsclient $(cdsclient-version)" > $@
-# CMake can be built with its custom `./bootstrap' script.
-$(ibidir)/cmake-$(cmake-version): $(ibidir)/curl-$(curl-version)
- # Import the tarball
- tarball=cmake-$(cmake-version).tar.gz
+# CMake can be built with its custom './bootstrap' script and has no
+# dependencies beyond the basic Maneage software.
+$(ibidir)/cmake-$(cmake-version):
+
+# Import the tarball
+ tarball=cmake-$(cmake-version).tar.lz
$(call import-source, $(cmake-url), $(cmake-checksum))
- # After searching in `bootstrap', I couldn't find `LIBS', only
- # `LDFLAGS'. So the extra libraries are being added to `LDFLAGS',
- # not `LIBS'.
- #
- # On Mac systems, the build complains about `clang' specific
- # features, so we can't use our own GCC build here.
+# After searching in 'bootstrap', I couldn't find 'LIBS', only
+# 'LDFLAGS'. So the extra libraries are being added to 'LDFLAGS', not
+# 'LIBS'.
+#
+# On Mac systems, the build complains about 'clang' specific
+# features, so we can't use our own GCC build here.
if [ x$(on_mac_os) = xyes ]; then
export CC=clang
export CXX=clang++
fi
cd $(ddir)
rm -rf cmake-$(cmake-version)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd cmake-$(cmake-version)
./bootstrap --prefix=$(idir) --system-curl --system-zlib \
--system-bzip2 --system-liblzma --no-qt-gui \
@@ -942,7 +1052,7 @@ $(ibidir)/flex-$(flex-version): $(ibidir)/bison-$(bison-version)
echo "Flex $(flex-version)" > $@
$(ibidir)/gdb-$(gdb-version): $(ibidir)/python-$(python-version)
- tarball=gdb-$(gdb-version).tar.gz
+ tarball=gdb-$(gdb-version).tar.lz
export configure_in_different_directory=1;
$(call import-source, $(gdb-url), $(gdb-checksum))
$(call gbuild, gdb-$(gdb-version),,,V=1 -j$(numthreads))
@@ -956,49 +1066,51 @@ $(ibidir)/ghostscript-$(ghostscript-version): \
$(ibidir)/libtiff-$(libtiff-version) \
$(ibidir)/libpaper-$(libpaper-version)
- # Download the standard collection of Ghostscript fonts.
- tarball=ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.gz
+# Download the standard collection of Ghostscript fonts.
+ tarball=ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.lz
$(call import-source, $(ghostscript-fonts-std-url), \
$(ghostscript-fonts-std-checksum))
- # Download the extra GNU fonts for Ghostscript.
- tarball=ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.gz
+# Download the extra GNU fonts for Ghostscript.
+ tarball=ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.lz
$(call import-source, $(ghostscript-fonts-gnu-url), \
$(ghostscript-fonts-gnu-checksum))
- # Download the tarball
- tarball=ghostscript-$(ghostscript-version).tar.gz
+# Download the tarball
+ tarball=ghostscript-$(ghostscript-version).tar.lz
$(call import-source, $(ghostscript-url), $(ghostscript-checksum))
- # Unpack it and configure Ghostscript.
+# Unpack it and configure Ghostscript. The option
+# '-DPNG_ARM_NEON_OPT=0' prevents an arm64 'neon' library from being
+# required at compile time.
cd $(ddir)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd ghostscript-$(ghostscript-version)
./configure --prefix=$(idir) \
--disable-cups \
--enable-dynamic \
- --with-system-libtiff \
- --disable-compile-inits
+ --disable-compile-inits \
+ CFLAGS="-DPNG_ARM_NEON_OPT=0"
- # Build and install the program and the shared libraries.
+# Build and install the program and the shared libraries.
make V=1 -j$(numthreads)
make so V=1 -j$(numthreads)
make install
make soinstall
- # Install headers and set PostScript (PS) headers to point there.
+# Install headers and set PostScript (PS) headers to point there.
install -v -m644 base/*.h $(iidir)/ghostscript
ln -sfvn $(iidir)/ghostscript $(iidir)/ps
- # Install the fonts.
- tar -xvf $(tdir)/ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.gz \
+# Install the fonts.
+ tar -xvf $(tdir)/ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.lz \
-C $(idir)/share/ghostscript
- tar -xvf $(tdir)/ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.gz \
+ tar -xvf $(tdir)/ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.lz \
-C $(idir)/share/ghostscript
fc-cache -v $(idir)/share/ghostscript/fonts/
echo; echo "Ghostscript fonts added to Fontconfig."; echo;
- # Clean up and write the output target.
+# Clean up and write the output target.
cd ..
rm -rf ghostscript-$(ghostscript-version)
echo "GPL Ghostscript $(ghostscript-version)" > $@
@@ -1018,48 +1130,74 @@ $(ibidir)/gnuastro-$(gnuastro-version): \
echo "GNU Astronomy Utilities $(gnuastro-version) \citep{gnuastro}" > $@
$(ibidir)/help2man-$(help2man-version):
- tarball=help2man-$(help2man-version).tar.xz
+ tarball=help2man-$(help2man-version).tar.lz
$(call import-source, $(help2man-url), $(help2man-checksum))
$(call gbuild, help2man-$(help2man-version), static, ,V=1)
echo "Help2man $(Help2man-version)" > $@
+$(ibidir)/icu-$(icu-version): $(ibidir)/python-$(python-version)
+
+# First, we need to remove any possibly existing ICU installation
+# because it can cause conflicts during a new configuration
+# (especially if a new version is to replace the old one).
+ for i in data i18n io test tu uc; do
+ rm -fv $(ildir)/libicu$$i.*;
+ done
+
+# Prepare the tarball, unpack, build and install ICU (some
+# customizations are necessary, so we're not using 'gbuild').
+ tarball=icu-$(icu-version).tar.lz
+ $(call import-source, $(icu-url), $(icu-checksum))
+ cd $(ddir)
+ tar -xf $(tdir)/$$tarball
+ unpackdir=icu-$(icu-version)
+ cd $$unpackdir/icu4c/source
+ ./configure --enable-static --prefix=$(idir)
+ make -j$(numthreads) V=1
+ make install
+ cd $(ddir)
+ rm -rf $$unpackdir
+ echo "ICU $(icu-version)" > $@
+
$(ibidir)/imagemagick-$(imagemagick-version): \
$(ibidir)/zlib-$(zlib-version) \
$(ibidir)/libjpeg-$(libjpeg-version) \
$(ibidir)/libtiff-$(libtiff-version)
- tarball=imagemagick-$(imagemagick-version).tar.xz
+ tarball=ImageMagick-$(imagemagick-version).tar.lz
$(call import-source, $(imagemagick-url), $(imagemagick-checksum))
$(call gbuild, ImageMagick-$(imagemagick-version), static, \
--without-x --disable-openmp, V=1 -j$(numthreads))
echo "ImageMagick $(imagemagick-version)" > $@
-# `imfit' doesn't use the traditional `configure' and `make' to install
-# itself. Instead of that, it uses `scons'. As a consequence, the
+# 'imfit' doesn't use the traditional 'configure' and 'make' to install
+# itself. Instead, it uses 'scons'. As a consequence, the
# installation is manually done by decompressing the tarball, and running
-# `scons' with the necessary flags. Despite of that, it is necessary to
+# 'scons' with the necessary flags. Despite that, it is necessary to
# replace the default searching paths in this script by our installation
-# paths. This is done with `sed', replacing each ocurrence of `/usr/local'
-# by `$(idir)'. After that, each compiled program (`imfit', `imfit-mcmc'
-# and `makeimage') is copied into the installation directory and an `rpath'
+# paths. This is done with 'sed', replacing each occurrence of '/usr/local'
+# by '$(idir)'. After that, each compiled program ('imfit', 'imfit-mcmc'
+# and 'makeimage') is copied into the installation directory and an 'rpath'
# is added.
$(ibidir)/imfit-$(imfit-version): \
$(ibidir)/gsl-$(gsl-version) \
$(ibidir)/fftw-$(fftw-version) \
$(ibidir)/scons-$(scons-version) \
$(ibidir)/cfitsio-$(cfitsio-version)
+
+# Prepare the source.
tarball=imfit-$(imfit-version).tar.gz
$(call import-source, $(imfit-url), $(imfit-checksum))
- # If the C library is in a non-standard location.
+# If the C library is in a non-standard location.
if ! [ x$(SYS_CPATH) = x ]; then
headerpath="--header-path=$(SYS_CPATH)"
fi
- # Unpack and build imfit and its accompanying programs.
+# Unpack and build imfit and its accompanying programs.
cd $(ddir)
unpackdir=imfit-$(imfit-version)
rm -rf $$unpackdir
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd $$unpackdir
sed -i 's|/usr/local|$(idir)|g' SConstruct
sed -i 's|/usr/include|$(idir)/include|g' SConstruct
@@ -1086,6 +1224,8 @@ $(ibidir)/imfit-$(imfit-version): \
done
fi
cp $(dtexdir)/imfit.tex $(ictdir)/
+ cd ..
+ rm -rf $$unpackdir
echo "Imfit $(imfit-version) \citep{imfit2015}" > $@
# Minizip 1.x is actually distributed within zlib. It doesn't have its own
@@ -1098,13 +1238,13 @@ $(ibidir)/imfit-$(imfit-version): \
# About deleting the final crypt.h file after installation, see
# https://bugzilla.redhat.com/show_bug.cgi?id=1424609
$(ibidir)/minizip-$(minizip-version): $(ibidir)/automake-$(automake-version)
- tarball=zlib-$(zlib-version).tar.gz
- $(call import-source, $(minizip-url), $(minizip-checksum))
+ tarball=zlib-$(zlib-version).tar.lz
+ $(call import-source, $(zlib-url), $(zlib-checksum))
cd $(ddir)
unpackdir=minizip-$(minizip-version)
rm -rf $$unpackdir
mkdir $$unpackdir
- tar xf $(tdir)/$$tarball -C$$unpackdir --strip-components=1
+ tar -xf $(tdir)/$$tarball -C$$unpackdir --strip-components=1
cd $$unpackdir
./configure --prefix=$(idir)
make
@@ -1140,16 +1280,18 @@ $(ibidir)/missfits-$(missfits-version):
# Netpbm is a prerequisite of Astrometry-net; it contains a lot of programs.
# This program has a crazy dialogue installation which is overridden using the
-# printf statment. Each `\n' is a new question that the installation process
+# printf statement. Each '\n' is a new question that the installation process
# asks the user. We give all answers with a pipe to the scripts (configure
# and install). The questions are different depending on the system (tested
# on GNU/Linux and Mac OS).
$(ibidir)/netpbm-$(netpbm-version): \
+ $(ibidir)/flex-$(flex-version) \
$(ibidir)/libpng-$(libpng-version) \
+ $(ibidir)/libx11-$(libx11-version) \
$(ibidir)/libjpeg-$(libjpeg-version) \
$(ibidir)/libtiff-$(libtiff-version) \
$(ibidir)/libxml2-$(libxml2-version)
- tarball=netpbm-$(netpbm-version).tar.gz
+ tarball=netpbm-$(netpbm-version).tar.lz
$(call import-source, $(netpbm-url), $(netpbm-checksum))
if [ x$(on_mac_os) = xyes ]; then
answers='\n\n$(ildir)\n\n\n\n\n\n$(ildir)/include\n\n$(ildir)/include\n\n$(ildir)/include\nnone\n\n'
@@ -1159,7 +1301,7 @@ $(ibidir)/netpbm-$(netpbm-version): \
cd $(ddir)
unpackdir=netpbm-$(netpbm-version)
rm -rf $$unpackdir
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd $$unpackdir
printf "$$answers" | ./configure
make
@@ -1189,60 +1331,15 @@ $(ibidir)/pcre-$(pcre-version):
, V=1 -j$(numthreads))
echo "Perl Compatible Regular Expressions $(pcre-version)" > $@
-# Comment on building R without GUI support ('--without-tcltlk')
-#
-# Tcl/Tk are a set of tools to provide Graphic User Interface (GUI) support
-# in some software. But they are not yet natively built within Maneage,
-# primarily because we have higher-priority work right now (if anyone is
-# interested, they can ofcourse contribute!). GUI tools in general aren't
-# high on our priority list right now because they are generally good for
-# human interaction (which is contrary to the reproducible philosophy:
-# there will always be human-error and frustration, for example in GUI
-# tools the best level of reproducibility is statements like this: "move
-# your mouse to button XXX, then click on menu YYY and etc"). A robust
-# reproducible solution must be done automatically.
-#
-# If someone wants to use R's GUI functionalities while investigating for
-# their analysis, they can do the GUI part on their host OS
-# implementation. Later, they can bring the finalized source into Maneage
-# to be automatically run in Maneage. This will also be the recommended way
-# to deal with GUI tools later when we do install them within Maneage.
-$(ibidir)/R-$(R-version): \
- $(ibidir)/pcre-$(pcre-version) \
- $(ibidir)/cairo-$(cairo-version) \
- $(ibidir)/libpng-$(libpng-version) \
- $(ibidir)/libjpeg-$(libjpeg-version) \
- $(ibidir)/libtiff-$(libtiff-version) \
- $(ibidir)/libpaper-$(libpaper-version)
- tarball=R-$(R-version).tar.gz
- $(call import-source, $(R-url), $(R-checksum))
- cd $(ddir)
- tar xf $(tdir)/$$tarball
- cd R-$(R-version)
-
- # We need to manually remove the lines with '~autodetect~', they
- # cause the configure script to crash in version 4.0.2. They are
- # used in relation to Java, and we don't use Java anyway.
- sed -i -e '/\~autodetect\~/ s/^/#/g' configure
- export R_SHELL=$(SHELL)
- ./configure --prefix=$(idir) \
- --without-x \
- --with-pcre1 \
- --disable-java \
- --with-readline \
- --without-tcltk \
- --disable-openmp
- make -j$(numthreads)
- make install
- cd ..
- rm -rf R-$(R-version)
- echo "R $(R-version)" > $@
+# 2022-01-01 The rules for building R - identified as r-cran to avoid the
+# difficulties in searching text for a one-letter string - were shifted to
+# 'r-cran.mk'.
# SCAMP documentation says ATLAS is a mandatory prerequisite for using
# SCAMP. We have ATLAS in the project but there are some problems with the
# libraries that are not yet solved. However, we tried to install it with
# the option --enable-openblas and it worked (same issue happened with
-# `sextractor'.
+# 'sextractor').
$(ibidir)/scamp-$(scamp-version): \
$(ibidir)/fftw-$(fftw-version) \
$(ibidir)/openblas-$(openblas-version) \
@@ -1250,7 +1347,7 @@ $(ibidir)/scamp-$(scamp-version): \
tarball=scamp-$(scamp-version).tar.lz
$(call import-source, $(scamp-url), $(scamp-checksum))
- # See comment above 'missfits' for '-fcommon'.
+# See comment above 'missfits' for '-fcommon'.
$(call gbuild, scamp-$(scamp-version), static, \
CFLAGS="-fcommon" \
--enable-threads \
@@ -1263,17 +1360,19 @@ $(ibidir)/scamp-$(scamp-version): \
cp $(dtexdir)/scamp.tex $(ictdir)/
echo "SCAMP $(scamp-version) \citep{scamp}" > $@
-# Since `scons' doesn't use the traditional GNU installation with
-# `configure' and `make' it is installed manually using `python'.
+# Since 'scons' doesn't use the traditional GNU installation with
+# 'configure' and 'make' it is installed manually using 'python'.
$(ibidir)/scons-$(scons-version): $(ibidir)/python-$(python-version)
tarball=scons-$(scons-version).tar.gz
$(call import-source, $(scons-url), $(scons-checksum))
cd $(ddir)
unpackdir=scons-$(scons-version)
rm -rf $$unpackdir
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd $$unpackdir
python setup.py install
+ cd ..
+ rm -rf $$unpackdir
echo "SCons $(scons-version)" > $@
# Sextractor crashes complaining about not linking with some ATLAS
@@ -1289,7 +1388,7 @@ $(ibidir)/sextractor-$(sextractor-version): \
tarball=sextractor-$(sextractor-version).tar.lz
$(call import-source, $(sextractor-url), $(sextractor-checksum))
- # See comment above 'missfits' for '-fcommon'.
+# See comment above 'missfits' for '-fcommon'.
$(call gbuild, sextractor-$(sextractor-version), static, \
CFLAGS="-fcommon" \
--enable-threads \
@@ -1304,7 +1403,7 @@ $(ibidir)/swarp-$(swarp-version): $(ibidir)/fftw-$(fftw-version)
tarball=swarp-$(swarp-version).tar.gz
$(call import-source, $(swarp-url), $(swarp-checksum))
- # See comment above 'missfits' for '-fcommon'.
+# See comment above 'missfits' for '-fcommon'.
$(call gbuild, swarp-$(swarp-version), static, \
CFLAGS="-fcommon" \
--enable-threads)
@@ -1312,10 +1411,11 @@ $(ibidir)/swarp-$(swarp-version): $(ibidir)/fftw-$(fftw-version)
echo "SWarp $(swarp-version) \citep{swarp}" > $@
$(ibidir)/swig-$(swig-version):
- # Option --without-pcre was a suggestion once the configure step
- # was tried and it failed. It was not recommended but it works!
- # pcr is a dependency of swig
- tarball=swig-$(swig-version).tar.gz
+
+# Option '--without-pcre' was suggested after the configure step was
+# tried and failed. It was not recommended, but it works! PCRE is a
+# dependency of SWIG.
+ tarball=swig-$(swig-version).tar.lz
$(call import-source, $(swig-url), $(swig-checksum))
$(call gbuild, swig-$(swig-version), static, \
--without-pcre --without-tcl)
@@ -1345,19 +1445,19 @@ $(ibidir)/swig-$(swig-version):
# '$(ibdir)'. If any program does need 'util-linux' libraries, they can
# simply add the proper directories to the environment variables, see
# 'fontconfig' for example.
-$(ibidir)/util-linux-$(util-linux-version):
+$(ibidir)/util-linux-$(util-linux-version): | $(idircustom)
- # Import the source.
- tarball=util-linux-$(util-linux-version).tar.xz
+# Import the source.
+ tarball=util-linux-$(util-linux-version).tar.lz
$(call import-source, $(util-linux-url), $(util-linux-checksum))
- # Unpack the source and set it to install in a special directory
- # (as explained above). As shown below, later, we'll put a symbolic
- # link of all the necessary binaries in the main '$(idir)/bin'.
+# Unpack the source and set it to install in a special directory (as
+# explained above). As shown below, later, we'll put a symbolic link
+# of all the necessary binaries in the main '$(idir)/bin'.
cd $(ddir)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
cd util-linux-$(util-linux-version)
- ./configure --prefix=$(idir)/util-linux \
+ ./configure --prefix=$(idircustom)/util-linux \
--disable-dependency-tracking \
--disable-silent-rules \
--without-systemd \
@@ -1368,21 +1468,21 @@ $(ibidir)/util-linux-$(util-linux-version):
--disable-wall \
--disable-su
- # Build and install it.
+# Build and install it.
make V=1 -j$(numthreads)
make install
- # Put a symbolic link to installed programs in main installation
- # directory. If 'sbin' exists in the main installation directory,
- # put util-linux's 'sbin/' directory there too.
- ln -sf $(idir)/util-linux/bin/* $(ibdir)/
+# Put a symbolic link to installed programs in main installation
+# directory. If 'sbin' exists in the main installation directory, put
+# util-linux's 'sbin/' directory there too.
+ ln -sf $(idircustom)/util-linux/bin/* $(ibdir)/
if [ -d $(idir)/sbin ]; then
- ln -sf $(idir)/util-linux/sbin/* $(idir)/sbin
+ ln -sf $(idircustom)/util-linux/sbin/* $(idir)/sbin
else
- ln -sf $(idir)/util-linux/sbin/* $(idir)/bin
+ ln -sf $(idircustom)/util-linux/sbin/* $(idir)/bin
fi
- # Clean up and write the main target.
+# Clean up and write the main target.
cd ../
rm -rf util-linux-$(util-linux-version)
echo "util-Linux $(util-linux-version)" > $@
@@ -1428,7 +1528,7 @@ $(ibidir)/vim-$(vim-version):
tarball=vim-$(vim-version).tar.bz2
$(call import-source, $(vim-url), $(vim-checksum))
cd $(ddir)
- tar xf $(tdir)/$$tarball
+ tar -xf $(tdir)/$$tarball
n=$$(echo $(vim-version) | sed -e's|\.||')
cd $(ddir)/vim$$n
./configure --prefix=$(idir) \
@@ -1455,7 +1555,7 @@ $(ibidir)/vim-$(vim-version):
# hard to track for Make (as a target). Also, TeX in general is optional
# for the project (the processing is the main target, not the generation of
# the final PDF). So we'll make a simple ASCII file called
-# `texlive-ready-tlmgr' and use its contents to mark if we can use it or
+# 'texlive-ready-tlmgr' and use its contents to mark if we can use it or
# not.
#
# TeX Live mirror
@@ -1463,8 +1563,8 @@ $(ibidir)/vim-$(vim-version):
#
# The automatic mirror finding fails sometimes. So we'll manually set it to
# use a fixed mirror. I first tried the LaTeX root webpage
-# (`ftp.dante.de'), however, it is far too slow (when I tested it). The
-# `rit.edu' server seems to be a good alternative (given the importance of
+# ('ftp.dante.de'), however, it is far too slow (when I tested it). The
+# 'rit.edu' server seems to be a good alternative (given the importance of
# NY on the internet infrastructure).
texlive-url=http://mirrors.rit.edu/CTAN/systems/texlive/tlnet
$(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf
@@ -1472,81 +1572,81 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf
tarball=install-tl-unx.tar.gz
$(call import-source, $(texlive-url), NO-CHECK-SUM)
- # Unpack, enter the directory, and install based on the given
- # configuration (prerequisite of this rule).
+# Unpack, enter the directory, and install based on the given
+# configuration (prerequisite of this rule).
@topdir=$$(pwd)
cd $(ddir)
rm -rf install-tl-*
- tar xf $(tdir)/install-tl-unx.tar.gz
+ tar -xf $(tdir)/install-tl-unx.tar.gz
cd install-tl-*
sed -e's|@installdir[@]|$(idir)|g' \
"$$topdir"/reproduce/software/config/texlive.conf \
> texlive.conf
- # TeX Live's installation may fail due to any reason. But TeX Live
- # is optional (only necessary for building the final PDF). So we
- # don't want the configure script to fail if it can't run.
- # Possible error messages will be saved into `log.txt' and if it
- # fails, 'log.txt' will be checked to see if the error is due to
- # the different version of the current tarball and the TeXLive
- # server or something else.
- #
- # The problem with versions is this: each installer tarball (that
- # is downloaded and a user may backup) is for a specific version of
- # TeXLive (specified by year, usually around April). So if a user
- # has an old tarball, but the CTAN server has been updated, the
- # script will fail with a message like this:
- #
- # =============================================================
- # ./install-tl: The TeX Live versions of the local installation
- # and the repository being accessed are not compatible:
- # local: 2019
- # repository: 2020
- # Perhaps you need to use a different CTAN mirror?
- # (For more, see the output of install-tl --help, especially the
- # -repository option. Online via https://tug.org/texlive/doc.)
- # =============================================================
- #
- # To address this problem, when this happens, we simply download a
- # the most recent tarball, and if it succeeds, we will build
- # TeXLive using that. The old tarball will be preserved, but will
- # have an '-OLD' suffix after it.
+# TeX Live's installation may fail for any reason. But TeX Live is
+# optional (only necessary for building the final PDF). So we don't
+# want the configure script to fail if it can't run. Possible error
+# messages will be saved into 'log.txt' and if it fails, 'log.txt'
+# will be checked to see if the error is due to the different version
+# of the current tarball and the TeXLive server or something else.
+#
+# The problem with versions is this: each installer tarball (that is
+# downloaded and a user may backup) is for a specific version of
+# TeXLive (specified by year, usually around April). So if a user has
+# an old tarball, but the CTAN server has been updated, the script
+# will fail with a message like this:
+#
+# =============================================================
+# ./install-tl: The TeX Live versions of the local installation
+# and the repository being accessed are not compatible:
+# local: 2019
+# repository: 2020
+# Perhaps you need to use a different CTAN mirror?
+# (For more, see the output of install-tl --help, especially the
+# -repository option. Online via https://tug.org/texlive/doc.)
+# =============================================================
+#
+# To address this problem, when this happens, we simply download the
+# most recent tarball and, if it succeeds, we will build TeXLive
+# using that. The old tarball will be preserved, but will have an
+# '-OLD' suffix after it.
if ./install-tl --profile=texlive.conf -repository \
$(texlive-url) 2> log.txt; then
- # Put a symbolic link of the TeX Live executables in `ibdir' to
- # avoid all the complexities of its sub-directories and additions
- # to PATH.
+# Put a symbolic link of the TeX Live executables in 'ibdir' to
+# avoid all the complexities of its sub-directories and additions
+# to PATH.
ln -fs $(idir)/texlive/maneage/bin/*/* $(ibdir)/
- # Register that the build was successful.
+# Register that the build was successful.
echo "TeX Live is ready." > $@
- # The build failed!
+# The build failed!
else
- # Print on the command line the error messages during the
- # installation.
+# Print on the command line the error messages during the
+# installation.
cat log.txt
- # Look for words `repository:' and `local:' in `log.txt' and make
- # sure that two lines are returned. Note that we need to check
- # for two lines because one of them may exist, but another may
- # not (in this case, its not a version conflict scenario).
+# Look for words 'repository:' and 'local:' in 'log.txt' and make
+# sure that two lines are returned. Note that we need to check for
+# two lines because one of them may exist while the other may not (in
+# this case, it's not a version conflict scenario).
version_check=$$(grep -w 'repository:\|local:' log.txt | wc -l)
- # If these words exists and two lines are found, there is a
- # conflict with the main TeXLive version in the tarball and on
- # the server. So it is necessary to move the old tarball and
- # download the new one to install it.
+# If these words exist and two lines are found, there is a conflict
+# between the main TeXLive version in the tarball and the one on the
+# server. So it is necessary to move the old tarball aside and
+# download the new one to install it.
if [ x"$$version_check" = x2 ]; then
- # Go back to the top project directory, don't remove the
- # tarball, just rename it.
+
+# Go back to the top project directory, don't remove the tarball,
+# just rename it.
cd $$topdir
mv $(tdir)/install-tl-unx.tar.gz $(tdir)/install-tl-unx-OLD.tar.gz
- # Download using the script specially defined for this job. If
- # the download of new tarball success, install it (same lines
- # than above). If not, record the fail into the target.
+# Download using the script specially defined for this job. If the
+# download of the new tarball succeeds, install it (with the same
+# steps as above). If not, record the failure in the target.
url=http://mirror.ctan.org/systems/texlive/tlnet
tarballurl=$$url/install-tl-unx.tar.gz
touch $(lockdir)/download
@@ -1556,7 +1656,7 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf
"$(backupservers)"; then
cd $(ddir)
rm -rf install-tl-*
- tar xf $(tdir)/install-tl-unx.tar.gz
+ tar -xf $(tdir)/install-tl-unx.tar.gz
cd install-tl-*
sed -e's|@installdir[@]|$(idir)|g' \
$$topdir/reproduce/software/config/texlive.conf \
@@ -1576,7 +1676,7 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf
fi
fi
- # Clean up
+# Clean up
cd ..
rm -rf install-tl-*
@@ -1590,49 +1690,50 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf
#
# Note that Biber needs to link with libraries like libnsl. However, we
# don't currently build biber from source. So we can't choose the library
-# version. But we have the source and build instructions for the `nsl'
+# version. But we have the source and build instructions for the 'nsl'
# library. When we later build biber from source, we can easily use them.
$(itidir)/texlive: reproduce/software/config/texlive-packages.conf \
$(itidir)/texlive-ready-tlmgr
- # To work with TeX live installation, we'll need the internet.
+# To work with the TeX Live installation, we'll need internet access.
@res=$$(cat $(itidir)/texlive-ready-tlmgr)
if [ x"$$res" = x"NOT!" ]; then
echo "" > $@
else
- # To update itself, tlmgr needs a backup directory.
+
+# To update itself, tlmgr needs a backup directory.
backupdir=$(idir)/texlive/backups
mkdir -p $$backupdir
- # Before checking LaTeX packages, update tlmgr itself.
+# Before checking LaTeX packages, update tlmgr itself.
tlmgr option backupdir $$backupdir
tlmgr -repository $(texlive-url) update --self
- # Install all the extra necessary packages. If LaTeX complains
- # about not finding a command/file/what-ever/XXXXXX, simply run
- # the following command to find which package its in, then add it
- # to the `texlive-packages' variable of the first prerequisite.
- #
- # ./.local/bin/tlmgr info XXXXXX
- #
- # We are putting a notice, because if there is no internet,
- # `tlmgr' just hangs waiting.
+# Install all the extra necessary packages. If LaTeX complains about
+# not finding a command/file/whatever/XXXXXX, simply run the
+# following command to find which package it is in, then add it to
+# the 'texlive-packages' variable of the first prerequisite.
+#
+# ./.local/bin/tlmgr info XXXXXX
+#
+# We are putting a notice, because if there is no internet, 'tlmgr'
+# just hangs waiting.
tlmgr install $(texlive-packages)
- # Make a symbolic link of all the TeX Live executables in the bin
- # directory so we don't have to modify `PATH'.
+# Make a symbolic link of all the TeX Live executables in the bin
+# directory so we don't have to modify 'PATH'.
ln -fs $(idir)/texlive/maneage/bin/*/* $(ibdir)/
- # Get all the necessary versions.
+# Get all the necessary versions.
texlive=$$(pdflatex --version \
| awk 'NR==1' \
| sed 's/.*(\(.*\))/\1/' \
| awk '{print $$NF}');
- # Package names and versions. Note that all TeXLive packages
- # don't have a version unfortunately! So we need to also read the
- # `revision' and `cat-date' elements and print them incase
- # version isn't available.
+# Package names and versions. Note that, unfortunately, not all
+# TeXLive packages have a version! So we also need to read the
+# 'revision' and 'cat-date' elements and print them in case the
+# version isn't available.
tlmgr info $(texlive-packages) --only-installed | awk \
'$$1=="package:" { \
if(name!=0) \
diff --git a/reproduce/software/make/python.mk b/reproduce/software/make/python.mk
index 18c68de..6766744 100644
--- a/reproduce/software/make/python.mk
+++ b/reproduce/software/make/python.mk
@@ -4,7 +4,7 @@
# !!!!! IMPORTANT NOTES !!!!!
#
# This Makefile will be loaded into 'high-level.mk', which is called by the
-# `./project configure' script. It is not included into the project
+# './project configure' script. It is not included into the project
# afterwards.
#
# This Makefile contains instructions to build all the Python-related
@@ -12,8 +12,8 @@
#
# ------------------------------------------------------------------------
#
-# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com>
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -35,7 +35,7 @@
# Python enviroment
# -----------------
#
-# The main Python environment variable is `PYTHONPATH'. However, so far we
+# The main Python environment variable is 'PYTHONPATH'. However, so far we
# have found several other Python-related environment variables on some
# systems which might interfere. To be safe, we are removing all their
# values.
@@ -62,19 +62,20 @@ python-major-version = $(shell echo $(python-version) | awk 'BEGIN{FS="."} \
# While this Makefile is for Python programs, in some cases, we need
# certain programs (like Python itself), or libraries for the modules.
$(ibidir)/libffi-$(libffi-version):
- # Prepare the source.
- tarball=libffi-$(libffi-version).tar.gz
+
+# Prepare the source.
+ tarball=libffi-$(libffi-version).tar.lz
$(call import-source, $(libffi-url), $(libffi-checksum))
- # Build libffi.
+# Build libffi.
$(call gbuild, libffi-$(libffi-version), , \
CFLAGS="-DNO_JAVA_RAW_API=1")
- # On some Fedora systems, libffi installs in `lib64', not
- # `lib'. This will cause problems when building setuptools
- # later. To fix this problem, we'll first check if this has indeed
- # happened (it exists under `lib64', but not under `lib'). If so,
- # we'll put a copy of the installed libffi libraries in `lib'.
+# On some Fedora systems, libffi installs in 'lib64', not 'lib'. This
+# will cause problems when building setuptools later. To fix this
+# problem, we'll first check if this has indeed happened (it exists
+# under 'lib64', but not under 'lib'). If so, we'll put a copy of the
+# installed libffi libraries in 'lib'.
if [ -f $(idir)/lib64/libffi.a ] && ! [ -f $(idir)/lib/libffi.a ]; then
cp $(idir)/lib64/libffi* $(ildir)/
fi
@@ -82,17 +83,17 @@ $(ibidir)/libffi-$(libffi-version):
$(ibidir)/python-$(python-version): $(ibidir)/libffi-$(libffi-version)
- # Download the source.
- tarball=python-$(python-version).tar.gz
+# Download the source.
+ tarball=python-$(python-version).tar.lz
$(call import-source, $(python-url), $(python-checksum))
- # On Mac systems, the build complains about `clang' specific
- # features, so we can't use our own GCC build here.
+# On Mac systems, the build complains about 'clang' specific
+# features, so we can't use our own GCC build here.
if [ x$(on_mac_os) = xyes ]; then
export CC=clang
export CXX=clang++
fi
- $(call gbuild, Python-$(python-version),, \
+ $(call gbuild, python-$(python-version),, \
--without-ensurepip \
--with-system-ffi \
--enable-shared, -j$(numthreads))
@@ -110,10 +111,10 @@ $(ibidir)/python-$(python-version): $(ibidir)/libffi-$(libffi-version)
# Non-PiP Python module installation
# ----------------------------------
#
-# To build Python packages with direct access to a `setup.py' (if no direct
-# access to `setup.py' is needed, pip can be used). Note that the
+# To build Python packages with direct access to a 'setup.py' (if no direct
+# access to 'setup.py' is needed, pip can be used). Note that the
# software's packaged source code is the first prerequisite that is in the
-# `tdir' directory.
+# 'tdir' directory.
#
# Arguments of this function are the numbers
# 1) Unpack command
@@ -122,8 +123,8 @@ $(ibidir)/python-$(python-version): $(ibidir)/libffi-$(libffi-version)
# 4) Official software name (for paper).
#
# Hooks:
-# pyhook_before: optional steps before running `python setup.py build'
-# pyhook_after: optional steps after running `python setup.py install'
+# pyhook_before: optional steps before running 'python setup.py build'
+# pyhook_after: optional steps after running 'python setup.py install'
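+#
+# For instance, a hypothetical package that needs an extra option in
+# its 'setup.cfg' before building could define the hook as below
+# (sketch only; the option name and value are illustrative):
+#
+#   pyhook_before () {
+#     echo "[build]"       >> setup.cfg
+#     echo "some_option=1" >> setup.cfg
+#   }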
pybuild = cd $(ddir); rm -rf $(2); \
if ! $(1) $(tdir)/$$tarball; then \
echo; echo "Tar error"; exit 1; \
@@ -153,13 +154,13 @@ pybuild = cd $(ddir); rm -rf $(2); \
$(ipydir)/asn1crypto-$(asn1crypto-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=asn1crypto-$(asn1crypto-version).tar.gz
$(call import-source, $(asn1crypto-url), $(asn1crypto-checksum))
- $(call pybuild, tar xf, asn1crypto-$(asn1crypto-version), , \
+ $(call pybuild, tar -xf, asn1crypto-$(asn1crypto-version), , \
Asn1crypto $(asn1crypto-version))
$(ipydir)/asteval-$(asteval-version): $(ipydir)/numpy-$(numpy-version)
tarball=asteval-$(asteval-version).tar.gz
$(call import-source, $(asteval-url), $(asteval-checksum))
- $(call pybuild, tar xf, asteval-$(asteval-version), , \
+ $(call pybuild, tar -xf, asteval-$(asteval-version), , \
ASTEVAL $(asteval-version))
$(ipydir)/astroquery-$(astroquery-version): \
@@ -168,7 +169,7 @@ $(ipydir)/astroquery-$(astroquery-version): \
$(ipydir)/requests-$(requests-version)
tarball=astroquery-$(astroquery-version).tar.gz
$(call import-source, $(astroquery-url), $(astroquery-checksum))
- $(call pybuild, tar xf, astroquery-$(astroquery-version), , \
+ $(call pybuild, tar -xf, astroquery-$(astroquery-version), , \
Astroquery $(astroquery-version))
$(ipydir)/astropy-$(astropy-version): \
@@ -177,87 +178,84 @@ $(ipydir)/astropy-$(astropy-version): \
$(ipydir)/scipy-$(scipy-version) \
$(ipydir)/numpy-$(numpy-version) \
$(ipydir)/pyyaml-$(pyyaml-version) \
+ $(ipydir)/jinja2-$(jinja2-version) \
+ $(ipydir)/pyerfa-$(pyerfa-version) \
$(ipydir)/html5lib-$(html5lib-version) \
- $(ipydir)/beautifulsoup4-$(beautifulsoup4-version)
-
- # Download the source.
- tarball=astropy-$(astropy-version).tar.gz
+ $(ipydir)/beautifulsoup4-$(beautifulsoup4-version) \
+ $(ipydir)/extension-helpers-$(extension-helpers-version)
+ tarball=astropy-$(astropy-version).tar.lz
$(call import-source, $(astropy-url), $(astropy-checksum))
-
- # Currently, when the Expat library is already built in a project
- # (for example as a dependency of another program), Astropy's
- # internal building of Expat will conflict with the project's. So
- # we have added Expat as a dependency of Astropy (so it is always
- # built before it, and we tell Astropy to use the project's
- # libexpat.
- pyhook_before () {
- echo "" >> setup.cfg
- echo "[build]" >> setup.cfg
- echo "use_system_expat=1" >> setup.cfg
- }
- $(call pybuild, tar xf, astropy-$(astropy-version))
+ $(call pybuild, tar -xf, astropy-$(astropy-version))
cp $(dtexdir)/astropy.tex $(ictdir)/
echo "Astropy $(astropy-version) \citep{astropy2013,astropy2018}" > $@
$(ipydir)/beautifulsoup4-$(beautifulsoup4-version): \
$(ipydir)/soupsieve-$(soupsieve-version)
- tarball=beautifulsoup4-$(beautifulsoup4-version).tar.gz
+ tarball=beautifulsoup4-$(beautifulsoup4-version).tar.lz
$(call import-source, $(beautifulsoup4-url), $(beautifulsoup4-checksum))
- $(call pybuild, tar xf, beautifulsoup4-$(beautifulsoup4-version), , \
+ $(call pybuild, tar -xf, beautifulsoup4-$(beautifulsoup4-version), , \
BeautifulSoup $(beautifulsoup4-version))
+$(ipydir)/beniget-$(beniget-version): $(ipydir)/setuptools-$(setuptools-version)
+ tarball=beniget-$(beniget-version).tar.lz
+ $(call import-source, $(beniget-url), $(beniget-checksum))
+ $(call pybuild, tar -xf, beniget-$(beniget-version), , \
+ Beniget $(beniget-version))
+
$(ipydir)/certifi-$(certifi-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=certifi-$(certifi-version).tar.gz
$(call import-source, $(certifi-url), $(certifi-checksum))
- $(call pybuild, tar xf, certifi-$(certifi-version), , \
+ $(call pybuild, tar -xf, certifi-$(certifi-version), , \
Certifi $(certifi-version))
$(ipydir)/cffi-$(cffi-version): \
$(ibidir)/libffi-$(libffi-version) \
$(ipydir)/pycparser-$(pycparser-version)
- tarball=cffi-$(cffi-version).tar.gz
+ tarball=cffi-$(cffi-version).tar.lz
$(call import-source, $(cffi-url), $(cffi-checksum))
- $(call pybuild, tar xf, cffi-$(cffi-version), ,cffi $(cffi-version))
+ $(call pybuild, tar -xf, cffi-$(cffi-version), ,cffi $(cffi-version))
$(ipydir)/chardet-$(chardet-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=chardet-$(chardet-version).tar.gz
$(call import-source, $(chardet-url), $(chardet-checksum))
- $(call pybuild, tar xf, chardet-$(chardet-version), , \
+ $(call pybuild, tar -xf, chardet-$(chardet-version), , \
Chardet $(chardet-version))
$(ipydir)/corner-$(corner-version): $(ipydir)/matplotlib-$(matplotlib-version)
tarball=corner-$(corner-version).tar.gz
$(call import-source, $(corner-url), $(corner-checksum))
- $(call pybuild, tar xf, corner-$(corner-version), , \
+ $(call pybuild, tar -xf, corner-$(corner-version), , \
Corner $(corner-version))
cp $(dtexdir)/corner.tex $(ictdir)/
echo "Corner $(corner-version) \citep{corner}" > $@
$(ipydir)/cryptography-$(cryptography-version): \
$(ipydir)/cffi-$(cffi-version) \
- $(ipydir)/asn1crypto-$(asn1crypto-version)
- tarball=cryptography-$(cryptography-version).tar.gz
+ $(ipydir)/asn1crypto-$(asn1crypto-version) \
+ $(ipydir)/setuptools-rust-$(setuptools-rust-version)
+ tarball=cryptography-$(cryptography-version).tar.lz
$(call import-source, $(cryptography-url), $(cryptography-checksum))
- $(call pybuild, tar xf, cryptography-$(cryptography-version), , \
+ $(call pybuild, tar -xf, cryptography-$(cryptography-version), , \
Cryptography $(cryptography-version))
$(ipydir)/cycler-$(cycler-version): $(ipydir)/six-$(six-version)
- tarball=cycler-$(cycler-version).tar.gz
+ tarball=cycler-$(cycler-version).tar.lz
$(call import-source, $(cycler-url), $(cycler-checksum))
- $(call pybuild, tar xf, cycler-$(cycler-version), , \
+ $(call pybuild, tar -xf, cycler-$(cycler-version), , \
Cycler $(cycler-version))
$(ipydir)/cython-$(cython-version): $(ipydir)/setuptools-$(setuptools-version)
- tarball=cython-$(cython-version).tar.gz
+ tarball=Cython-$(cython-version).tar.lz
$(call import-source, $(cython-url), $(cython-checksum))
- $(call pybuild, tar xf, Cython-$(cython-version))
+ $(call pybuild, tar -xf, Cython-$(cython-version))
cp $(dtexdir)/cython.tex $(ictdir)/
echo "Cython $(cython-version) \citep{cython2011}" > $@
$(ipydir)/esutil-$(esutil-version): $(ipydir)/numpy-$(numpy-version)
- tarball=esutil-$(esutil-version).tar.gz
+ export CFLAGS="-std=c++14 $$CFLAGS"
+ tarball=esutil-$(esutil-version).tar.lz
$(call import-source, $(esutil-url), $(esutil-checksum))
- $(call pybuild, tar xf, esutil-$(esutil-version), , \
+ $(call pybuild, tar -xf, esutil-$(esutil-version), , \
esutil $(esutil-version))
$(ipydir)/eigency-$(eigency-version): \
@@ -266,7 +264,7 @@ $(ipydir)/eigency-$(eigency-version): \
$(ipydir)/cython-$(cython-version)
tarball=eigency-$(eigency-version).tar.gz
$(call import-source, $(eigency-url), $(eigency-checksum))
- $(call pybuild, tar xf, eigency-$(eigency-version), , \
+ $(call pybuild, tar -xf, eigency-$(eigency-version), , \
eigency $(eigency-version))
$(ipydir)/emcee-$(emcee-version): \
@@ -274,28 +272,35 @@ $(ipydir)/emcee-$(emcee-version): \
$(ipydir)/setuptools_scm-$(setuptools_scm-version)
tarball=emcee-$(emcee-version).tar.gz
$(call import-source, $(emcee-url), $(emcee-checksum))
- $(call pybuild, tar xf, emcee-$(emcee-version), , \
+ $(call pybuild, tar -xf, emcee-$(emcee-version), , \
emcee $(emcee-version))
$(ipydir)/entrypoints-$(entrypoints-version): \
$(ipydir)/setuptools-$(setuptools-version)
tarball=entrypoints-$(entrypoints-version).tar.gz
$(call import-source, $(entrypoints-url), $(entrypoints-checksum))
- $(call pybuild, tar xf, entrypoints-$(entrypoints-version), , \
+ $(call pybuild, tar -xf, entrypoints-$(entrypoints-version), , \
EntryPoints $(entrypoints-version))
+$(ipydir)/extension-helpers-$(extension-helpers-version): \
+ $(ipydir)/setuptools-$(setuptools-version)
+ tarball=extension-helpers-$(extension-helpers-version).tar.lz
+ $(call import-source, $(extension-helpers-url), $(extension-helpers-checksum))
+ $(call pybuild, tar -xf, extension-helpers-$(extension-helpers-version), , \
+ Extension-Helpers $(extension-helpers-version))
+
$(ipydir)/flake8-$(flake8-version): \
$(ipydir)/pyflakes-$(pyflakes-version) \
$(ipydir)/pycodestyle-$(pycodestyle-version)
tarball=flake8-$(flake8-version).tar.gz
$(call import-source, $(flake8-url), $(flake8-checksum))
- $(call pybuild, tar xf, flake8-$(flake8-version), , \
+ $(call pybuild, tar -xf, flake8-$(flake8-version), , \
Flake8 $(flake8-version))
$(ipydir)/future-$(future-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=future-$(future-version).tar.gz
$(call import-source, $(future-url), $(future-checksum))
- $(call pybuild, tar xf, future-$(future-version), , \
+ $(call pybuild, tar -xf, future-$(future-version), , \
Future $(future-version))
$(ipydir)/galsim-$(galsim-version): \
@@ -304,12 +309,18 @@ $(ipydir)/galsim-$(galsim-version): \
$(ipydir)/eigency-$(eigency-version) \
$(ipydir)/pybind11-$(pybind11-version) \
$(ipydir)/lsstdesccoord-$(lsstdesccoord-version)
- tarball=galsim-$(galsim-version).tar.gz
+ tarball=galsim-$(galsim-version).tar.lz
$(call import-source, $(galsim-url), $(galsim-checksum))
- $(call pybuild, tar xf, GalSim-$(galsim-version))
+ $(call pybuild, tar -xf, galsim-$(galsim-version))
cp $(dtexdir)/galsim.tex $(ictdir)/
echo "Galsim $(galsim-version) \citep{galsim}" > $@
+$(ipydir)/gast-$(gast-version): $(ipydir)/setuptools-$(setuptools-version)
+ tarball=gast-$(gast-version).tar.lz
+ $(call import-source, $(gast-url), $(gast-checksum))
+ $(call pybuild, tar -xf, gast-$(gast-version), , \
+ Gast $(gast-version))
+
$(ipydir)/h5py-$(h5py-version): \
$(ipydir)/six-$(six-version) \
$(ibidir)/hdf5-$(hdf5-version) \
@@ -321,15 +332,15 @@ $(ipydir)/h5py-$(h5py-version): \
export HDF5_DIR=$(ildir)
tarball=h5py-$(h5py-version).tar.gz
$(call import-source, $(h5py-url), $(h5py-checksum))
- $(call pybuild, tar xf, h5py-$(h5py-version), , \
+ $(call pybuild, tar -xf, h5py-$(h5py-version), , \
h5py $(h5py-version))
-# `healpy' is actually installed as part of the HEALPix package. It will be
+# 'healpy' is actually installed as part of the HEALPix package. It will be
# installed with its C/C++ libraries if any other Python library is
-# requested with HEALPix. So actually calling for `healpix' (when `healpix'
+# requested with HEALPix. So explicitly asking for 'healpy' (when 'healpix'
# is requested) is not necessary. But some users might not know about this
-# and just ask for `healpy'. To avoid confusion in such cases, we'll just
-# set `healpy' to be dependent on `healpix' and not download any tarball
+# and just ask for 'healpy'. To avoid confusion in such cases, we'll just
+# set 'healpy' to be dependent on 'healpix' and not download any tarball
# for it, or write anything in the final target.
$(ipydir)/healpy-$(healpy-version): $(ibidir)/healpix-$(healpix-version)
touch $@
@@ -339,34 +350,40 @@ $(ipydir)/html5lib-$(html5lib-version): \
$(ipydir)/webencodings-$(webencodings-version)
tarball=html5lib-$(html5lib-version).tar.gz
$(call import-source, $(html5lib-url), $(html5lib-checksum))
- $(call pybuild, tar xf, html5lib-$(html5lib-version), , \
+ $(call pybuild, tar -xf, html5lib-$(html5lib-version), , \
HTML5lib $(html5lib-version))
$(ipydir)/idna-$(idna-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=idna-$(idna-version).tar.gz
$(call import-source, $(idna-url), $(idna-checksum))
- $(call pybuild, tar xf, idna-$(idna-version), , \
+ $(call pybuild, tar -xf, idna-$(idna-version), , \
idna $(idna-version))
$(ipydir)/jeepney-$(jeepney-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=jeepney-$(jeepney-version).tar.gz
$(call import-source, $(jeepney-url), $(jeepney-checksum))
- $(call pybuild, tar xf, jeepney-$(jeepney-version), , \
+ $(call pybuild, tar -xf, jeepney-$(jeepney-version), , \
Jeepney $(jeepney-version))
+$(ipydir)/jinja2-$(jinja2-version): $(ipydir)/markupsafe-$(markupsafe-version)
+ tarball=jinja2-$(jinja2-version).tar.lz
+ $(call import-source, $(jinja2-url), $(jinja2-checksum))
+ $(call pybuild, tar -xf, jinja2-$(jinja2-version), , \
+ Jinja2 $(jinja2-version))
+
$(ipydir)/keyring-$(keyring-version): \
$(ipydir)/entrypoints-$(entrypoints-version) \
$(ipydir)/secretstorage-$(secretstorage-version) \
$(ipydir)/setuptools_scm-$(setuptools_scm-version)
tarball=keyring-$(keyring-version).tar.gz
$(call import-source, $(keyring-url), $(keyring-checksum))
- $(call pybuild, tar xf, keyring-$(keyring-version), , \
+ $(call pybuild, tar -xf, keyring-$(keyring-version), , \
Keyring $(keyring-version))
$(ipydir)/kiwisolver-$(kiwisolver-version): $(ipydir)/setuptools-$(setuptools-version)
- tarball=kiwisolver-$(kiwisolver-version).tar.gz
+ tarball=kiwisolver-$(kiwisolver-version).tar.lz
$(call import-source, $(kiwisolver-url), $(kiwisolver-checksum))
- $(call pybuild, tar xf, kiwisolver-$(kiwisolver-version), , \
+ $(call pybuild, tar -xf, kiwisolver-$(kiwisolver-version), , \
Kiwisolver $(kiwisolver-version))
$(ipydir)/lmfit-$(lmfit-version): \
@@ -379,20 +396,28 @@ $(ipydir)/lmfit-$(lmfit-version): \
$(ipydir)/uncertainties-$(uncertainties-version)
tarball=lmfit-$(lmfit-version).tar.gz
$(call import-source, $(lmfit-url), $(lmfit-checksum))
- $(call pybuild, tar xf, lmfit-$(lmfit-version), , \
+ $(call pybuild, tar -xf, lmfit-$(lmfit-version), , \
LMFIT $(lmfit-version))
$(ipydir)/lsstdesccoord-$(lsstdesccoord-version): \
- $(ipydir)/setuptools-$(setuptools-version)
+ $(ipydir)/cython-$(cython-version)
tarball=lsstdesccoord-$(lsstdesccoord-version).tar.gz
$(call import-source, $(lsstdesccoord-url), $(lsstdesccoord-checksum))
- $(call pybuild, tar xf, LSSTDESC.Coord-$(lsstdesccoord-version), , \
+ $(call pybuild, tar -xf, LSSTDESC.Coord-$(lsstdesccoord-version), , \
LSSTDESC.Coord $(lsstdesccoord-version))
+$(ipydir)/markupsafe-$(markupsafe-version): \
+ $(ipydir)/setuptools-$(setuptools-version)
+ tarball=markupsafe-$(markupsafe-version).tar.lz
+ $(call import-source, $(markupsafe-url), $(markupsafe-checksum))
+ $(call pybuild, tar -xf, markupsafe-$(markupsafe-version), , \
+ MarkupSafe $(markupsafe-version))
+
$(ipydir)/matplotlib-$(matplotlib-version): \
$(itidir)/texlive \
$(ipydir)/numpy-$(numpy-version) \
$(ipydir)/cycler-$(cycler-version) \
+ $(ipydir)/pillow-$(pillow-version) \
$(ibidir)/freetype-$(freetype-version) \
$(ipydir)/pyparsing-$(pyparsing-version) \
$(ipydir)/kiwisolver-$(kiwisolver-version) \
@@ -400,33 +425,33 @@ $(ipydir)/matplotlib-$(matplotlib-version): \
$(ibidir)/imagemagick-$(imagemagick-version) \
$(ipydir)/python-dateutil-$(python-dateutil-version)
- # Download the source.
- tarball=matplotlib-$(matplotlib-version).tar.gz
+# Prepare the source.
+ tarball=matplotlib-$(matplotlib-version).tar.lz
$(call import-source, $(matplotlib-url), $(matplotlib-checksum))
- # On Mac systems, the build complains about `clang' specific
- # features, so we can't use our own GCC build here.
+# On Mac systems, the build complains about 'clang' specific
+# features, so we can't use our own GCC build here.
if [ x$(on_mac_os) = xyes ]; then
export CC=clang
export CXX=clang++
fi
- $(call pybuild, tar xf, matplotlib-$(matplotlib-version))
+ $(call pybuild, tar -xf, matplotlib-$(matplotlib-version))
cp $(dtexdir)/matplotlib.tex $(ictdir)/
echo "Matplotlib $(matplotlib-version) \citep{matplotlib2007}" > $@
$(ipydir)/mpi4py-$(mpi4py-version): \
$(ibidir)/openmpi-$(openmpi-version) \
$(ipydir)/setuptools-$(setuptools-version)
- tarball=mpi4py-$(mpi4py-version).tar.gz
+ tarball=mpi4py-$(mpi4py-version).tar.lz
$(call import-source, $(mpi4py-url), $(mpi4py-checksum))
- $(call pybuild, tar xf, mpi4py-$(mpi4py-version))
+ $(call pybuild, tar -xf, mpi4py-$(mpi4py-version))
cp $(dtexdir)/mpi4py.tex $(ictdir)/
echo "mpi4py $(mpi4py-version) \citep{mpi4py2011}" > $@
$(ipydir)/mpmath-$(mpmath-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=mpmath-$(mpmath-version).tar.gz
$(call import-source, $(mpmath-url), $(mpmath-checksum))
- $(call pybuild, tar xf, mpmath-$(mpmath-version), , \
+ $(call pybuild, tar -xf, mpmath-$(mpmath-version), , \
mpmath $(mpmath-version))
$(ipydir)/numpy-$(numpy-version): \
@@ -434,7 +459,7 @@ $(ipydir)/numpy-$(numpy-version): \
$(ipydir)/cython-$(cython-version) \
$(ibidir)/openblas-$(openblas-version) \
$(ipydir)/setuptools-$(setuptools-version)
- tarball=numpy-$(numpy-version).zip
+ tarball=numpy-$(numpy-version).tar.lz
$(call import-source, $(numpy-url), $(numpy-checksum))
if [ x$(on_mac_os) = xyes ]; then
export LDFLAGS="$(LDFLAGS) -undefined dynamic_lookup -bundle"
@@ -443,28 +468,48 @@ $(ipydir)/numpy-$(numpy-version): \
fi
export CFLAGS="--std=c99 $$CFLAGS"
conf="$$(pwd)/reproduce/software/config/numpy-scipy.cfg"
- $(call pybuild, unzip, numpy-$(numpy-version),$$conf, \
+ $(call pybuild, tar -xf, numpy-$(numpy-version),$$conf, \
Numpy $(numpy-version))
cp $(dtexdir)/numpy.tex $(ictdir)/
echo "Numpy $(numpy-version) \citep{numpy2011}" > $@
+$(ipydir)/packaging-$(packaging-version): \
+ $(ipydir)/pyparsing-$(pyparsing-version)
+ tarball=packaging-$(packaging-version).tar.lz
+ $(call import-source, $(packaging-url), $(packaging-checksum))
+ $(call pybuild, tar -xf, packaging-$(packaging-version), , \
+ Packaging $(packaging-version))
+
$(ipydir)/pexpect-$(pexpect-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=pexpect-$(pexpect-version).tar.gz
$(call import-source, $(pexpect-url), $(pexpect-checksum))
- $(call pybuild, tar xf, pexpect-$(pexpect-version), , \
+ $(call pybuild, tar -xf, pexpect-$(pexpect-version), , \
Pexpect $(pexpect-version))
+$(ipydir)/pillow-$(pillow-version): $(ibidir)/libjpeg-$(libjpeg-version) \
+ $(ipydir)/setuptools-$(setuptools-version)
+ tarball=Pillow-$(pillow-version).tar.lz
+ $(call import-source, $(pillow-url), $(pillow-checksum))
+ $(call pybuild, tar -xf, Pillow-$(pillow-version), , \
+ Pillow $(pillow-version))
+
$(ipydir)/pip-$(pip-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=pip-$(pip-version).tar.gz
$(call import-source, $(pip-url), $(pip-checksum))
- $(call pybuild, tar xf, pip-$(pip-version), , \
+ $(call pybuild, tar -xf, pip-$(pip-version), , \
PiP $(pip-version))
+$(ipydir)/ply-$(ply-version): $(ipydir)/setuptools-$(setuptools-version)
+ tarball=ply-$(ply-version).tar.lz
+ $(call import-source, $(ply-url), $(ply-checksum))
+ $(call pybuild, tar -xf, ply-$(ply-version), , \
+ ply $(ply-version))
+
$(ipydir)/pycodestyle-$(pycodestyle-version): \
$(ipydir)/setuptools-$(setuptools-version)
tarball=pycodestyle-$(pycodestyle-version).tar.gz
$(call import-source, $(pycodestyle-url), $(pycodestyle-checksum))
- $(call pybuild, tar xf, pycodestyle-$(pycodestyle-version), , \
+ $(call pybuild, tar -xf, pycodestyle-$(pycodestyle-version), , \
pycodestyle $(pycodestyle-version))
$(ipydir)/pybind11-$(pybind11-version): \
@@ -476,31 +521,40 @@ $(ipydir)/pybind11-$(pybind11-version): \
pyhook_after() {
cp -r include/pybind11 $(iidir)/python$(python-major-version)m/
}
- $(call pybuild, tar xf, pybind11-$(pybind11-version), , \
+ $(call pybuild, tar -xf, pybind11-$(pybind11-version), , \
pybind11 $(pybind11-version))
$(ipydir)/pycparser-$(pycparser-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=pycparser-$(pycparser-version).tar.gz
$(call import-source, $(pycparser-url), $(pycparser-checksum))
- $(call pybuild, tar xf, pycparser-$(pycparser-version), , \
+ $(call pybuild, tar -xf, pycparser-$(pycparser-version), , \
pycparser $(pycparser-version))
+$(ipydir)/pyerfa-$(pyerfa-version): \
+ $(ipydir)/numpy-$(numpy-version) \
+ $(ipydir)/packaging-$(packaging-version)
+ tarball=pyerfa-$(pyerfa-version).tar.lz
+ $(call import-source, $(pyerfa-url), $(pyerfa-checksum))
+ $(call pybuild, tar -xf, pyerfa-$(pyerfa-version), , \
+ PyERFA $(pyerfa-version))
+
$(ipydir)/pyflakes-$(pyflakes-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=pyflakes-$(pyflakes-version).tar.gz
$(call import-source, $(pyflakes-url), $(pyflakes-checksum))
- $(call pybuild, tar xf, pyflakes-$(pyflakes-version), , \
+ $(call pybuild, tar -xf, pyflakes-$(pyflakes-version), , \
pyflakes $(pyflakes-version))
-$(ipydir)/pyparsing-$(pyparsing-version): $(ipydir)/setuptools-$(setuptools-version)
- tarball=pyparsing-$(pyparsing-version).tar.gz
+$(ipydir)/pyparsing-$(pyparsing-version): \
+ $(ipydir)/setuptools-$(setuptools-version)
+ tarball=pyparsing-$(pyparsing-version).tar.lz
$(call import-source, $(pyparsing-url), $(pyparsing-checksum))
- $(call pybuild, tar xf, pyparsing-$(pyparsing-version), , \
+ $(call pybuild, tar -xf, pyparsing-$(pyparsing-version), , \
PyParsing $(pyparsing-version))
$(ipydir)/pypkgconfig-$(pypkgconfig-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=pkgconfig-$(pypkgconfig-version).tar.gz
$(call import-source, $(pypkgconfig-url), $(pypkgconfig-checksum))
- $(call pybuild, tar xf, pkgconfig-$(pypkgconfig-version), ,
+ $(call pybuild, tar -xf, pkgconfig-$(pypkgconfig-version), ,
pkgconfig $(pypkgconfig-version))
$(ipydir)/python-dateutil-$(python-dateutil-version): \
@@ -508,15 +562,26 @@ $(ipydir)/python-dateutil-$(python-dateutil-version): \
$(ipydir)/setuptools_scm-$(setuptools_scm-version)
tarball=python-dateutil-$(python-dateutil-version).tar.gz
$(call import-source, $(python-dateutil-url), $(python-dateutil-checksum))
- $(call pybuild, tar xf, python-dateutil-$(python-dateutil-version), , \
+ $(call pybuild, tar -xf, python-dateutil-$(python-dateutil-version), , \
python-dateutil $(python-dateutil-version))
+$(ipydir)/pythran-$(pythran-version): \
+ $(ipydir)/ply-$(ply-version) \
+ $(ipydir)/gast-$(gast-version) \
+ $(ibidir)/boost-$(boost-version) \
+ $(ipydir)/beniget-$(beniget-version) \
+ $(ipydir)/setuptools_scm-$(setuptools_scm-version)
+ tarball=pythran-$(pythran-version).tar.lz
+ $(call import-source, $(pythran-url), $(pythran-checksum))
+ $(call pybuild, tar -xf, pythran-$(pythran-version), , \
+ pythran $(pythran-version))
+
$(ipydir)/pyyaml-$(pyyaml-version): \
$(ibidir)/yaml-$(yaml-version) \
$(ipydir)/cython-$(cython-version)
tarball=pyyaml-$(pyyaml-version).tar.gz
$(call import-source, $(pyyaml-url), $(pyyaml-checksum))
- $(call pybuild, tar xf, PyYAML-$(pyyaml-version), , \
+ $(call pybuild, tar -xf, PyYAML-$(pyyaml-version), , \
PyYAML $(pyyaml-version))
$(ipydir)/requests-$(requests-version): $(ipydir)/idna-$(idna-version) \
@@ -526,13 +591,14 @@ $(ipydir)/requests-$(requests-version): $(ipydir)/idna-$(idna-version) \
$(ipydir)/urllib3-$(urllib3-version)
tarball=requests-$(requests-version).tar.gz
$(call import-source, $(requests-url), $(requests-checksum))
- $(call pybuild, tar xf, requests-$(requests-version), , \
+ $(call pybuild, tar -xf, requests-$(requests-version), , \
Requests $(requests-version))
$(ipydir)/scipy-$(scipy-version): \
$(ipydir)/numpy-$(numpy-version) \
+ $(ipydir)/pythran-$(pythran-version) \
$(ipydir)/pybind11-$(pybind11-version)
- tarball=scipy-$(scipy-version).tar.gz
+ tarball=scipy-$(scipy-version).tar.lz
$(call import-source, $(scipy-url), $(scipy-checksum))
if [ x$(on_mac_os) = xyes ]; then
export LDFLAGS="$(LDFLAGS) -undefined dynamic_lookup -bundle"
@@ -540,7 +606,7 @@ $(ipydir)/scipy-$(scipy-version): \
export LDFLAGS="$(LDFLAGS) -shared"
fi
conf="$$(pwd)/reproduce/software/config/numpy-scipy.cfg"
- $(call pybuild, tar xf, scipy-$(scipy-version),$$conf)
+ $(call pybuild, tar -xf, scipy-$(scipy-version),$$conf)
cp $(dtexdir)/scipy.tex $(ictdir)/
echo "Scipy $(scipy-version) \citep{scipy2007,scipy2011}" > $@
@@ -549,68 +615,81 @@ $(ipydir)/secretstorage-$(secretstorage-version): \
$(ipydir)/cryptography-$(cryptography-version)
tarball=secretstorage-$(secretstorage-version).tar.gz
$(call import-source, $(secretstorage-url), $(secretstorage-checksum))
- $(call pybuild, tar xf, SecretStorage-$(secretstorage-version), , \
+ $(call pybuild, tar -xf, SecretStorage-$(secretstorage-version), , \
SecretStorage $(secretstorage-version))
$(ipydir)/setuptools-$(setuptools-version): \
$(ibidir)/unzip-$(unzip-version) \
$(ibidir)/python-$(python-version)
- tarball=setuptools-$(setuptools-version).zip
+ tarball=setuptools-$(setuptools-version).tar.lz
$(call import-source, $(setuptools-url), $(setuptools-checksum))
- $(call pybuild, unzip, setuptools-$(setuptools-version), , \
+ $(call pybuild, tar -xf, setuptools-$(setuptools-version), , \
Setuptools $(setuptools-version))
$(ipydir)/setuptools_scm-$(setuptools_scm-version): \
$(ipydir)/setuptools-$(setuptools-version)
tarball=setuptools_scm-$(setuptools_scm-version).tar.gz
$(call import-source, $(setuptools_scm-url), $(setuptools_scm-checksum))
- $(call pybuild, tar xf, setuptools_scm-$(setuptools_scm-version), , \
+ $(call pybuild, tar -xf, setuptools_scm-$(setuptools_scm-version), , \
Setuptools-scm $(setuptools_scm-version))
+$(ipydir)/setuptools-rust-$(setuptools-rust-version): \
+ $(ipydir)/setuptools-$(setuptools-version)
+ tarball=setuptools-rust-$(setuptools-rust-version).tar.lz
+ $(call import-source, $(setuptools-rust-url), $(setuptools-rust-checksum))
+ $(call pybuild, tar -xf, setuptools-rust-$(setuptools-rust-version), , \
+	               Setuptools-rust $(setuptools-rust-version))
+
$(ipydir)/sip_tpv-$(sip_tpv-version): \
$(ipydir)/sympy-$(sympy-version) \
$(ipydir)/astropy-$(astropy-version)
tarball=sip_tpv-$(sip_tpv-version).tar.gz
$(call import-source, $(sip_tpv-url), $(sip_tpv-checksum))
- $(call pybuild, tar xf, sip_tpv-$(sip_tpv-version), ,)
+ $(call pybuild, tar -xf, sip_tpv-$(sip_tpv-version), ,)
cp $(dtexdir)/sip_tpv.tex $(ictdir)/
echo "sip_tpv $(sip_tpv-version) \citep{sip-tpv}" > $@
$(ipydir)/six-$(six-version): $(ipydir)/setuptools-$(setuptools-version)
- tarball=six-$(six-version).tar.gz
+ tarball=six-$(six-version).tar.lz
$(call import-source, $(six-url), $(six-checksum))
- $(call pybuild, tar xf, six-$(six-version), , \
+ $(call pybuild, tar -xf, six-$(six-version), , \
Six $(six-version))
$(ipydir)/soupsieve-$(soupsieve-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=soupsieve-$(soupsieve-version).tar.gz
$(call import-source, $(soupsieve-url), $(soupsieve-checksum))
- $(call pybuild, tar xf, soupsieve-$(soupsieve-version), , \
+ $(call pybuild, tar -xf, soupsieve-$(soupsieve-version), , \
SoupSieve $(soupsieve-version))
$(ipydir)/sympy-$(sympy-version): $(ipydir)/mpmath-$(mpmath-version)
tarball=sympy-$(sympy-version).tar.gz
$(call import-source, $(sympy-url), $(sympy-checksum))
- $(call pybuild, tar xf, sympy-$(sympy-version), ,)
+ $(call pybuild, tar -xf, sympy-$(sympy-version), ,)
cp $(dtexdir)/sympy.tex $(ictdir)/
echo "SymPy $(sympy-version) \citep{sympy}" > $@
$(ipydir)/uncertainties-$(uncertainties-version): $(ipydir)/numpy-$(numpy-version)
- tarball=uncertainties-$(uncertainties-version).tar.gz
+ tarball=uncertainties-$(uncertainties-version).tar.lz
$(call import-source, $(uncertainties-url), $(uncertainties-checksum))
- $(call pybuild, tar xf, uncertainties-$(uncertainties-version), , \
+ $(call pybuild, tar -xf, uncertainties-$(uncertainties-version), , \
uncertainties $(uncertainties-version))
$(ipydir)/urllib3-$(urllib3-version): $(ipydir)/setuptools-$(setuptools-version)
tarball=urllib3-$(urllib3-version).tar.gz
$(call import-source, $(urllib3-url), $(urllib3-checksum))
- $(call pybuild, tar xf, urllib3-$(urllib3-version), , \
+ $(call pybuild, tar -xf, urllib3-$(urllib3-version), , \
Urllib3 $(urllib3-version))
$(ipydir)/webencodings-$(webencodings-version): \
$(ipydir)/setuptools-$(setuptools-version)
tarball=webencodings-$(webencodings-version).tar.gz
$(call import-source, $(webencodings-url), $(webencodings-checksum))
- $(call pybuild, tar xf, webencodings-$(webencodings-version), , \
+ $(call pybuild, tar -xf, webencodings-$(webencodings-version), , \
Webencodings $(webencodings-version))
+
+$(ipydir)/wheel-$(wheel-version): $(ipydir)/setuptools-$(setuptools-version)
+ tarball=wheel-$(wheel-version).tar.lz
+ $(call import-source, $(wheel-url), $(wheel-checksum))
+ $(call pybuild, tar -xf, wheel-$(wheel-version), , \
+ Wheel $(wheel-version))
diff --git a/reproduce/software/make/r-cran.mk b/reproduce/software/make/r-cran.mk
new file mode 100644
index 0000000..617b8de
--- /dev/null
+++ b/reproduce/software/make/r-cran.mk
@@ -0,0 +1,487 @@
+# Build the project's R (here called R-CRAN) dependencies.
+#
+# ------------------------------------------------------------------------
+# !!!!! IMPORTANT NOTES !!!!!
+#
+# This Makefile will be loaded into 'high-level.mk', which is called by the
+# './project configure' script. It is not included into the project
+# afterwards.
+#
+# This Makefile contains instructions to build all the R-CRAN-related
+# software within the project.
+#
+# ------------------------------------------------------------------------
+#
+# Copyright (C) 2022 Boud Roukema <boud@cosmo.torun.pl>
+# Copyright (C) 2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+#
+# This Makefile is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This Makefile is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this Makefile. If not, see <http://www.gnu.org/licenses/>.
+
+
+
+
+
+# BUGS/IMPROVEMENTS
+# -----------------
+#
+# As of 2021-06-20, the R system is still very new and has not yet
+# been tested on non-Debian-derived systems. Please provide bug
+# reports ( https://savannah.nongnu.org/task/?15772 ) or propose fixes
+# as git pull requests on a public git server (e.g. on a fork of
+# https://codeberg.org/boud/maneage_dev ).
+
+
+
+
+
+# R-CRAN environment
+# ------------------
+#
+# It may be necessary to override host-level R-related environment
+# variables that interfere with the Maneage-installed R system.
+
+# Ideas for which environment variables might create problems
+# and might need to be set to be empty here:
+#
+# https://stat.ethz.ch/R-manual/R-devel/library/base/html/EnvVar.html
+
+# These first variables should be set automatically when R starts:
+#export R_HOME := $(idir)/lib/R
+#export R_INCLUDE_DIR := $(idir)/lib/R/include
+
+
+
+
+
+# R-CRAN-specific installation directories.
+r-cran-major-version = $(shell echo $(r-cran-version) \
+ | awk 'BEGIN{FS="."} \
+ {printf "%d.%d\n", $$1, $$2}')
+
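+# For example, with a hypothetical 'r-cran-version' of 4.1.2, the AWK
+# command above reduces it to the major.minor value 4.1.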
+
+
+
+
+# R-CRAN-specific build rules for 'make'
+# ======================================
+
+# Double-check an already downloaded R source
+# -------------------------------------------
+#
+# Check that the tarball with the version given in
+# 'reproduce/software/config/versions.conf' has the sha512sum (checksum)
+# stated in 'reproduce/software/config/checksums.conf'. This does not do
+# any security checks; it only checks that the source package is the one
+# that was expected the last time someone updated these two files for the
+# R package of interest.
+#
+# Calculate the checksum and exit with a non-zero error code if there's a
+# mismatch, after informing the user.
+#
+# Arguments:
+# 1: The expected checksum of the tarball.
+#
+# Necessary shell variables
+# 'tarball': This is the name of the actual tarball file without a
+# directory.
+double-check-R-source = final=$(tdir)/$$tarball; \
+ exp_checksum="$(strip $(1))"; \
+ if [ x"$$exp_checksum" = x"NO-CHECK-SUM" ]; then \
+ result=0; \
+ else \
+ if type sha512sum > /dev/null 2>/dev/null; then \
+ checksum=$$(sha512sum "$$final" | awk '{print $$1}'); \
+ if [ x"$$checksum" = x"$$exp_checksum" ]; then \
+ result=0; \
+ else \
+ echo "ERROR: Non-matching checksum: $$final"; \
+ echo "Checksum should be: $$exp_checksum"; \
+ echo "Checksum is: $$checksum"; \
+ result=1; \
+ exit 1; \
+ fi; \
+ else \
+ echo "ERROR: sha512sum is unavailable."; \
+ exit 1; \
+ fi; \
+ fi
+
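+# For illustration, a recipe that has already set the 'tarball' shell
+# variable could call the 'double-check-R-source' macro like this (the
+# package name 'foo', its version and its checksum variable are
+# hypothetical):
+#
+#   tarball=foo-1.2.3.tar.lz
+#   $(call double-check-R-source, $(r-cran-foo-checksum))
+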
+# Default 'make' build rules for a CRAN package
+# -----------------------------------------------
+#
+# The default 'install.packages' function of R only recognizes '.tar.gz'
+# tarballs. But Maneage uses the '.tar.lz' format for its archive. So, to
+# be agnostic to the compression algorithm, we call 'tar' externally
+# (before entering R), then give the uncompressed directory to
+# 'install.packages'.
+#
+# Parameters:
+# 1. package name (without 'r-cran', without the version string)
+# 2. version string
+# 3. checksum of the package
+r_cran_build = \
+ pkg=$(strip $(1)); \
+ version=$(strip $(2)); \
+ checksum=$(strip $(3)); \
+ $(call import-source, \
+ https://cran.r-project.org/src/contrib, \
+ $$checksum, \
+ $$tarball, \
+ https://cran.r-project.org/src/contrib/00Archive/$$pkg); \
+ cd "$(ddir)"; \
+ tar -xf $(tdir)/$$tarball; \
+ unpackdir=$$pkg-$$version; \
+ (printf "install.packages(c(\"$(ddir)/$$unpackdir\"),"; \
+ printf 'lib="$(ilibrcrandir)",'; \
+ printf 'repos=NULL,'; \
+ printf 'type="source")\n'; \
+ printf 'quit()\n'; \
+ printf 'n\n') | R --no-save; \
+ rm -rf $$unpackdir; \
+ if [ $$pkg = r-pkgconfig ]; then iname=pkgconfig; \
+ else iname=$$pkg; fi; \
+ if [ -e "$(ilibrcrandir)"/$$iname/Meta/nsInfo.rds ]; then \
+ $(call double-check-R-source, $$checksum) \
+ && echo "$$pkg $$version" > $@; \
+ else \
+ printf "r-cran-$$pkg failed: Meta/nsInfo.rds missing.\n"; \
+ exit 1; \
+ fi
+
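+# For example, a hypothetical package 'foo' (with its version and
+# checksum variables defined in 'versions.conf' and 'checksums.conf')
+# would get a rule along these lines:
+#
+#   $(ircrandir)/r-cran-foo-$(r-cran-foo-version): \
+#               $(ibidir)/r-cran-$(r-cran-version)
+#   	tarball=foo-$(r-cran-foo-version).tar.lz
+#   	$(call r_cran_build, foo, $(r-cran-foo-version), \
+#   	       $(r-cran-foo-checksum))
+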
+
+
+
+
+# Necessary programs and libraries
+# --------------------------------
+#
+# While this Makefile is for R programs, in some cases we need certain
+# programs (like R itself), or libraries for the modules. A comment on
+# building R without GUI support ('--without-tcltk'):
+#
+# Tcl/Tk are a set of tools to provide Graphical User Interface (GUI)
+# support in some software. But they are not yet natively built within
+# Maneage, primarily because we have higher-priority work right now (if
+# anyone is interested, they can of course contribute!). GUI tools in
+# general aren't high on our priority list right now because they are
+# mainly meant for human interaction (which is contrary to the
+# reproducible philosophy: there will always be human error and
+# frustration; for example, in GUI tools the best level of
+# reproducibility is a statement like "move your mouse to button XXX,
+# then click on menu YYY, etc."). A robust reproducible solution must
+# run automatically.
+#
+# If someone wants to use R's GUI functionalities while investigating for
+# their analysis, they can do the GUI part on their host OS
+# implementation. Later, they can bring the finalized source into Maneage
+# to be automatically run in Maneage. This will also be the recommended way
+# to deal with GUI tools later when we do install them within Maneage.
+$(ibidir)/r-cran-$(r-cran-version): \
+ $(itidir)/texlive \
+ $(ibidir)/icu-$(icu-version) \
+ $(ibidir)/pcre-$(pcre-version) \
+ $(ibidir)/cairo-$(cairo-version) \
+ $(ibidir)/libpng-$(libpng-version) \
+ $(ibidir)/libjpeg-$(libjpeg-version) \
+ $(ibidir)/libtiff-$(libtiff-version) \
+ $(ibidir)/libpaper-$(libpaper-version)
+
+# Prepare the tarball, unpack it and enter the directory.
+ tarball=R-$(r-cran-version).tar.lz
+ $(call import-source, $(r-cran-url), $(r-cran-checksum))
+ cd $(ddir)
+ tar -xf $(tdir)/$$tarball
+ unpackdir=R-$(r-cran-version)
+ cd $$unpackdir
+
+# We need to manually remove the lines with '~autodetect~'; they cause
+# the configure script to crash in version 4.0.2. They are used in
+# relation to Java, and we don't use Java anyway.
+ sed -i -e '/\~autodetect\~/ s/^/#/g' configure
+ export R_SHELL=$(SHELL)
+ ./configure --prefix=$(idir) \
+ --without-x \
+ --with-pcre1 \
+ --disable-java \
+ --with-readline \
+ --without-tcltk \
+ --disable-openmp
+ make -j$(numthreads)
+ make install
+ cd ..
+ rm -rf R-$(r-cran-version)
+ cp -p $(dtexdir)/r-cran.tex $(ictdir)/
+ echo "R $(r-cran-version) \citep{RIhakaGentleman1996}" > $@
+
+
+
+
+
+# Non-Maneage'd tarballs
+# ----------------------
+#
+# CRAN tarballs differ in two aspects from Maneage'd tarballs:
+# - CRAN uses '.tar.gz', while Maneage uses '.tar.lz'.
+# - CRAN uses 'name_version', while Maneage uses 'name-version'.
+#
+# So if you add a new R package, or update the version of an existing one
+# (that is not yet in Maneage's archive), you need to use the CRAN naming
+# format for the 'tarball' variable.
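+#
+# For example, a hypothetical package 'foo' would be distributed by CRAN
+# as 'foo_1.2.3.tar.gz', but archived by Maneage as 'foo-1.2.3.tar.lz';
+# in the first case you would therefore set:
+#
+#   tarball=foo_$(r-cran-foo-version).tar.gz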
+
+
+
+
+
+# R-CRAN modules
+# ---------------
+#
+# The rules for downloading, compiling and installing any needed R-CRAN
+# modules should be provided here. Each target (before the colon) is
+# shown together with its prerequisites (which are listed after the
+# colon). The default macro 'r_cran_build' will install the package
+# without checking its dependencies.
+
+$(ircrandir)/r-cran-cli-$(r-cran-cli-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-glue-$(r-cran-glue-version)
+ tarball=cli-$(r-cran-cli-version).tar.lz
+ $(call r_cran_build, cli, $(r-cran-cli-version), \
+ $(r-cran-cli-checksum))
+
+$(ircrandir)/r-cran-colorspace-$(r-cran-colorspace-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=colorspace-$(r-cran-colorspace-version).tar.lz
+ $(call r_cran_build, colorspace, $(r-cran-colorspace-version), \
+ $(r-cran-colorspace-checksum))
+
+$(ircrandir)/r-cran-cowplot-$(r-cran-cowplot-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \
+ $(ircrandir)/r-cran-gtable-$(r-cran-gtable-version) \
+ $(ircrandir)/r-cran-scales-$(r-cran-scales-version) \
+ $(ircrandir)/r-cran-ggplot2-$(r-cran-ggplot2-version)
+ tarball=cowplot-$(r-cran-cowplot-version).tar.lz
+ $(call r_cran_build, cowplot, $(r-cran-cowplot-version), \
+ $(r-cran-cowplot-checksum))
+
+$(ircrandir)/r-cran-crayon-$(r-cran-crayon-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=crayon-$(r-cran-crayon-version).tar.lz
+ $(call r_cran_build, crayon, $(r-cran-crayon-version), \
+ $(r-cran-crayon-checksum))
+
+$(ircrandir)/r-cran-digest-$(r-cran-digest-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=digest-$(r-cran-digest-version).tar.lz
+ $(call r_cran_build, digest, $(r-cran-digest-version), \
+ $(r-cran-digest-checksum))
+
+$(ircrandir)/r-cran-farver-$(r-cran-farver-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=farver-$(r-cran-farver-version).tar.lz
+ $(call r_cran_build, farver, $(r-cran-farver-version), \
+ $(r-cran-farver-checksum))
+
+$(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version)
+ tarball=ellipsis-$(r-cran-ellipsis-version).tar.lz
+ $(call r_cran_build, ellipsis, $(r-cran-ellipsis-version), \
+ $(r-cran-ellipsis-checksum))
+
+$(ircrandir)/r-cran-fansi-$(r-cran-fansi-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=fansi-$(r-cran-fansi-version).tar.lz
+ $(call r_cran_build, fansi, $(r-cran-fansi-version), \
+ $(r-cran-fansi-checksum))
+
+$(ircrandir)/r-cran-ggplot2-$(r-cran-ggplot2-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-glue-$(r-cran-glue-version) \
+ $(ircrandir)/r-cran-mgcv-$(r-cran-mgcv-version) \
+ $(ircrandir)/r-cran-MASS-$(r-cran-MASS-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \
+ $(ircrandir)/r-cran-withr-$(r-cran-withr-version) \
+ $(ircrandir)/r-cran-digest-$(r-cran-digest-version) \
+ $(ircrandir)/r-cran-gtable-$(r-cran-gtable-version) \
+ $(ircrandir)/r-cran-scales-$(r-cran-scales-version) \
+ $(ircrandir)/r-cran-tibble-$(r-cran-tibble-version) \
+ $(ircrandir)/r-cran-isoband-$(r-cran-isoband-version)
+ tarball=ggplot2-$(r-cran-ggplot2-version).tar.lz
+ $(call r_cran_build, ggplot2, $(r-cran-ggplot2-version), \
+ $(r-cran-ggplot2-checksum))
+
+$(ircrandir)/r-cran-glue-$(r-cran-glue-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=glue-$(r-cran-glue-version).tar.lz
+ $(call r_cran_build, glue, $(r-cran-glue-version), \
+ $(r-cran-glue-checksum))
+
+$(ircrandir)/r-cran-gridExtra-$(r-cran-gridExtra-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-gtable-$(r-cran-gtable-version)
+ tarball=gridExtra-$(r-cran-gridExtra-version).tar.lz
+ $(call r_cran_build, gridExtra, $(r-cran-gridExtra-version), \
+ $(r-cran-gridExtra-checksum))
+
+$(ircrandir)/r-cran-gtable-$(r-cran-gtable-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=gtable-$(r-cran-gtable-version).tar.lz
+ $(call r_cran_build, gtable, $(r-cran-gtable-version), \
+ $(r-cran-gtable-checksum))
+
+$(ircrandir)/r-cran-isoband-$(r-cran-isoband-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=isoband-$(r-cran-isoband-version).tar.lz
+ $(call r_cran_build, isoband, $(r-cran-isoband-version), \
+ $(r-cran-isoband-checksum))
+
+$(ircrandir)/r-cran-labeling-$(r-cran-labeling-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=labeling-$(r-cran-labeling-version).tar.lz
+ $(call r_cran_build, labeling, $(r-cran-labeling-version), \
+ $(r-cran-labeling-checksum))
+
+$(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-glue-$(r-cran-glue-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version)
+ tarball=lifecycle-$(r-cran-lifecycle-version).tar.lz
+ $(call r_cran_build, lifecycle, $(r-cran-lifecycle-version), \
+ $(r-cran-lifecycle-checksum))
+
+$(ircrandir)/r-cran-magrittr-$(r-cran-magrittr-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=magrittr-$(r-cran-magrittr-version).tar.lz
+ $(call r_cran_build, magrittr, $(r-cran-magrittr-version), \
+ $(r-cran-magrittr-checksum))
+
+$(ircrandir)/r-cran-MASS-$(r-cran-MASS-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=MASS-$(r-cran-MASS-version).tar.lz
+ $(call r_cran_build, MASS, $(r-cran-MASS-version), \
+ $(r-cran-MASS-checksum))
+
+# The base R install already includes 'nlme' and 'Matrix'.
+# https://cran.r-project.org/web/packages/mgcv/index.html
+$(ircrandir)/r-cran-mgcv-$(r-cran-mgcv-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=mgcv-$(r-cran-mgcv-version).tar.lz
+ $(call r_cran_build, mgcv, $(r-cran-mgcv-version), \
+ $(r-cran-mgcv-checksum))
+
+$(ircrandir)/r-cran-munsell-$(r-cran-munsell-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-colorspace-$(r-cran-colorspace-version)
+ tarball=munsell-$(r-cran-munsell-version).tar.lz
+ $(call r_cran_build, munsell, $(r-cran-munsell-version), \
+ $(r-cran-munsell-checksum))
+
+# TODO: https://cran.r-project.org/web/packages/pillar/index.html
+$(ircrandir)/r-cran-pillar-$(r-cran-pillar-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-cli-$(r-cran-cli-version) \
+ $(ircrandir)/r-cran-utf8-$(r-cran-utf8-version) \
+ $(ircrandir)/r-cran-fansi-$(r-cran-fansi-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \
+ $(ircrandir)/r-cran-vctrs-$(r-cran-vctrs-version) \
+ $(ircrandir)/r-cran-crayon-$(r-cran-crayon-version) \
+ $(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version) \
+ $(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version)
+ tarball=pillar-$(r-cran-pillar-version).tar.lz
+ $(call r_cran_build, pillar, $(r-cran-pillar-version), \
+ $(r-cran-pillar-checksum))
+
+# Since we have other software packages with the name 'pkgconfig', to avoid
+# confusion with those tarballs, we have put an 'r-' prefix in the tarball
+# name. If you want to use the CRAN tarball, please correct the name
+# accordingly (as described in the comment above this group of rules).
+$(ircrandir)/r-cran-pkgconfig-$(r-cran-pkgconfig-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=r-pkgconfig-$(r-cran-pkgconfig-version).tar.lz
+ $(call r_cran_build, r-pkgconfig, $(r-cran-pkgconfig-version), \
+ $(r-cran-pkgconfig-checksum))
+
+$(ircrandir)/r-cran-RColorBrewer-$(r-cran-RColorBrewer-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=RColorBrewer-$(r-cran-RColorBrewer-version).tar.lz
+ $(call r_cran_build, RColorBrewer, $(r-cran-RColorBrewer-version), \
+ $(r-cran-RColorBrewer-checksum))
+
+$(ircrandir)/r-cran-R6-$(r-cran-R6-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=R6-$(r-cran-R6-version).tar.lz
+ $(call r_cran_build, R6, $(r-cran-R6-version), $(r-cran-R6-checksum))
+
+$(ircrandir)/r-cran-rlang-$(r-cran-rlang-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=rlang-$(r-cran-rlang-version).tar.lz
+ $(call r_cran_build, rlang, $(r-cran-rlang-version), \
+ $(r-cran-rlang-checksum))
+
+# https://cran.r-project.org/web/packages/scales/index.html
+$(ircrandir)/r-cran-scales-$(r-cran-scales-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-R6-$(r-cran-R6-version) \
+ $(ircrandir)/r-cran-farver-$(r-cran-farver-version) \
+ $(ircrandir)/r-cran-munsell-$(r-cran-munsell-version) \
+ $(ircrandir)/r-cran-labeling-$(r-cran-labeling-version) \
+ $(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version) \
+ $(ircrandir)/r-cran-viridisLite-$(r-cran-viridisLite-version) \
+ $(ircrandir)/r-cran-RColorBrewer-$(r-cran-RColorBrewer-version)
+ tarball=scales-$(r-cran-scales-version).tar.lz
+ $(call r_cran_build, scales, $(r-cran-scales-version), \
+ $(r-cran-scales-checksum))
+
+# https://cran.r-project.org/web/packages/tibble/index.html
+$(ircrandir)/r-cran-tibble-$(r-cran-tibble-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-fansi-$(r-cran-fansi-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \
+ $(ircrandir)/r-cran-vctrs-$(r-cran-vctrs-version) \
+ $(ircrandir)/r-cran-pillar-$(r-cran-pillar-version) \
+ $(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version) \
+ $(ircrandir)/r-cran-magrittr-$(r-cran-magrittr-version) \
+ $(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version) \
+ $(ircrandir)/r-cran-pkgconfig-$(r-cran-pkgconfig-version)
+ tarball=tibble-$(r-cran-tibble-version).tar.lz
+ $(call r_cran_build, tibble, $(r-cran-tibble-version), \
+ $(r-cran-tibble-checksum))
+
+$(ircrandir)/r-cran-utf8-$(r-cran-utf8-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=utf8-$(r-cran-utf8-version).tar.lz
+ $(call r_cran_build, utf8, $(r-cran-utf8-version), \
+ $(r-cran-utf8-checksum))
+
+$(ircrandir)/r-cran-vctrs-$(r-cran-vctrs-version): \
+ $(ibidir)/r-cran-$(r-cran-version) \
+ $(ircrandir)/r-cran-glue-$(r-cran-glue-version) \
+ $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \
+ $(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version)
+ tarball=vctrs-$(r-cran-vctrs-version).tar.lz
+ $(call r_cran_build, vctrs, $(r-cran-vctrs-version), \
+ $(r-cran-vctrs-checksum))
+
+$(ircrandir)/r-cran-viridisLite-$(r-cran-viridisLite-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=viridisLite-$(r-cran-viridisLite-version).tar.lz
+ $(call r_cran_build, viridisLite, $(r-cran-viridisLite-version), \
+ $(r-cran-viridisLite-checksum))
+
+$(ircrandir)/r-cran-withr-$(r-cran-withr-version): \
+ $(ibidir)/r-cran-$(r-cran-version)
+ tarball=withr-$(r-cran-withr-version).tar.lz
+ $(call r_cran_build, withr, $(r-cran-withr-version), \
+ $(r-cran-withr-checksum))
diff --git a/reproduce/software/make/xorg.mk b/reproduce/software/make/xorg.mk
index 3178cb4..6e62595 100644
--- a/reproduce/software/make/xorg.mk
+++ b/reproduce/software/make/xorg.mk
@@ -4,7 +4,7 @@
# !!!!! IMPORTANT NOTES !!!!!
#
# This Makefile will be loaded into 'high-level.mk', which is called by the
-# `./project configure' script. It is not included into the project
+# './project configure' script. It is not included into the project
# afterwards.
#
# This Makefile contains instructions to build all the Xorg-related
@@ -14,8 +14,8 @@
#
# ------------------------------------------------------------------------
#
-# Copyright (C) 2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2021-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2021-2022 Raul Infante-Sainz <infantesainz@gmail.com>
#
# This Makefile is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -57,7 +57,7 @@ $(idir)/etc/profile.d/xorg.sh: | $(idir)/etc/profile.d
$(ibidir)/util-macros-$(util-macros-version): \
$(idir)/etc/profile.d/xorg.sh \
$(ibidir)/automake-$(automake-version)
- tarball=util-macros-$(util-macros-version).tar.bz2
+ tarball=util-macros-$(util-macros-version).tar.lz
$(call import-source, $(util-macros-url), $(util-macros-checksum))
$(call gbuild, util-macros-$(util-macros-version),,$(XORG_CONFIG),V=1)
echo "util-macros (Xorg) $(util-macros-version)" > $@
@@ -65,14 +65,14 @@ $(ibidir)/util-macros-$(util-macros-version): \
# Necessary headers to define the Xorg protocols.
$(ibidir)/xorgproto-$(xorgproto-version): \
$(ibidir)/util-macros-$(util-macros-version)
- tarball=xorgproto-$(xorgproto-version).tar.bz2
+ tarball=xorgproto-$(xorgproto-version).tar.lz
$(call import-source, $(xorg-proto-url), $(xorgproto-checksum))
$(call gbuild, xorgproto-$(xorgproto-version),,$(XORG_CONFIG),V=1)
echo "xorgproto $(xorgproto-version)" > $@
# Necessary headers to define the Xorg protocols.
$(ibidir)/libxau-$(libxau-version): $(ibidir)/xorgproto-$(xorgproto-version)
- tarball=libXau-$(libxau-version).tar.bz2
+ tarball=libXau-$(libxau-version).tar.lz
$(call import-source, $(libaxu-url), $(libxau-checksum))
$(call gbuild, libXau-$(libxau-version),,$(XORG_CONFIG), V=1)
echo "libXau (Xorg) $(libxau-version)" > $@
@@ -88,7 +88,7 @@ $(ibidir)/libxdmcp-$(libxdmcp-version): $(ibidir)/libxau-$(libxau-version)
$(ibidir)/xcb-proto-$(xcb-proto-version): \
$(ibidir)/python-$(python-version) \
$(ibidir)/libxml2-$(libxml2-version)
- tarball=xcb-proto-$(xcb-proto-version).tar.xz
+ tarball=xcb-proto-$(xcb-proto-version).tar.lz
$(call import-source, $(xcb-proto-url), $(xcb-proto-checksum))
$(call gbuild, xcb-proto-$(xcb-proto-version),,$(XORG_CONFIG), V=1)
echo "XCB-proto (Xorg) $(xcb-proto-version)" > $@
@@ -98,7 +98,7 @@ $(ibidir)/libxcb-$(libxcb-version): \
$(ibidir)/libxdmcp-$(libxdmcp-version) \
$(ibidir)/xcb-proto-$(xcb-proto-version) \
$(ibidir)/libpthread-stubs-$(libpthread-stubs-version)
- tarball=libxcb-$(libxcb-version).tar.xz
+ tarball=libxcb-$(libxcb-version).tar.lz
$(call import-source, $(libxcb-url), $(libxcb-checksum))
$(call gbuild, libxcb-$(libxcb-version),, \
$(XORG_CONFIG) --without-doxygen, \
@@ -107,7 +107,7 @@ $(ibidir)/libxcb-$(libxcb-version): \
$(ibidir)/libpthread-stubs-$(libpthread-stubs-version): \
$(ibidir)/automake-$(automake-version)
- tarball=libpthread-stubs-$(libpthread-stubs-version).tar.gz
+ tarball=libpthread-stubs-$(libpthread-stubs-version).tar.lz
$(call import-source, $(libpthread-stubs-url), $(libpthread-stubs-checksum))
$(call gbuild, libpthread-stubs-$(libpthread-stubs-version),, V=1)
echo "libpthread-stubs (Xorg) $(libpthread-stubs-version)" > $@
@@ -116,11 +116,12 @@ $(ibidir)/libpthread-stubs-$(libpthread-stubs-version): \
$(ibidir)/fontconfig-$(fontconfig-version): \
$(ibidir)/gperf-$(gperf-version) \
$(ibidir)/expat-$(expat-version) \
+ $(ibidir)/python-$(python-version) \
$(ibidir)/libxml2-$(libxml2-version) \
$(ibidir)/freetype-$(freetype-version) \
$(ibidir)/util-linux-$(util-linux-version)
# Import the source.
- tarball=fontconfig-$(fontconfig-version).tar.bz2
+ tarball=fontconfig-$(fontconfig-version).tar.lz
$(call import-source, $(fontconfig-url), $(fontconfig-checksum))
# Add the extra environment variables for using 'libuuid' of
@@ -139,27 +140,27 @@ $(ibidir)/fontconfig-$(fontconfig-version): \
$(ibidir)/xtrans-$(xtrans-version): \
$(ibidir)/libxcb-$(libxcb-version) \
$(ibidir)/fontconfig-$(fontconfig-version)
- tarball=xtrans-$(xtrans-version).tar.bz2
+ tarball=xtrans-$(xtrans-version).tar.lz
$(call import-source, $(xtrans-url), $(xtrans-checksum))
$(call gbuild, xtrans-$(xtrans-version),,$(XORG_CONFIG), V=1)
echo "xtrans (Xorg) $(xtrans-version)" > $@
$(ibidir)/libx11-$(libx11-version): $(ibidir)/xtrans-$(xtrans-version)
- tarball=libX11-$(libx11-version).tar.bz2
+ tarball=libX11-$(libx11-version).tar.lz
$(call import-source, $(libx11-url), $(libx11-checksum))
$(call gbuild, libX11-$(libx11-version),,$(XORG_CONFIG), \
-j$(numthreads) V=1)
echo "X11 library $(libx11-version)" > $@
$(ibidir)/libxext-$(libxext-version): $(ibidir)/libx11-$(libx11-version)
- tarball=libXext-$(libxext-version).tar.bz2
+ tarball=libXext-$(libxext-version).tar.lz
$(call import-source, $(libxext-url), $(libxext-checksum))
$(call gbuild, libXext-$(libxext-version),,$(XORG_CONFIG), \
-j$(numthreads) V=1)
echo "libXext $(libxext-version)" > $@
$(ibidir)/libice-$(libice-version): $(ibidir)/libxext-$(libxext-version)
- tarball=libICE-$(libice-version).tar.bz2
+ tarball=libICE-$(libice-version).tar.lz
$(call import-source, $(libice-url), $(libice-checksum))
$(call gbuild, libICE-$(libice-version),, \
$(XORG_CONFIG) ICE_LIBS=-lpthread, \
@@ -167,14 +168,14 @@ $(ibidir)/libice-$(libice-version): $(ibidir)/libxext-$(libxext-version)
echo "libICE $(libice-version)" > $@
$(ibidir)/libsm-$(libsm-version): $(ibidir)/libice-$(libice-version)
- tarball=libSM-$(libsm-version).tar.bz2
+ tarball=libSM-$(libsm-version).tar.lz
$(call import-source, $(libsm-url), $(libsm-checksum))
$(call gbuild, libSM-$(libsm-version),, \
$(XORG_CONFIG), -j$(numthreads) V=1)
echo "libSM $(libsm-version)" > $@
$(ibidir)/libxt-$(libxt-version): $(ibidir)/libsm-$(libsm-version)
- tarball=libXt-$(libxt-version).tar.bz2
+ tarball=libXt-$(libxt-version).tar.lz
$(call import-source, $(libxt-url), $(libxt-checksum))
$(call gbuild, libXt-$(libxt-version),, \
$(XORG_CONFIG), -j$(numthreads) V=1)
diff --git a/reproduce/software/patches/README.md b/reproduce/software/patches/README.md
new file mode 100644
index 0000000..804d7ec
--- /dev/null
+++ b/reproduce/software/patches/README.md
@@ -0,0 +1,6 @@
+Patches to apply to software source
+===================================
+
+This directory is for keeping patches that may be necessary for some
+versions of some software. So it may be empty in some instances (when no
+software in that commit needs a patch).
diff --git a/reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch b/reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch
deleted file mode 100644
index 94dcab5..0000000
--- a/reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch
+++ /dev/null
@@ -1,37 +0,0 @@
----
- mpi/libmpiwrap.c | 12 +++++++++++-
- 1 file changed, 11 insertions(+), 1 deletion(-)
- Patch by Samuel Thibault:
- https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=946329;msg=10
- and hacked further by Boud Roukema 2020-05-10.
---- a/mpi/libmpiwrap.c
-+++ b/mpi/libmpiwrap.c
-@@ -278,8 +278,12 @@ static void showTy ( FILE* f, MPI_Dataty
- else if (ty == MPI_LONG_INT) fprintf(f,"LONG_INT");
- else if (ty == MPI_SHORT_INT) fprintf(f,"SHORT_INT");
- else if (ty == MPI_2INT) fprintf(f,"2INT");
-+# if defined(MPI_UB_ENABLED_IN_MPI1)
- else if (ty == MPI_UB) fprintf(f,"UB");
-+# endif
-+# if defined(MPI_LB_ENABLED_IN_MPI1)
- else if (ty == MPI_LB) fprintf(f,"LB");
-+# endif
- # if defined(MPI_WCHAR)
- else if (ty == MPI_WCHAR) fprintf(f,"WCHAR");
- # endif
-@@ -733,8 +737,14 @@ void walk_type ( void(*f)(void*,long), c
- f(base + offsetof(Ty,loc), sizeof(int));
- return;
- }
-- if (ty == MPI_LB || ty == MPI_UB)
-+#if defined(MPI_LB_ENABLED_IN_MPI1)
-+ if (ty == MPI_LB)
-+ return; /* have zero size, so nothing needs to be done */
-+#endif
-+#if defined(MPI_UB_ENABLED_IN_MPI1)
-+ if (ty == MPI_UB)
- return; /* have zero size, so nothing needs to be done */
-+#endif
- goto unhandled;
- /*NOTREACHED*/
- }
diff --git a/reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch b/reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch
deleted file mode 100644
index 12b50a2..0000000
--- a/reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-Index: valgrind-3.15.0/mpi/Makefile.am
-===================================================================
- Patch by Samuel Thibault:
- https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=946329;msg=22
---- valgrind-3.15.0.orig/mpi/Makefile.am
-+++ valgrind-3.15.0/mpi/Makefile.am
-@@ -42,14 +42,14 @@ libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@
- libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_CPPFLAGS = -I$(top_srcdir)/include
- libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_CFLAGS = \
- $(CFLAGS_MPI) $(MPI_FLAG_M3264_PRI) -Wno-deprecated-declarations
--libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_LDFLAGS = $(LDFLAGS_MPI)
-+libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_LDADD = $(LDFLAGS_MPI)
- endif
- if BUILD_MPIWRAP_SEC
- libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_SOURCES = libmpiwrap.c
- libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_CPPFLAGS = -I$(top_srcdir)/include
- libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_CFLAGS = \
- $(CFLAGS_MPI) $(MPI_FLAG_M3264_SEC) -Wno-deprecated-declarations
--libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_LDFLAGS = $(LDFLAGS_MPI)
-+libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_LDADD = $(LDFLAGS_MPI)
- endif
-
- #----------------------------------------------------------------------------
diff --git a/reproduce/software/shell/bashrc.sh b/reproduce/software/shell/bashrc.sh
index 23845d6..6bb871b 100755
--- a/reproduce/software/shell/bashrc.sh
+++ b/reproduce/software/shell/bashrc.sh
@@ -3,10 +3,10 @@
# To have better control over the environment of each analysis step (Make
# recipe), besides having environment variables (directly included from
# Make), it may also be useful to have a Bash startup file (this file). All
-# of the Makefiles set this file as the `BASH_ENV' environment variable, so
+# of the Makefiles set this file as the 'BASH_ENV' environment variable, so
# it is loaded into all the Make recipes within the project.
#
-# The special `PROJECT_STATUS' environment variable is defined in every
+# The special 'PROJECT_STATUS' environment variable is defined in every
# top-level Makefile of the project. It defines the state of the Make
# that is calling this script. It can have three values:
#
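As a rough illustration of this mechanism (the recipe command below is hypothetical), any non-interactive Bash first sources the file named in 'BASH_ENV' before running its command:

    # Minimal sketch: Make exports BASH_ENV, so the Bash it starts for a
    # recipe line sources bashrc.sh before executing the command itself.
    export BASH_ENV=reproduce/software/shell/bashrc.sh
    bash -c 'echo "PROJECT_STATUS for this recipe is: $PROJECT_STATUS"'
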
@@ -29,7 +29,7 @@
# versions.
#
#
-# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2019-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
diff --git a/reproduce/software/shell/configure.sh b/reproduce/software/shell/configure.sh
index e15a8e9..07381e6 100755
--- a/reproduce/software/shell/configure.sh
+++ b/reproduce/software/shell/configure.sh
@@ -2,8 +2,9 @@
#
# Necessary preparations/configurations for the reproducible project.
#
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-# Copyright (C) 2021 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2021-2022 Raul Infante-Sainz <infantesainz@gmail.com>
+# Copyright (C) 2022 Pedram Ashofteh Ardakani <pedramardakani@pm.me>
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -112,7 +113,7 @@ absolute_dir ()
# Check file permission handling (POSIX-compatibility)
# ----------------------------------------------------
#
-# Check if a `given' directory handles permissions as expected.
+# Check if a 'given' directory handles permissions as expected.
#
# This is to prevent a known bug in the NTFS filesystem that prevents
# proper installation of Perl, and probably some other packages. This
@@ -120,15 +121,15 @@ absolute_dir ()
# file, and examines whether the given directory handles the file
# permissions as expected.
#
-# Returns `0' if everything is fine, and `255' otherwise. Choosing `0' is
-# to mimic the `$ echo $?' behavior, while choosing `255' is to prevent
+# Returns '0' if everything is fine, and '255' otherwise. Choosing '0' is
+# to mimic the '$ echo $?' behavior, while choosing '255' is to prevent
# misunderstanding 0 and 1 as true and false.
#
# ===== CAUTION! ===== #
#
-# Since there is a `set -e' before running this function, the whole script
-# stops and exits IF the `check_permission' (or any other function) returns
-# anything OTHER than `0'! So, only use this function as a test. Here's a
+# Since there is a 'set -e' before running this function, the whole script
+# stops and exits IF the 'check_permission' (or any other function) returns
+# anything OTHER than '0'! So, only use this function as a test. Here's a
# minimal example:
#
# if $(check_permission $some_directory) ; then
@@ -136,7 +137,7 @@ absolute_dir ()
# fi ;
check_permission ()
{
- # Make a `junk' file, activate its executable flag and record its
+ # Make a 'junk' file, activate its executable flag and record its
# permissions generally.
local junkfile="$1"/check_permission_tmp_file
rm -f "$junkfile"
@@ -158,7 +159,7 @@ check_permission ()
return 1
else
# Setting permission SUCCESSFUL
- return 0
+ return 0
fi
}
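
A minimal sketch of the junk-file probe described above (the exact file name and checks are illustrative, not the script's own sequence):

    # Create a file, set its executable bit and verify that the filesystem
    # actually kept it; problematic mounts (e.g. NTFS) may silently drop it.
    check_permission_sketch() {
        junkfile="$1"/check_permission_tmp_file
        rm -f "$junkfile"
        echo "junk" > "$junkfile"
        chmod +x "$junkfile"
        if [ -x "$junkfile" ]; then rm -f "$junkfile"; return 0
        else                        rm -f "$junkfile"; return 1
        fi
    }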
@@ -309,7 +310,9 @@ fi
# system. Here, it is checked that this is the case, and if not, warn the user
# about not having Xcode already installed.
if [ x$on_mac_os = xyes ]; then
- xcode=$(which xcodebuild)
+
+ # 'which' isn't in POSIX, so we are using 'command -v' instead.
+ xcode=$(command -v xcodebuild)
if [ x$xcode != x ]; then
xcode_version=$(xcodebuild -version | grep Xcode)
echo " "
@@ -475,8 +478,8 @@ fi
# See if we need the dynamic-linker (-ldl)
# ----------------------------------------
#
-# Some programs (like Wget) need dynamic loading (using `libdl'). On
-# GNU/Linux systems, we'll need the `-ldl' flag to link such programs. But
+# Some programs (like Wget) need dynamic loading (using 'libdl'). On
+# GNU/Linux systems, we'll need the '-ldl' flag to link such programs. But
# Mac OS doesn't need any explicit linking. So we'll check here to see if
# it is present (thus necessary) or not.
cat > $testsource <<EOF
@@ -508,7 +511,7 @@ fi
# programs will go and find their necessary libraries on the host system.
#
# Another good advantage of shared libraries is that we can actually use
-# the shared library tool of the system (`ldd' with GNU C Library) and see
+# the shared library tool of the system ('ldd' with GNU C Library) and see
# exactly where each linked library comes from. But in static building,
# unless you follow the build closely, it's not easy to see if the source of
# the library came from the system or our build.
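
A hedged sketch of the kind of probe behind the '-ldl' check above, followed by an 'ldd' inspection of a shared build (the test program, compiler name and paths are illustrative):

    # Does linking a program that calls dlopen() need '-ldl' on this host?
    printf '#include <dlfcn.h>\nint main(void){return !dlopen("libc.so",RTLD_LAZY);}\n' \
           > ldl-test.c
    if gcc ldl-test.c -o ldl-test 2>/dev/null; then needs_ldl=""
    else                                            needs_ldl="-ldl"
    fi
    rm -f ldl-test.c ldl-test

    # With shared linking, 'ldd' reports which file provided each library
    # (assuming the project's usual '.local' installed-software link).
    ldd .local/bin/wget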
@@ -545,7 +548,7 @@ fi
#
# On some systems (in particular Debian-based OSs), the static C library
# and necessary headers are in a non-standard place, and we can't build GCC. So
-# we need to find them first. The `sys/cdefs.h' header is also in a
+# we need to find them first. The 'sys/cdefs.h' header is also in a
# similarly different location.
sys_cpath=""
sys_library_path=""
@@ -576,7 +579,7 @@ fi
# See if a link-able static C library exists
# ------------------------------------------
#
-# A static C library and the `sys/cdefs.h' header are necessary for
+# A static C library and the 'sys/cdefs.h' header are necessary for
# building GCC.
if [ x"$host_cc" = x0 ]; then
echo; echo; echo "Checking if static C library is available...";
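
The real check is more involved, but a hedged sketch of the core static-linking probe is:

    # Try to statically link a trivial program that includes 'sys/cdefs.h';
    # if this fails, GCC cannot be built from source on this host.
    printf '#include <sys/cdefs.h>\nint main(void){return 0;}\n' > static-test.c
    if gcc -static static-test.c -o static-test 2>/dev/null; then
        host_static_ok=yes       # illustrative variable name
    else
        host_static_ok=no
    fi
    rm -f static-test.c static-test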
@@ -752,9 +755,9 @@ EOF
# What to do with possibly existing configuration file
# ----------------------------------------------------
#
-# `LOCAL.conf' is the top-most local configuration for the project. If it
+# 'LOCAL.conf' is the top-most local configuration for the project. If it
# already exists when this script is run, we'll make a copy of it as backup
-# (for example the user might have ran `./project configure' by mistake).
+# (for example the user might have ran './project configure' by mistake).
printnotice=yes
rewritepconfig=yes
if [ -f $pconf ]; then
@@ -807,16 +810,18 @@ fi
# need to check the host's available tool for downloading at this step.
if [ $rewritepconfig = yes ]; then
if type wget > /dev/null 2>/dev/null; then
- name=$(which wget)
+
+ # 'which' isn't in POSIX, so we are using 'command -v' instead.
+ name=$(command -v wget)
# By default Wget keeps the remote file's timestamp, so we'll have
# to disable it manually.
downloader="$name --no-use-server-timestamps -O";
elif type curl > /dev/null 2>/dev/null; then
- name=$(which curl)
+ name=$(command -v curl)
# - cURL doesn't keep the remote file's timestamp by default.
- # - With the `-L' option, we tell cURL to follow redirects.
+ # - With the '-L' option, we tell cURL to follow redirects.
downloader="$name -L -o"
else
cat <<EOF
@@ -913,7 +918,7 @@ EOF
# If it was newly created, it will be empty, so delete it.
if ! [ "$(ls -A $bdir)" ]; then rm --dir "$bdir"; fi
- # Inform the user that this is not acceptable and reset `bdir'.
+ # Inform the user that this is not acceptable and reset 'bdir'.
bdir=
echo " ** The build-directory cannot be under the source-directory."
fi
@@ -938,10 +943,10 @@ EOF
fi
fi
- # If everything is still fine so far, see if we're able to
- # manipulate file permissions in the directory's filesystem and if
- # so, see if there is atleast 5GB free space.
- if ! [ x"$bdir" = x ]; then
+ # If everything is still fine so far, see if we're able to
+ # manipulate file permissions in the directory's filesystem and if
+ # so, see if there is at least 5GB free space.
+ if ! [ x"$bdir" = x ]; then
if ! $(check_permission "$bdir"); then
# Unable to handle permissions well
bdir=
@@ -960,7 +965,7 @@ EOF
fi
# If the build directory was good, the loop will stop, if not,
- # reset `build_dir' to blank, so it continues asking for another
+ # reset 'build_dir' to blank, so it continues asking for another
# directory and let the user know that they must select a new
# directory.
if [ x"$bdir" = x ]; then
@@ -1118,7 +1123,7 @@ else
if [ x"$downloader" = x ]; then novalue="$novalue"DOWNLOADER; fi
if [ x"$novalue" != x ]; then verr=1; err=1; fi
- # Make sure `bdir' is an absolute path and it exists.
+ # Make sure 'bdir' is an absolute path and it exists.
berr=0
ierr=0
bdir="$(absolute_dir "$inbdir")"
@@ -1202,6 +1207,10 @@ if ! [ -d "$ibidir" ]; then mkdir "$ibidir"; fi
ipydir="$verdir"/python
if ! [ -d "$ipydir" ]; then mkdir "$ipydir"; fi
+# R module versions and citation.
+ircrandir="$verdir"/r-cran
+if ! [ -d "$ircrandir" ]; then mkdir "$ircrandir"; fi
+
# Used software BibTeX entries.
ictdir="$verdir"/cite
if ! [ -d "$ictdir" ]; then mkdir "$ictdir"; fi
@@ -1210,21 +1219,14 @@ if ! [ -d "$ictdir" ]; then mkdir "$ictdir"; fi
itidir="$verdir"/tex
if ! [ -d "$itidir" ]; then mkdir "$itidir"; fi
-# Temporary software un-packing/build directory: if the host has the
-# standard `/dev/shm' mounting-point, we'll do it in shared memory (on the
-# RAM), to avoid harming/over-using the HDDs/SSDs. The RAM of most systems
-# today (>8GB) is large enough for the parallel building of the software.
-#
-# For the name of the directory under `/dev/shm' (for this project), we'll
-# use the names of the two parent directories to the current/running
-# directory, separated by a `-' instead of `/'. We'll then appended that
-# with the user's name (in case multiple users may be working on similar
-# project names). Maybe later, we can use something like `mktemp' to add
-# random characters to this name and make it unique to every run (even for
-# a single user).
-tmpblddir="$sdir"/build-tmp
-rm -rf "$tmpblddir"/* "$tmpblddir" # If its a link, we need to empty its
- # contents first, then itself.
+# Some software install their libraries in '$(idir)/lib64'. But all other
+# libraries are in '$(idir)/lib'. Since Maneage's build is only for a
+# single architecture, we can set the '$(idir)/lib64' as a symbolic link to
+# '$(idir)/lib' so all the libraries are always available in the same
+# place.
+instlibdir="$instdir"/lib
+if ! [ -d "$instlibdir" ]; then mkdir "$instlibdir"; fi
+ln -fs "$instlibdir" "$instdir"/lib64
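
For example, once configuration is complete, the link can be confirmed like this (assuming the project's usual '.local' link to the installed-software directory):

    ls -ld .local/lib64     # expected: a symbolic link pointing to '.../lib'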
@@ -1297,29 +1299,106 @@ rm -f .gnuastro
-# Set the top-level shared memory location.
+
+
+# Software building directory (possibly in RAM)
+# ---------------------------------------------
+#
+# Building the software for the project will need the creation of many
+# small temporary files that will ultimately be deleted. To avoid harming
+# HDDs/SSDs and improve speed, it is therefore better to build them in the
+# RAM when possible. The RAM of most systems today (>8GB) is large enough
+# for the parallel building of the software.
+
+# Set the top-level shared memory location. Currently there is only one
+# standard location (for GNU/Linux OSs), so doing this check here and the
+# main job below may seem redundant. However, it is written separately from
+# the main code below because later, we expect to add more possible
+# mounting locations (for other OSs).
if [ -d /dev/shm ]; then shmdir=/dev/shm
else shmdir=""
fi
-# If a shared memory mounted directory exists and there is enough space
-# there (in RAM), build a temporary directory for this project.
-needed_space=2000000
+# If a shared memory mounted directory exists and has the necessary
+# conditions, set that directory to build software.
if [ x"$shmdir" != x ]; then
+
+ # Make sure it has enough space.
+ needed_space=2000000
available_space=$(df "$shmdir" | awk 'NR==2{print $4}')
if [ $available_space -gt $needed_space ]; then
+
+ # Set the Maneage-specific directory within the shared
+ # memory. We'll use the names of the two parent directories to the
+ # current/running directory, separated by a '-' instead of
+ # '/'. We'll then append the user's name to that (in case
+ # multiple users may be working on similar project names).
+ #
+ # Maybe later, we can use something like 'mktemp' to add random
+ # characters to this name and make it unique to every run (even for
+ # a single user).
dirname=$(pwd | sed -e's/\// /g' \
- | awk '{l=NF-1; printf("%s-%s",$l, $NF)}')
+ | awk '{l=NF-1; printf("%s-%s", $l, $NF)}')
tbshmdir="$shmdir"/"$dirname"-$(whoami)
- if ! [ -d "$tbshmdir" ]; then mkdir "$tbshmdir"; fi
+
+ # Try to make the directory if it does not yet exist. A failed
+ # directory creation will be tested for a few lines later, when
+ # testing for the existence and executability of a test file.
+ if ! [ -d "$tbshmdir" ]; then (mkdir "$tbshmdir" || true); fi
+
+ # Some systems may mount '/dev/shm' without permission for ordinary
+ # users to execute programs in it. We thus need to check that the
+ # device allows execution within this directory by this user.
+ shmexecfile="$tbshmdir"/shm-execution-check.sh
+ rm -f $shmexecfile # We also don't want any existing flags.
+
+ # Create the file to be executed, but do not fail fatally if it
+ # cannot be created. We will check a few lines later if the file
+ # really exists.
+ (cat > "$shmexecfile" <<EOF || true)
+#!/bin/sh
+echo "This file successfully executed."
+EOF
+
+ # If the file was successfully created, then make the file
+ # executable and see if it runs. If not, set 'tbshmdir' to an empty
+ # string so it is not used in later steps. In any case, delete the
+ # temporary file afterwards.
+ #
+ # We aren't adding '&> /dev/null' after the execution command
+ # because it can produce false failures randomly on some systems.
+ if [ -e "$shmexecfile" ]; then
+
+ # Add the executable flag.
+ chmod +x "$shmexecfile"
+
+ # The following line tries to execute the file.
+ if "$shmexecfile"; then
+ # Successful execution. The colon is a "no-op" (no
+ # operation) shell command.
+ :
+ else
+ tbshmdir=""
+ fi
+ rm "$shmexecfile"
+ else
+ tbshmdir=""
+ fi
fi
else
tbshmdir=""
fi
-# If a shared memory directory was created set `build-tmp' to be a
-# symbolic link to it. Otherwise, just build the temporary build
-# directory under the project build directory.
+
+
+
+
+# If a shared memory directory was created, set the software building
+# directory to be a symbolic link to it. Otherwise, just build the
+# temporary build directory under the project's build directory.
+tmpblddir="$sdir"/build-tmp
+rm -rf "$tmpblddir"/* "$tmpblddir" # If it is a link, we need to empty
+ # its contents first, then itself.
if [ x"$tbshmdir" = x ]; then mkdir "$tmpblddir";
else ln -s "$tbshmdir" "$tmpblddir";
fi
@@ -1330,6 +1409,9 @@ fi
# Inform the user that the build process is starting
# -------------------------------------------------
+#
+# Everything is ready, let the user know that the building is going to
+# start.
if [ $printnotice = yes ]; then
tsec=10
cat <<EOF
@@ -1338,12 +1420,13 @@ if [ $printnotice = yes ]; then
Building dependencies ...
-------------------------
-Necessary dependency programs and libraries will be built in
+Necessary dependency programs and libraries will be installed in
$sdir/installed
-NOTE: the built software will NOT BE INSTALLED on your system (no root
-access is required). They are only for local usage by this project.
+NOTE: the built software will NOT BE INSTALLED in standard places of your
+OS (so no root access is required). They are only for local usage by this
+project.
**TIP**: you can see which software are being installed at every moment
with the following command. See "Inspecting status" section of
@@ -1392,7 +1475,7 @@ fi
# See if the linker accepts -Wl,-rpath-link
# -----------------------------------------
#
-# `-rpath-link' is used to write the information of the linked shared
+# '-rpath-link' is used to write the information of the linked shared
# library into the shared object (library or program). But some versions of
# LLVM's linker don't accept it and can cause problems.
#
@@ -1425,13 +1508,13 @@ rm -rf $compilertestdir
-# Paths needed by the host compiler (only for `basic.mk')
+# Paths needed by the host compiler (only for 'basic.mk')
# -------------------------------------------------------
#
# At the end of the basic build, we need to build GCC. But GCC will build
# in multiple phases, making its own simple compiler in order to build
# itself completely. The intermediate/simple compiler doesn't recognize
-# some system specific locations like `/usr/lib/ARCHITECTURE' that some
+# some system specific locations like '/usr/lib/ARCHITECTURE' that some
# operating systems use. We thus need to tell the intermediate compiler
# where its necessary libraries and headers are.
if [ x"$sys_library_path" != x ]; then
@@ -1468,7 +1551,7 @@ fi
# which will download the DOI-resolved webpage, and extract the Zenodo-URL
# of the most recent version from there (using the 'coreutils' tarball as
# an example, the directory part of the URL for all the other software are
-# the same). This is not done if the option `--debug' is used.
+# the same). This is not done if the option '--debug' is used.
zenodourl=""
user_backup_urls=""
zenodocheck=.build/software/zenodo-check.html
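
A rough sketch of the extraction described above (the DOI and the matching pattern are illustrative; only the '$zenodocheck' file name is taken from the script):

    # Resolve the concept-DOI page and keep the newest record's 'files' URL.
    wget -O "$zenodocheck" https://doi.org/10.5281/zenodo.EXAMPLE   # illustrative DOI
    zenodourl=$(sed -n 's|.*href="\(https://zenodo.org/record/[0-9]*\)".*|\1/files|p' \
                    "$zenodocheck" | head -n 1)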
@@ -1501,6 +1584,7 @@ user_backup_urls="$user_backup_urls $zenodourl"
# (compression program), GNU Make (that 'basic.mk' is written in), Dash
# (minimal Bash-like shell) and Flock (to lock files and enable serial
# download).
+export on_mac_os
./reproduce/software/shell/pre-make-build.sh \
"$bdir" "$ddir" "$downloader" "$user_backup_urls"
@@ -1610,17 +1694,21 @@ is not used at all during the analysis.
Therefore, if you don't need the final PDF, and just want to do the
analysis, you can safely ignore this warning and continue.
-If you later have internet access and would like to add TeX live to your
-project, please delete the respective files, then re-run configure as shown
-below.
+If you later have internet access and would like to add TeX Live to your
+project, then please delete the following two files:
rm .local/version-info/tex/texlive-ready-tlmgr
+ rm .build/software/tarballs/install-tl-unx.tar.gz
+
+and re-run configure:
+
./project configure -e
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
EOF
+ sleep 10 # increase the chance that an interactive user reads this message
fi
@@ -1634,7 +1722,7 @@ fi
# software.
prepare_name_version ()
{
- # First see if the (possible) `*' in the input arguments corresponds to
+ # First see if the (possible) '*' in the input arguments corresponds to
# anything. Note that some of the given directories may be empty (no
# software installed).
hasfiles=0
@@ -1735,9 +1823,9 @@ hw_class_fixed="$(echo $hw_class | sed -e 's/_/\\_/')"
# ---------------------------------
#
# By the time the script reaches here the temporary software build
-# directory should be empty, so just delete it. Note `tmpblddir' may be a
+# directory should be empty, so just delete it. Note 'tmpblddir' may be a
# symbolic link to shared memory. So, to work in any scenario, first delete
-# the contents of the directory (if it has any), then delete `tmpblddir'.
+# the contents of the directory (if it has any), then delete 'tmpblddir'.
.local/bin/rm -rf $tmpblddir/* $tmpblddir
diff --git a/reproduce/software/shell/git-post-checkout b/reproduce/software/shell/git-post-checkout
index 7a90108..d49504d 100755
--- a/reproduce/software/shell/git-post-checkout
+++ b/reproduce/software/shell/git-post-checkout
@@ -1,14 +1,14 @@
#!@BINDIR@/bash
#
# The example hook script to store the metadata information of version
-# controlled files (with each commit) using the `metastore' program.
+# controlled files (with each commit) using the 'metastore' program.
#
# Copyright (C) 2016 Przemyslaw Pawelczyk <przemoc@gmail.com>
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
-# This script is taken from the `examples/hooks/pre-commit' file of the
-# `metastore' package (installed within the project, with an MIT license
-# for copyright). We have just changed the name of the `MSFILE' and also
+# This script is taken from the 'examples/hooks/pre-commit' file of the
+# 'metastore' package (installed within the project, with an MIT license
+# for copyright). We have just changed the name of the 'MSFILE' and also
# set special characters for the installation location of meta-store so our
# own installation is found by Git.
#
diff --git a/reproduce/software/shell/git-pre-commit b/reproduce/software/shell/git-pre-commit
index 85d3474..10ad710 100755
--- a/reproduce/software/shell/git-pre-commit
+++ b/reproduce/software/shell/git-pre-commit
@@ -1,10 +1,10 @@
#!@BINDIR@/bash
#
# The example hook script to store the metadata information of version
-# controlled files (with each commit) using the `metastore' program.
+# controlled files (with each commit) using the 'metastore' program.
#
# Copyright (C) 2016 Przemyslaw Pawelczyk <przemoc@gmail.com>
-# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# WARNING:
#
@@ -17,9 +17,9 @@
# git reset HEAD -- .metadata
# git checkout HEAD -- .metadata
#
-# This script is taken from the `examples/hooks/pre-commit' file of the
-# `metastore' package (installed within the project, with an MIT license
-# for copyright). Here, the name of the `MSFILE' and also set special
+# This script is taken from the 'examples/hooks/pre-commit' file of the
+# 'metastore' package (installed within the project, with an MIT license
+# for copyright). Here, the name of the 'MSFILE' and also set special
# characters for the installation location of meta-store so our own
# installation is found by Git.
#
diff --git a/reproduce/software/shell/pre-make-build.sh b/reproduce/software/shell/pre-make-build.sh
index a033963..e7de93d 100755
--- a/reproduce/software/shell/pre-make-build.sh
+++ b/reproduce/software/shell/pre-make-build.sh
@@ -2,7 +2,7 @@
#
# Very basic tools necessary to start Maneage's default building.
#
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -185,6 +185,19 @@ build_program() {
# All others accept the configure script.
./configure --prefix="$instdir" $configoptions
+ # In Flock 0.4.0 there is a crash that can be fixed by simply
+ # replacing '%1u' with '%ld' on GNU/Linux and '%d' on macOS. This
+ # has been reported to flock maintainers:
+ # https://github.com/discoteq/flock/issues/33
+ if [ x$progname = xflock ]; then
+ case $on_mac_os in
+ yes) sed -e's/\%1u/\%d/' src/flock.c > src/flock-new.c;;
+ no) sed -e's/\%1u/\%ld/' src/flock.c > src/flock-new.c;;
+ *) echo "pre-make-build.sh: '$on_mac_os' unrecognized value for on_mac_os";;
+ esac
+ mv src/flock-new.c src/flock.c
+ fi
+
+ # To build GNU Make, we don't want to assume the existence of a
# Make program, so we use its 'build.sh' script and its own built
# 'make' program to install itself.
@@ -192,7 +205,7 @@ build_program() {
/bin/sh build.sh
./make install
else
- make
+ make V=1
make install
fi
fi
@@ -235,13 +248,20 @@ build_program
# '--disable-dependency-tracking' configure-time option is necessary so
# Make doesn't check for an existing 'make' implementation (recall that we
# aren't assuming any 'make' on the host).
+#
+# If GNU Guile is already present on the host system, Make will try to link
+# with it, and this will cause dependency problems later. So we have
+# disabled Guile. If a project needs the Guile extensions of Make, we need
+# to add a build rule for Guile in Maneage, with a special Guile-enabled
+# Make that has a different executable name (using the '--program-prefix='
+# configure option) from the "default" make (which is this one!).
progname="make"
progname_tex="GNU Make"
url=$(awk '/^'$progname'-url/{print $3}' $urlfile)
version=$(awk '/^'$progname'-version/{print $3}' $versionsfile)
tarball=$progname-$version.tar.lz
download_tarball
-build_program --disable-dependency-tracking
+build_program "--disable-dependency-tracking --without-guile"
@@ -274,11 +294,11 @@ fi
# -----
#
# Flock (or file-lock) is necessary to serialize operations when
-# necessary. GNU/Linux machines have it as part of their `util-linux'
+# necessary. GNU/Linux machines have it as part of their 'util-linux'
# programs. But to be consistent in non-GNU/Linux systems, we will be using
# our own build.
#
-# The reason that `flock' is built here is that generally the building of
+# The reason that 'flock' is built here is that generally the building of
# software is done in parallel, but we need it to serialize the download
# process of the software tarballs to avoid network complications when too
# many simultaneous download commands are called.
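
A hedged usage example (file names are illustrative): by taking a shared lock before every download, parallel Make recipes fetch one tarball at a time.

    # The second and later callers block until the first releases the lock.
    flock /path/to/download.lock \
          wget -O some-tarball.tar.lz https://example.org/some-tarball.tar.lz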
diff --git a/reproduce/software/shell/run-parts.in b/reproduce/software/shell/run-parts.in
index 7e649b1..a6db9e0 100755
--- a/reproduce/software/shell/run-parts.in
+++ b/reproduce/software/shell/run-parts.in
@@ -10,8 +10,8 @@
# However, it didn't have a copyright statement. So one is being added
# here.
#
-# Copyright (C) 2021 Authors mentioned above.
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2022 Authors mentioned above.
+# Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
diff --git a/reproduce/software/shell/tarball-prepare.sh b/reproduce/software/shell/tarball-prepare.sh
new file mode 100755
index 0000000..ccc9318
--- /dev/null
+++ b/reproduce/software/shell/tarball-prepare.sh
@@ -0,0 +1,181 @@
+#!/bin/bash
+
+# Script to convert all files (tarballs in any format that 'tar'
+# recognizes) within an 'idir' to a unified '.tar.lz' format.
+#
+# The input file names are assumed to be formatted as 'NAME_VERSION', and
+# only '.tar.*' suffixes are currently assumed for the names (in the 'sed'
+# command below). Please modify/generalize accordingly.
+#
+# Each source tarball is unpacked in an empty temporary directory (with a
+# 'tmpunpack' suffix) and its top directory is renamed to NAME-VERSION. So
+# irrespective of the original tarball's top directory name, the resulting
+# tarball's top directory is always named NAME-VERSION.
+#
+# Discussion: https://savannah.nongnu.org/task/?15699
+#
+# Copyright (C) 2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+# Copyright (C) 2022 Pedram Ashofteh Ardakani <pedramardakani@pm.me>
+# Released under GNU GPLv3+
+
+# Abort the script in case of an error.
+set -e
+
+
+
+
+
+# Default arguments
+odir=
+idir=
+quiet=
+basedir=$PWD
+
+
+# The --help output
+print_help() {
+ cat <<EOF
+Usage: $0 [OPTIONS]
+
+Low-level script to create Maneage-standard tarballs.
+
+ -o, --output-dir Target directory to write the packed tarballs.
+ Current: $odir
+
+
+ -i, --input-dir Directory containing original tarballs.
+ Current: $idir
+
+ -q, --quiet Suppress logging information. Only print the
+ final packed file and its sha512sum.
+
+Maneage URL: https://maneage.org
+
+Report bugs: https://savannah.nongnu.org/bugs/?group=reproduce
+EOF
+}
+
+
+
+
+# Parse the arguments
+while [ $# -gt 0 ]
+do
+ case $1 in
+ -q|--quiet) quiet=1; shift;;
+ -h|--help|-'?') print_help; exit 0;;
+ -i|--input-dir)
+ # Remove the trailing '/' introduced by autocomplete
+ idir=$(echo "$2" | sed 's|/$||');
+ shift; # past argument
+ shift;; # past value
+ -o|--output-dir)
+ # Remove the trailing '/' introduced by autocomplete
+ odir=$(echo "$2" | sed 's|/$||');
+ shift; # past argument
+ shift;; # past value
+ *) echo "$0: unknown option '$1'"; exit 1;;
+ esac
+done
+
+
+
+
+# Extract the 'absolute path' to input and output directories. Working with
+# relative paths is a great source of confusion and unwanted side-effects
+# like moving/removing files by accident.
+if [ ! -d "$idir" ]; then
+ echo "$0: please pass the input directory (option --input-dir or -i)."
+ exit 1
+else
+ idir=$(realpath $idir)
+fi
+
+if [ ! -d "$odir" ]; then
+ echo "$0: please pass the output directory (option --output-dir or -o)."
+ exit 1
+else
+ odir=$(realpath $odir)
+fi
+
+
+
+
+
+# Unpack and pack all files in the '$idir'
+# ----------------------------------------
+allfiles=$(ls $idir | sort)
+
+# Let the user know the number of tarballs if not in quiet mode
+if [ -z $quiet ]; then
+ nfiles=$(ls $idir | wc -l)
+ echo "Found $nfiles file(s) in '$idir/'"
+fi
+
+# Process all files
+for f in $allfiles; do
+
+ # Separate name and version number
+ name=$(echo $f | sed -e 's/.tar.*//' | \
+ awk 'BEGIN { FS = "[-_ ]" } {print $1 "-" $2}')
+
+ # Skip previously packed files
+ if [ -f $odir/$name.tar.lz ]; then
+
+ # Print the info message if not in quiet mode
+ if [ -z $quiet ]; then
+ echo "$0: skipping '$odir/$name.tar.lz'"
+ fi
+
+ # skip this file
+ continue
+ else
+
+ # Print the info message if not in quiet mode
+ if [ -z $quiet ]; then
+ echo "$0: processing '$idir/$f'"
+ fi
+ fi
+
+ # Create a temporary directory name
+ tmpdir=$odir/$name-tmpunpack
+
+ # If the temporary directory exists, mkdir will throw an error. The
+ # developer needs to intervene manually to fix the issue.
+ mkdir $tmpdir
+
+
+
+
+
+ # Move into the temporary directory
+ # ---------------------------------
+ #
+ # The default output directory for all the following commands: $tmpdir
+ cd $tmpdir
+
+ # Unpack
+ tar -xf $idir/$f
+
+ # Make sure the unpacked tarball is contained within a directory with
+ # the clean program name
+ if [ ! -d "$name" ]; then
+ mv * $name/
+ fi
+
+ # Pack with recommended options
+ tar -c -Hustar --owner=root --group=root \
+ -f $name.tar $name/
+ lzip -9 $name.tar
+
+ # Move the compressed file from the temporary directory to the target
+ # output directory
+ mv $name.tar.lz $odir/
+
+ # Print the sha512sum along with the filename for a quick reference
+ echo $(sha512sum $odir/$name.tar.lz)
+
+ # Clean up the temporary directory
+ rm -r $tmpdir
+done
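
For reference, the script above can be called like this (the directory names are arbitrary):

    # Repack every tarball under 'orig/' into NAME-VERSION.tar.lz under 'packed/'.
    ./reproduce/software/shell/tarball-prepare.sh \
        --input-dir orig/ --output-dir packed/ --quiet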
diff --git a/tex/README.md b/tex/README.md
index 0319585..6f30839 100644
--- a/tex/README.md
+++ b/tex/README.md
@@ -1,7 +1,7 @@
Directory containing LaTeX-related files
----------------------------------------
-Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
+Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>\
See the end of the file for license conditions.
This directory contains directories to various components the LaTeX part of
diff --git a/tex/img/icon-collaboration.eps b/tex/img/icon-collaboration.eps
index 81754b0..5817a32 100644
--- a/tex/img/icon-collaboration.eps
+++ b/tex/img/icon-collaboration.eps
@@ -1,6 +1,6 @@
%!PS-Adobe-3.0 EPSF-3.0
%%
-%% Copyright (C) 2020-2021 Marjan Akbari <mrjakbari@gmail.com>
+%% Copyright (C) 2020-2022 Marjan Akbari <mrjakbari@gmail.com>
%%
%% This image is available under Creative Commons Attribution-ShareAlike
%% (CC BY-SA). License URL: https://creativecommons.org/licenses/by-sa/4.0
diff --git a/tex/img/icon-complete.eps b/tex/img/icon-complete.eps
index d7ba056..ef50adb 100644
--- a/tex/img/icon-complete.eps
+++ b/tex/img/icon-complete.eps
@@ -1,6 +1,6 @@
%!PS-Adobe-3.0 EPSF-3.0
%%
-%% Copyright (C) 2020-2021 Marjan Akbari <mrjakbari@gmail.com>
+%% Copyright (C) 2020-2022 Marjan Akbari <mrjakbari@gmail.com>
%%
%% This image is available under Creative Commons Attribution-ShareAlike
%% (CC BY-SA). License URL: https://creativecommons.org/licenses/by-sa/4.0
diff --git a/tex/img/icon-processing.eps b/tex/img/icon-processing.eps
index 25824f8..c0bff76 100644
--- a/tex/img/icon-processing.eps
+++ b/tex/img/icon-processing.eps
@@ -1,6 +1,6 @@
%!PS-Adobe-3.0 EPSF-3.0
%%
-%% Copyright (C) 2020-2021 Marjan Akbari <mrjakbari@gmail.com>
+%% Copyright (C) 2020-2022 Marjan Akbari <mrjakbari@gmail.com>
%%
%% This image is available under Creative Commons Attribution-ShareAlike
%% (CC BY-SA). License URL: https://creativecommons.org/licenses/by-sa/4.0
diff --git a/tex/src/IEEEtran_openaccess.bst b/tex/src/IEEEtran_openaccess.bst
index ace8724..d77fbb3 100644
--- a/tex/src/IEEEtran_openaccess.bst
+++ b/tex/src/IEEEtran_openaccess.bst
@@ -2,7 +2,7 @@
%% IEEEtran_openaccess.bst
%% BibTeX Bibliography Style file for IEEE Journals and Conferences (unsorted)
%% Copyright (c) 2003-2015 Michael Shell
-%% Copyright (c) 2020-2021 Boud Roukema for additions of \eprint and \doi rules.
+%% Copyright (c) 2020-2022 Boud Roukema for additions of \eprint and \doi rules.
%% Based on IEEEtran.bst Version 1.14 (2015/08/26).
%%
%% WARNING: 2020-05-24: The \eprint and \doi rules are not quite right;
diff --git a/tex/src/appendix-existing-solutions.tex b/tex/src/appendix-existing-solutions.tex
index 2113377..9a9e2a6 100644
--- a/tex/src/appendix-existing-solutions.tex
+++ b/tex/src/appendix-existing-solutions.tex
@@ -2,9 +2,9 @@
%% file is loaded by the project's 'paper.tex' or 'tex/src/supplement.tex',
%% it should not be run independently.
%
-%% Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-%% Copyright (C) 2021 Raúl Infante-Sainz <infantesainz@gmail.com>
-%% Copyright (C) 2021 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
+%% Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2021-2022 Raúl Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2021-2022 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
%
%% This file is free software: you can redistribute it and/or modify it
%% under the terms of the GNU General Public License as published by the
diff --git a/tex/src/appendix-existing-tools.tex b/tex/src/appendix-existing-tools.tex
index 885de8b..dcedd78 100644
--- a/tex/src/appendix-existing-tools.tex
+++ b/tex/src/appendix-existing-tools.tex
@@ -3,9 +3,9 @@
%% project's 'paper.tex' or 'tex/src/supplement.tex', it should not be run
%% independently.
%
-%% Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-%% Copyright (C) 2021 Raúl Infante-Sainz <infantesainz@gmail.com>
-%% Copyright (C) 2021 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
+%% Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2021-2022 Raúl Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2021-2022 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
%
%% This file is free software: you can redistribute it and/or modify it
%% under the terms of the GNU General Public License as published by the
diff --git a/tex/src/appendix-necessity.tex b/tex/src/appendix-necessity.tex
index 452aa0f..591a0a5 100644
--- a/tex/src/appendix-necessity.tex
+++ b/tex/src/appendix-necessity.tex
@@ -2,8 +2,8 @@
%% papers. This file is loaded by the project's 'paper.tex' or
%% 'tex/src/supplement.tex', it should not be run independently.
%
-%% Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-%% Copyright (C) 2021 Raúl Infante-Sainz <infantesainz@gmail.com>
+%% Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2021-2022 Raúl Infante-Sainz <infantesainz@gmail.com>
%
%% This file is free software: you can redistribute it and/or modify it
%% under the terms of the GNU General Public License as published by the
diff --git a/tex/src/figure-project-outline.tex b/tex/src/figure-project-outline.tex
index eaaf14f..dbf281c 100644
--- a/tex/src/figure-project-outline.tex
+++ b/tex/src/figure-project-outline.tex
@@ -1,4 +1,4 @@
-% Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+% Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%
% This LaTeX source is free software: you can redistribute it and/or
% modify it under the terms of the GNU General Public License as
diff --git a/tex/src/figure-src-inputconf.tex b/tex/src/figure-src-inputconf.tex
index 4d75e24..33742df 100644
--- a/tex/src/figure-src-inputconf.tex
+++ b/tex/src/figure-src-inputconf.tex
@@ -1,8 +1,7 @@
\begin{tcolorbox}[title=\inlinecode{\textcolor{white}{INPUT.conf}}]
\footnotesize
- \texttt{\mkvar{MK20DATA} = \menketwentyxlsxname}\\
- \texttt{\mkvar{MK20MD5}{ } = \menketwentychecksum}\\
- \texttt{\mkvar{MK20SIZE} = \menketwentybytesize}\\
- \texttt{\mkvar{MK20URL}{ } = {\scriptsize \menketwentyurl}}\\
+ \texttt{\mkvar{INPUT-{\menketwentyxlsxname}-sha256} = \menketwentychecksum}\\
+ \texttt{\mkvar{INPUT-{\menketwentyxlsxname}-size} = \menketwentybytesize}\\
+ \texttt{\mkvar{INPUT-{\menketwentyxlsxname}-url} = {\scriptsize \menketwentyurl}}\\
\vspace{-3mm}
\end{tcolorbox}
diff --git a/tex/src/preamble-maneage.tex b/tex/src/preamble-maneage.tex
index 2f2b393..6875e37 100644
--- a/tex/src/preamble-maneage.tex
+++ b/tex/src/preamble-maneage.tex
@@ -1,6 +1,6 @@
%% Preamble for Maneage-related features.
%
-%% Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%
%% This LaTeX file is part of Maneage. Maneage is free software: you can
%% redistribute it and/or modify it under the terms of the GNU General
diff --git a/tex/src/preamble-pgfplots.tex b/tex/src/preamble-pgfplots.tex
index 188b5a9..6c8ed5b 100644
--- a/tex/src/preamble-pgfplots.tex
+++ b/tex/src/preamble-pgfplots.tex
@@ -4,7 +4,7 @@
%% PGFPLOTS is a package in (La)TeX for making plots internally. It fits
%% nicely with the purpose of a reproducible project. But it isn't
%% mandatory. Therefore if you don't need it, just comment/delete the line
-%% that includes this file in the top LaTeX source (`paper.tex').
+%% that includes this file in the top LaTeX source ('paper.tex').
%
%% However, TiKZ and PGFPlots are the recommended way to include figures
%% and plots in your paper. There are two main reasons: 1) it follows the
@@ -29,18 +29,18 @@
%%
%% USAGE:
%
-%% - All plots are made within a `tikz' directory (that must already be
+%% - All plots are made within a 'tikz' directory (that must already be
%% present in the location LaTeX is run).
%
-%% - Use `\includetikz{XXXX}' to make/use the figure. If a `makepdf' LaTeX
-%% macro is not defined, then \includetikz will assume a `XXXX.pdf' file
-%% exists in `tex/tikz' and simply import it. If `makepdf' is defined,
+%% - Use '\includetikz{XXXX}' to make/use the figure. If a 'makepdf' LaTeX
+%% macro is not defined, then \includetikz will assume a 'XXXX.pdf' file
+%% exists in 'tex/tikz' and simply import it. If 'makepdf' is defined,
%% then TiKZ/PGFPlot will be called to (possibly) build the plot based
-%% on `tex/XXXX.tex'. Note that if the contents of `tex/src/XXXX.tex'
+%% on 'tex/XXXX.tex'. Note that if the contents of 'tex/src/XXXX.tex'
%% hasn't changed since the last build, TiKZ/PGFPlots won't rebuild the
%% plot.
%
-%% Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%
%% This LaTeX file is part of Maneage. Maneage is free software: you can
%% redistribute it and/or modify it under the terms of the GNU General
@@ -92,8 +92,9 @@
-%% Uncomment the following lines for TiKZ external images to be saved as
-%% EPS and PS images.
+%% Uncomment the following lines for EPS and PS images. Note that you still
+%% have to use the 'pdflatex' executable and also add a '[dvips]' option to
+%% graphicx.
\tikzset{
external/system call={
rm -f "\image".eps "\image".ps "\image".dvi;
diff --git a/tex/src/preamble-project.tex b/tex/src/preamble-project.tex
index 7fabca1..d5a30af 100644
--- a/tex/src/preamble-project.tex
+++ b/tex/src/preamble-project.tex
@@ -4,7 +4,7 @@
%% LaTeX usages. However, if any are not needed in your work, please feel
%% free to remove them.
%
-%% Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
%% Copyright (C) YYYY Your Name <your@email.address>
%
%% This file is free software: you can redistribute it and/or modify it
@@ -40,10 +40,6 @@
%% For the `\url' command.
\usepackage{url}
-%% No need to load xcolor, its included by others below (it conflicts with
-%% the listings package.
-%\usepackage{xcolor}
-
%% To have links.
\usepackage[
colorlinks,
diff --git a/tex/src/references.tex b/tex/src/references.tex
index bf99d1d..4f6af2a 100644
--- a/tex/src/references.tex
+++ b/tex/src/references.tex
@@ -3,7 +3,7 @@
%
%% [[[BibTeX 0.99d complains with the at-character, even when its in a
%% comment line! So ::at:: is used instead in the email address]]].
-%% Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad::at::akhlaghi.org>
+%% Copyright (C) 2018-2022 Mohammad Akhlaghi <mohammad::at::akhlaghi.org>
%
%% Copying and distribution of this file, with or without modification,
%% are permitted in any medium without royalty provided the copyright
diff --git a/tex/src/supplement.tex b/tex/src/supplement.tex
index 9524705..362f304 100644
--- a/tex/src/supplement.tex
+++ b/tex/src/supplement.tex
@@ -1,8 +1,8 @@
%% The top-level file to build the separate supplement that contains the
%% appendices (to be published as a separate PDF file).
%
-%% Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>
-%% Copyright (C) 2020-2021 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
+%% Copyright (C) 2020-2022 Mohammad Akhlaghi <mohammad@akhlaghi.org>
+%% Copyright (C) 2020-2022 Boudewijn F. Roukema <boud@astro.uni.torun.pl>
%
%% This file is free software: you can redistribute it and/or modify it
%% under the terms of the GNU General Public License as published by the