diff options
Diffstat (limited to 'reproduce/software')
59 files changed, 6534 insertions, 2554 deletions
diff --git a/reproduce/software/bibtex/astrometrynet.tex b/reproduce/software/bibtex/astrometrynet.tex index 15d4829..5697885 100644 --- a/reproduce/software/bibtex/astrometrynet.tex +++ b/reproduce/software/bibtex/astrometrynet.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -6,15 +6,19 @@ %% without any warranty. @ARTICLE{astrometrynet, - doi = {10.1088/0004-6256/139/5/1782}, - url = {https://doi.org/10.1088%2F0004-6256%2F139%2F5%2F1782}, - year = 2010, - month = {mar}, - publisher = {{IOP} Publishing}, - volume = {139}, - number = {5}, - pages = {1782}, - author = {Dustin Lang and David W. Hogg and Keir Mierle and Michael Blanton and Sam Roweis}, - title = {{ASTROMETRY}.{NET}: {BLIND} {ASTROMETRIC} {CALIBRATION} {OF} {ARBITRARY} {ASTRONOMICAL} {IMAGES}}, - journal = {AJ} + author = {{Lang}, Dustin and {Hogg}, David W. 
and {Mierle}, Keir and {Blanton}, Michael and {Roweis}, Sam}, + title = "{Astrometry.net: Blind Astrometric Calibration of Arbitrary Astronomical Images}", + journal = {AJ}, + keywords = {astrometry, catalogs, instrumentation: miscellaneous, methods: data analysis, methods: statistical, techniques: image processing, Astrophysics - Instrumentation and Methods for Astrophysics}, + year = 2010, + month = may, + volume = {139}, + number = {5}, + pages = {1782-1800}, + doi = {10.1088/0004-6256/139/5/1782}, +archivePrefix = {arXiv}, + eprint = {0910.2233}, + primaryClass = {astro-ph.IM}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2010AJ....139.1782L}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/astropy.tex b/reproduce/software/bibtex/astropy.tex index c25803b..3df1aa1 100644 --- a/reproduce/software/bibtex/astropy.tex +++ b/reproduce/software/bibtex/astropy.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/corner.tex b/reproduce/software/bibtex/corner.tex index b2e0e25..fd7ea76 100644 --- a/reproduce/software/bibtex/corner.tex +++ b/reproduce/software/bibtex/corner.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/cython.tex b/reproduce/software/bibtex/cython.tex index 88a5c4e..311072d 100644 --- a/reproduce/software/bibtex/cython.tex +++ b/reproduce/software/bibtex/cython.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul 
Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -6,16 +6,15 @@ %% without any warranty. @ARTICLE{cython2011, - author = {{Behnel}, S. and {Bradshaw}, R. and {Citro}, C. and {Dalcin}, L. and - {Seljebotn}, D.~S. and {Smith}, K.}, - title = "{Cython: The Best of Both Worlds}", - journal = {CiSE}, - year = 2011, - month = mar, - volume = 13, - number = 2, - pages = {31}, - doi = {10.1109/MCSE.2010.118}, - adsurl = {http://ui.adsabs.harvard.edu/abs/2011CSE....13b..31B}, - adsnote = {Provided by the SAO/NASA Astrophysics Data System} + author = {{Behnel}, Stefan and {Bradshaw}, Robert and {Citro}, Craig and {Dalcin}, Lisandro and {Seljebotn}, Dag Sverre and {Smith}, Kurt}, + title = "{Cython: The Best of Both Worlds}", + journal = {Computing in Science and Engineering}, + year = 2011, + month = mar, + volume = {13}, + number = {2}, + pages = {31-39}, + doi = {10.1109/MCSE.2010.118}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2011CSE....13b..31B}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/fftw.tex b/reproduce/software/bibtex/fftw.tex index 7f525c9..15c5c62 100644 --- a/reproduce/software/bibtex/fftw.tex +++ b/reproduce/software/bibtex/fftw.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -12,7 +12,7 @@ year = {2005}, volume = {93}, number = {2}, - pages = {216}, + pages = {216-231}, doi = {10.1109/JPROC.2004.840301}, ISSN = {0018-9219}, month = {Feb}, diff --git a/reproduce/software/bibtex/galsim.tex b/reproduce/software/bibtex/galsim.tex index 
3646639..bbfdec1 100644 --- a/reproduce/software/bibtex/galsim.tex +++ b/reproduce/software/bibtex/galsim.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/gnuastro.tex b/reproduce/software/bibtex/gnuastro.tex index 1470a00..7d56e3e 100644 --- a/reproduce/software/bibtex/gnuastro.tex +++ b/reproduce/software/bibtex/gnuastro.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -6,19 +6,20 @@ %% without any warranty. @ARTICLE{gnuastro, - author = {{Akhlaghi}, M. 
and {Ichikawa}, T.}, - title = "{Noise-based Detection and Segmentation of Nebulous Objects}", - journal = {ApJS}, -archivePrefix = "arXiv", - eprint = {1505.01664}, - primaryClass = "astro-ph.IM", - keywords = {galaxies: irregular, galaxies: photometry, galaxies: structure, methods: data analysis, techniques: image processing, techniques: photometric}, - year = 2015, - month = sep, - volume = 220, - eid = {1}, - pages = {1}, - doi = {10.1088/0067-0049/220/1/1}, - adsurl = {http://ui.adsabs.harvard.edu/abs/2015ApJS..220....1A}, - adsnote = {Provided by the SAO/NASA Astrophysics Data System} + author = {{Akhlaghi}, Mohammad and {Ichikawa}, Takashi}, + title = "{Noise-based Detection and Segmentation of Nebulous Objects}", + journal = {ApJS}, + keywords = {galaxies: irregular, galaxies: photometry, galaxies: structure, methods: data analysis, techniques: image processing, techniques: photometric, Astrophysics - Instrumentation and Methods for Astrophysics, Astrophysics - Cosmology and Nongalactic Astrophysics, Astrophysics - Astrophysics of Galaxies}, + year = 2015, + month = sep, + volume = {220}, + number = {1}, + eid = {1}, + pages = {1}, + doi = {10.1088/0067-0049/220/1/1}, +archivePrefix = {arXiv}, + eprint = {1505.01664}, + primaryClass = {astro-ph.IM}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2015ApJS..220....1A}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/healpix.tex b/reproduce/software/bibtex/healpix.tex index 08bef90..afdd6a6 100644 --- a/reproduce/software/bibtex/healpix.tex +++ b/reproduce/software/bibtex/healpix.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -6,16 +6,15 @@ %% without any warranty. 
@ARTICLE{healpix, - author = {{G{\'o}rski}, K.~M. and {Hivon}, E. and {Banday}, A.~J. and {Wand - elt}, B.~D. and {Hansen}, F.~K. and {Reinecke}, M. and {Bartelmann}, M.}, + author = {{G{\'o}rski}, K.~M. and {Hivon}, E. and {Banday}, A.~J. and {Wandelt}, B.~D. and {Hansen}, F.~K. and {Reinecke}, M. and {Bartelmann}, M.}, title = "{HEALPix: A Framework for High-Resolution Discretization and Fast Analysis of Data Distributed on the Sphere}", journal = {ApJ}, keywords = {Cosmology: Cosmic Microwave Background, Cosmology: Observations, Methods: Statistical, Astrophysics}, - year = "2005", - month = "Apr", + year = 2005, + month = apr, volume = {622}, number = {2}, - pages = {759}, + pages = {759-771}, doi = {10.1086/427976}, archivePrefix = {arXiv}, eprint = {astro-ph/0409513}, diff --git a/reproduce/software/bibtex/imfit.tex b/reproduce/software/bibtex/imfit.tex index 3822fd4..1d5270b 100644 --- a/reproduce/software/bibtex/imfit.tex +++ b/reproduce/software/bibtex/imfit.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -6,19 +6,20 @@ %% without any warranty. 
@ARTICLE{imfit2015, - author = {{Erwin}, P.}, - title = "{IMFIT: A Fast, Flexible New Program for Astronomical Image Fitting}", - journal = {ApJ}, -archivePrefix = "arXiv", - eprint = {1408.1097}, - primaryClass = "astro-ph.IM", - keywords = {galaxies: bulges, galaxies: photometry, galaxies: structure, methods: data analysis, techniques: image processing, techniques: photometric }, - year = 2015, - month = feb, - volume = 799, - eid = {226}, - pages = {226}, - doi = {10.1088/0004-637X/799/2/226}, - adsurl = {https://ui.adsabs.harvard.edu/abs/2015ApJ...799..226E}, - adsnote = {Provided by the SAO/NASA Astrophysics Data System} + author = {{Erwin}, Peter}, + title = "{IMFIT: A Fast, Flexible New Program for Astronomical Image Fitting}", + journal = {ApJ}, + keywords = {galaxies: bulges, galaxies: photometry, galaxies: structure, methods: data analysis, techniques: image processing, techniques: photometric, Astrophysics - Instrumentation and Methods for Astrophysics, Astrophysics - Astrophysics of Galaxies}, + year = 2015, + month = feb, + volume = {799}, + number = {2}, + eid = {226}, + pages = {226}, + doi = {10.1088/0004-637X/799/2/226}, +archivePrefix = {arXiv}, + eprint = {1408.1097}, + primaryClass = {astro-ph.IM}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2015ApJ...799..226E}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/kiwisolver.tex b/reproduce/software/bibtex/kiwisolver.tex new file mode 100644 index 0000000..47d3f56 --- /dev/null +++ b/reproduce/software/bibtex/kiwisolver.tex @@ -0,0 +1,18 @@ +%% Copyright (C) 2025-2025 Boud Roukema <boud@cosmo.torun.pl> +%% +%% Copying and distribution of this file, with or without modification, +%% are permitted in any medium without royalty provided the copyright +%% notice and this notice are preserved. This file is offered as-is, +%% without any warranty. + +@ARTICLE{cassowary2001, + author = {{Grudin}, Jonathan and {Badros}, Greg J. 
and {Borning}, Alan and {Stuckey}, Peter J.}, + title = "{The Cassowary linear arithmetic constraint solving algorithm}", + journal = {ACM Transactions on Computer-Human Interaction}, + year = 2001, + month = dec, + volume = {8}, + number = {4}, + pages = {267-306}, + doi = {10.1145/504704.504705}, +} diff --git a/reproduce/software/bibtex/matplotlib.tex b/reproduce/software/bibtex/matplotlib.tex index c35f682..ec7bab4 100644 --- a/reproduce/software/bibtex/matplotlib.tex +++ b/reproduce/software/bibtex/matplotlib.tex @@ -1,18 +1,21 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright %% notice and this notice are preserved. This file is offered as-is, %% without any warranty. -@Article{matplotlib2007, - Author = {Hunter, J. D.}, - Title = {Matplotlib: A 2D graphics environment}, - Journal = {CiSE}, - Volume = {9}, - Number = {3}, - Pages = {90}, - publisher = {IEEE COMPUTER SOC}, - doi = {10.1109/MCSE.2007.55}, - year = 2007 +@ARTICLE{matplotlib2007, + author = {{Hunter}, John D.}, + title = "{Matplotlib: A 2D Graphics Environment}", + journal = {CiSE}, + keywords = {Python, Scripting languages, Application development, Scientific programming}, + year = 2007, + month = may, + volume = {9}, + number = {3}, + pages = {90-95}, + doi = {10.1109/MCSE.2007.55}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2007CSE.....9...90H}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/missfits.tex b/reproduce/software/bibtex/missfits.tex index 9fb0f54..83510dd 100644 --- a/reproduce/software/bibtex/missfits.tex +++ b/reproduce/software/bibtex/missfits.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2020-2021 Surena Fatemi <surena.fatemi@ipm.ir> +%% Copyright (C) 2020-2025 Surena Fatemi 
<surena.fatemi@ipm.ir> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/mpi4py.tex b/reproduce/software/bibtex/mpi4py.tex index 9a99803..10e0e74 100644 --- a/reproduce/software/bibtex/mpi4py.tex +++ b/reproduce/software/bibtex/mpi4py.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -6,15 +6,15 @@ %% without any warranty. @ARTICLE{mpi4py2011, - author = {{Dalcin}, L.~D. and {Paz}, R.~R. and {Kler}, P.~A. and {Cosimo}, A. - }, - title = "{Parallel distributed computing using Python}", - journal = {AdvWatRes}, - year = 2011, - month = sep, - volume = 34, - pages = {1124}, - doi = {10.1016/j.advwatres.2011.04.013}, - adsurl = {http://ui.adsabs.harvard.edu/abs/2011AdWR...34.1124D}, - adsnote = {Provided by the SAO/NASA Astrophysics Data System} + author = {{Dalcin}, Lisandro D. and {Paz}, Rodrigo R. and {Kler}, Pablo A. 
and {Cosimo}, Alejandro}, + title = "{Parallel distributed computing using Python}", + journal = {Advances in Water Resources}, + year = 2011, + month = sep, + volume = {34}, + number = {9}, + pages = {1124-1139}, + doi = {10.1016/j.advwatres.2011.04.013}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2011AdWR...34.1124D}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/numpy.tex b/reproduce/software/bibtex/numpy.tex index 07101cf..fb017c4 100644 --- a/reproduce/software/bibtex/numpy.tex +++ b/reproduce/software/bibtex/numpy.tex @@ -1,23 +1,24 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright %% notice and this notice are preserved. This file is offered as-is, %% without any warranty. -@ARTICLE{numpy2011, - author = {{van der Walt}, S. and {Colbert}, S.~C. and {Varoquaux}, G.}, - title = "{The NumPy Array: A Structure for Efficient Numerical Computation}", - journal = {CiSE}, -archivePrefix = "arXiv", - eprint = {1102.1523}, - primaryClass = "cs.MS", - year = 2011, - month = mar, - volume = 13, - number = 2, - pages = {22}, - doi = {10.1109/MCSE.2011.37}, - adsurl = {http://ui.adsabs.harvard.edu/abs/2011CSE....13b..22V}, - adsnote = {Provided by the SAO/NASA Astrophysics Data System} +@ARTICLE{numpy2020, + author = {{Harris}, Charles R. and {Millman}, K. Jarrod and {van der Walt}, St{\'e}fan J. and {Gommers}, Ralf and {Virtanen}, Pauli and {Cournapeau}, David and {Wieser}, Eric and {Taylor}, Julian and {Berg}, Sebastian and {Smith}, Nathaniel J. and {Kern}, Robert and {Picus}, Matti and {Hoyer}, Stephan and {van Kerkwijk}, Marten H. 
and {Brett}, Matthew and {Haldane}, Allan and {del R{\'\i}o}, Jaime Fern{\'a}ndez and {Wiebe}, Mark and {Peterson}, Pearu and {G{\'e}rard-Marchant}, Pierre and {Sheppard}, Kevin and {Reddy}, Tyler and {Weckesser}, Warren and {Abbasi}, Hameer and {Gohlke}, Christoph and {Oliphant}, Travis E.}, + title = "{Array programming with NumPy}", + journal = {Nature}, + keywords = {Computer Science - Mathematical Software, Statistics - Computation}, + year = 2020, + month = sep, + volume = {585}, + number = {7825}, + pages = {357-362}, + doi = {10.1038/s41586-020-2649-2}, +archivePrefix = {arXiv}, + eprint = {2006.10256}, + primaryClass = {cs.MS}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2020Natur.585..357H}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/r-cran.tex b/reproduce/software/bibtex/r-cran.tex new file mode 100644 index 0000000..c58eeeb --- /dev/null +++ b/reproduce/software/bibtex/r-cran.tex @@ -0,0 +1,19 @@ +%% Copyright (C) 2022-2025 Boud Roukema <boud@cosmo.torun.pl> +%% Copyright (C) 2022-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% +%% Copying and distribution of this file, with or without modification, +%% are permitted in any medium without royalty provided the copyright +%% notice and this notice are preserved. This file is offered as-is, +%% without any warranty. 
+ +@ARTICLE{RIhakaGentleman1996, + author = {{Ihaka}, Ross and {Gentleman}, Robert}, + title = "{R: A language for data analysis and graphics}", + journal = {J.Comput.Graph.Stat.}, + year = {1996}, + volume = {5}, + number = {3}, + pages = {299-314}, + DOI = {10.1080/10618600.1996.10474713}, + ISSN = {10618600}, +} diff --git a/reproduce/software/bibtex/scamp.tex b/reproduce/software/bibtex/scamp.tex index ab68dd6..b5c4da9 100644 --- a/reproduce/software/bibtex/scamp.tex +++ b/reproduce/software/bibtex/scamp.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/scipy.tex b/reproduce/software/bibtex/scipy.tex index 71b0caa..1296393 100644 --- a/reproduce/software/bibtex/scipy.tex +++ b/reproduce/software/bibtex/scipy.tex @@ -1,34 +1,23 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright %% notice and this notice are preserved. This file is offered as-is, %% without any warranty. -@ARTICLE{scipy2011, - author = {K. J. {Millman} and M. {Aivazis}}, - journal = {CiSE}, - title = {Python for Scientists and Engineers}, - year = {2011}, - volume = {13}, - number = {2}, - pages = {9}, - keywords = {Special issues and sections;Computer languages;Programming;Scientific computing;Numerical models;Programming languages;Python;Scientific computing;interactive research;Python libraries;Python tools}, - doi = {10.1109/MCSE.2011.36}, - ISSN = {1521-9615}, - month ={March}, -} - -@ARTICLE{scipy2007, - author = {T. E. 
{Oliphant}}, - journal = {CiSE}, - title = {Python for Scientific Computing}, - year = {2007}, - volume = {9}, - number = {3}, - pages = {10}, - keywords = {high level languages;Python;scientific computing;steering language;scientific codes;high-level language;Scientific computing;High level languages;Libraries;Writing;Application software;Embedded software;Software standards;Standards development;Internet;Prototypes;Python;computer languages;scientific programming;scientific computing}, - doi = {10.1109/MCSE.2007.58}, - ISSN = {1521-9615}, - month = {May}, +@ARTICLE{scipy2020, + author = {{Virtanen}, Pauli and {Gommers}, Ralf and {Oliphant}, Travis E. and {Haberland}, Matt and {Reddy}, Tyler and {Cournapeau}, David and {Burovski}, Evgeni and {Peterson}, Pearu and {Weckesser}, Warren and {Bright}, Jonathan and {van der Walt}, St{\'e}fan J. and {Brett}, Matthew and {Wilson}, Joshua and {Millman}, K. Jarrod and {Mayorov}, Nikolay and {Nelson}, Andrew R.~J. and {Jones}, Eric and {Kern}, Robert and {Larson}, Eric and {Carey}, C.~J. and {Polat}, {\.I}lhan and {Feng}, Yu and {Moore}, Eric W. and {VanderPlas}, Jake and {Laxalde}, Denis and {Perktold}, Josef and {Cimrman}, Robert and {Henriksen}, Ian and {Quintero}, E.~A. and {Harris}, Charles R. and {Archibald}, Anne M. and {Ribeiro}, Ant{\^o}nio H. and {Pedregosa}, Fabian and {van Mulbregt}, Paul and {SciPy 1. 
0 Contributors}}, + title = "{SciPy 1.0: fundamental algorithms for scientific computing in Python}", + journal = {Nature Methods}, + keywords = {Computer Science - Mathematical Software, Computer Science - Data Structures and Algorithms, Computer Science - Software Engineering, Physics - Computational Physics}, + year = 2020, + month = feb, + volume = {17}, + pages = {261-272}, + doi = {10.1038/s41592-019-0686-2}, +archivePrefix = {arXiv}, + eprint = {1907.10121}, + primaryClass = {cs.MS}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2020NatMe..17..261V}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} } diff --git a/reproduce/software/bibtex/sextractor.tex b/reproduce/software/bibtex/sextractor.tex index 0c91a3d..818f04a 100644 --- a/reproduce/software/bibtex/sextractor.tex +++ b/reproduce/software/bibtex/sextractor.tex @@ -1,5 +1,5 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -11,10 +11,10 @@ title = "{SExtractor: Software for source extraction.}", journal = {A\&AS}, keywords = {METHODS: DATA ANALYSIS, TECHNIQUES: IMAGE PROCESSING, GALAXIES: PHOTOMETRY}, - year = "1996", - month = "Jun", + year = 1996, + month = jun, volume = {117}, - pages = {393}, + pages = {393-404}, doi = {10.1051/aas:1996164}, adsurl = {https://ui.adsabs.harvard.edu/abs/1996A&AS..117..393B}, adsnote = {Provided by the SAO/NASA Astrophysics Data System} diff --git a/reproduce/software/bibtex/sip_tpv.tex b/reproduce/software/bibtex/sip_tpv.tex index 02dfec0..0d52e33 100644 --- a/reproduce/software/bibtex/sip_tpv.tex +++ b/reproduce/software/bibtex/sip_tpv.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 
2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/swarp.tex b/reproduce/software/bibtex/swarp.tex index 7636f1c..e0989f9 100644 --- a/reproduce/software/bibtex/swarp.tex +++ b/reproduce/software/bibtex/swarp.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/sympy.tex b/reproduce/software/bibtex/sympy.tex index 1d07846..01e481b 100644 --- a/reproduce/software/bibtex/sympy.tex +++ b/reproduce/software/bibtex/sympy.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +%% Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright diff --git a/reproduce/software/bibtex/tides.tex b/reproduce/software/bibtex/tides.tex index f3b5490..caf1500 100644 --- a/reproduce/software/bibtex/tides.tex +++ b/reproduce/software/bibtex/tides.tex @@ -1,4 +1,4 @@ -%% Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +%% Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> %% %% Copying and distribution of this file, with or without modification, %% are permitted in any medium without royalty provided the copyright @@ -13,7 +13,7 @@ month = nov, volume = 39, eid = {5}, - pages = {5:1}, + pages = {1-28}, doi = {10.1145/2382585.2382590}, keywords = {Taylor series method, automatic differentiation, high precision, numerical 
integration of ODEs, variational equations}, } diff --git a/reproduce/software/config/LOCAL.conf.in b/reproduce/software/config/LOCAL.conf.in index 132c3f7..e60f344 100644 --- a/reproduce/software/config/LOCAL.conf.in +++ b/reproduce/software/config/LOCAL.conf.in @@ -1,17 +1,78 @@ # Local project configuration. # -# This is just a template for the `./project configure' script to fill +# This is just a template for the './project configure' script to fill # in. Please don't make any change to this file. # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and # this notice are preserved. This file is offered as-is, without any # warranty. -BDIR = @bdir@ -INDIR = @indir@ + + + + +# Local system settings +# --------------------- +# +# Build directory (mandatory). All the created files of the project will be +# within this directory. +BDIR = @bdir@ + +# Input data directory. This can be empty or a non-existant location. If +# so, then the inputs will be downloaded from the 'INPUTS.conf' into the +# build directory. +INDIR = @indir@ + +# Software source code directory. This can be empty or a non-existant +# location. If so, the software tarballs will be downloaded. DEPENDENCIES-DIR = @ddir@ -SYS_CPATH = @sys_cpath@ -DOWNLOADER = @downloader@ -GROUP-NAME = @groupname@ + +# Other local settings (compiler, downloader and user). +SYS_CPATH = @sys_cpath@ +DOWNLOADER = @downloader@ +GROUP-NAME = @groupname@ + + + + + +# Server authentication/identification +# ------------------------------------ +# +# If you need to identify yourself to the database server (when downloading +# input files), you can write your user name and password in the two +# variables below. 
When these two variables are defined, the download rule +# (in 'reproduce/analysis/make/initialize.mk') will pass their values to +# the '--user' and '--password' options of WGET.\ +# +# The 'DATABASEAUTHTYPE' specifies which type of authentication is +# recognized by the database with the 'wget' command. It can take the +# following values: +# 'userpass': Assumes --user='XXXX' --password='YYYY' +# 'postdata': Assumes --post-data 'username=XXXX&password=YYYY' +# +# SPECIAL CHARACTERS IN PASSWORD: if your password has special characters +# like '#' or '$' (that can have special meaning for Make), then comment +# them with a back-slash. For example if your password is 'ab#cd', write it +# below as 'ab\#cd'. Within 'initialize.mk', the user name and password are +# placed inside single quotes before being used, so special characters +# won't be problematic. However, if your password includes characters like +# the single quote itself, this can be problematic. In this case, you can +# temporarily (without committing!) directly enter the password in the +# respective rule of 'initialize.mk' and after downloading, undo the change +# (which is easy with 'git restore'). If you have any ideas on how to +# account for such characters in a generic way, please let us know and +# we'll suggest them here for future users. +# +# SECURITY WARNING: only set the values of these two variables in +# 'LOCAL.conf' (which is _not_ under version control), _not_ +# 'LOCAL.conf.in' (which is under version control). It is also recommended +# to remove the ID and password values from 'LOCAL.conf' immediately after +# your downloads finish: even though 'LOCAL.conf' is not under version +# control, it will still be on your computer, which may be accessed by +# others or (mistakenly) shared with others. 
+DATABASEUSER = +DATABASEPASS = +DATABASEAUTHTYPE = diff --git a/reproduce/software/config/TARGETS.conf b/reproduce/software/config/TARGETS.conf index bec3adb..716fd41 100644 --- a/reproduce/software/config/TARGETS.conf +++ b/reproduce/software/config/TARGETS.conf @@ -1,7 +1,7 @@ # Necessary high-level software to build in this project. # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and @@ -21,22 +21,26 @@ # reproduce/software/config/versions.conf # # Please add any software that you need for your project in the respective -# part below (using its name in `versions.conf', but without the `-version' +# part below (using its name in 'versions.conf', but without the '-version' # part). Just note that if a program/library is a dependency of another, # you don't need to include it here (it will be installed before the # higher-level software anyway). # # Note that many low-level software will be installed before those that are # installed in this step. They are clearly distinguished from the -# higher-level (optional) software in `versions.conf'. These low-level +# higher-level (optional) software in 'versions.conf'. These low-level # software MUST NOT be added here. -# Programs and libraries (for Python modules, add to 'top-level-python'). -top-level-programs = gnuastro +# Programs and libraries (for Python or R modules, use respective variable). +top-level-programs = gnuastro # Python libraries/modules. 
-top-level-python = +top-level-python = + +# R libraries/modules +# [For developers 2022-01-02: 'r-cran-cowplot r-cran-gridExtra' for all] +top-level-r-cran = diff --git a/reproduce/software/config/checksums.conf b/reproduce/software/config/checksums.conf index 0e21912..89738ce 100644 --- a/reproduce/software/config/checksums.conf +++ b/reproduce/software/config/checksums.conf @@ -1,7 +1,8 @@ # sha512 checksums of all the necessary software tarballs. # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2022-2025 Pedram Ashofteh Ardakani <pedramardakani@pm.me> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and @@ -14,54 +15,53 @@ # Basic/low-level programs and libraires (installed in any case) # -------------------------------------------------------------- -bash-checksum = e210cd63ce1241636fbb14d1a105c83e9481a0312026f746f76f7115b777707170ddfe1840fb4d3a4093613048e2d1eedf926e843ad15ffc8d66f08525e8b04b -binutils-checksum = 5ad795fab0803be83b53aa955fd5414c8408b4cf2a66eba2f8688298312934b4b1b0cbe9cf887d86de77f88adf1333d85fc9f6ab4a530e85a09b9b2dbf6aaf3f -bzip2-checksum = 00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12 -cert-checksum = a81dfa59c70788126a395c576e54cb8f61c1ea34da69b5cd42e2d83ee6426c2a26941360c7302793774ea98ca16846deb6e683144cc7fb6da6ef87b70447e4c8 -coreutils-checksum = 1c8f3584efd61b4b02e7ac5db8e103b63cfb2063432caaf1e64cb2dcc56d8c657d1133bbf10bd41468d6a1f31142e6caa81d16ae68fa3e6e84075c253613a145 -curl-checksum = 
614f8d67e6ee942cb5e57f2da1a3faaf84ebff549ffe383aaf3751a4de3c8874ff482076afa27a52e910a137dec9b0f6e98265dc7d4ad7c95845b63f39236fd4 -dash-checksum = 9d55090115ac04f505d70e6790179331178950f96fe713b33fd698fa8bfa60d4eff1b68cb7b8a2f099d29c587d36034a17dccd6658ba1623ff0a625ac1fb9620 -diffutils-checksum = 7b12cf8aea1b9844773748f72272d9c6a38adae9c3c3a8c62048f91fb56c60b76035fa5f51665dceaf2cfbf1d1f4a3efdcc24bf47a5a16ff4350543314b12c9c -file-checksum = 9cf1a7b769c56eb6f5b25c66ce85fa1300128396e445b2e53dbbd8951e5da973a7a07c4ef9f7ebd1fe945d47bdaf2cd9ef09bd2be6c217a0bcb907d9449835e6 -findutils-checksum = 650a24507f8f4ebff83ad28dd27daa4785b4038dcaadc4fe00823b976e848527074cce3f9ec34065b7f037436d2aa6e9ec099bc05d7472c29864ac2c69de7f2e -flock-checksum = ddb997174c0653bc3d29410a5a16b6290e737aa40fbf4b746e2d1db1e88e5acb08ec11a25c27c8a5a5fbf5a00fcac17abeaa245e7df27bd975ae86364d400b86 -gawk-checksum = 682fadd3630d51d13d19443a9a93d4cba1bd6802dd078f1366f17f39d3fa5800f47037b103d742be9254475fdc251d90626f95a2e04b5ace117cfaecebca2281 -gcc-checksum = 42ae38928bd2e8183af445da34220964eb690b675b1892bbeb7cd5bb62be499011ec9a93397dba5e2fb681afadfc6f2767d03b9035b44ba9be807187ae6dc65e -gettext-checksum = f3083af79341bfdc849118333c1598812c12bc225d998181694648187088050160deb4777c252f72a7158e914c2967416489bc6167ef8505664497f2fb94ecbf -git-checksum = a6159c0a15e3c5f9603157d4010664a6d74e7d65b9fe97a03b36fac12607248ed57980d96565841e88eae343001c167222232737d3af812608c8db011941df1a -gmp-checksum = 9975e8766e62a1d48c0b6d7bbdd2fccb5b22243819102ca6c8d91f0edd2d3a1cef21c526d647c2159bb29dd2a7dcbd0d621391b2e4b48662cf63a8e6749561cd -grep-checksum = 0f1506bd19971fbdcb47a111277ca63e8ad045456f096980852fd0a61c860f29f4b369bbaaa5cbce4b0a81718e3e3274d9a078b491f2109baa9a02ce600ee206 -gzip-checksum = 753fbcf5eb104bfc8a8eb81b69b8701f757b5158e6333b17438574169a4662642a122e1fdbd920a536edbcb77253d65fa571e4f507dbe72a70fee5eb161d6324 -isl-checksum = 
85d0b40f4dbf14cb99d17aa07048cdcab2dc3eb527d2fbb1e84c41b2de5f351025370e57448b63b2b8a8cf8a0843a089c3263f9baee1542d5c2e1cb37ed39d94 -less-checksum = 79384ff3faa33aeb86da6027c8b264df78f9f8c799af43dc5340e2ca3d86053c9be168140bfa05734a4217e65ef9939652b004d6a536f64b2e0ef3b74b07f535 -libbsd-checksum = b75529785b16c93d31401187f8a58258fbebe565dac071c8311775c913af989f62cd29d5ce2651af3ea6221cffd31cf04826577d3e546ab9ca14340f297777b9 -libiconv-checksum = 365dac0b34b4255a0066e8033a8b3db4bdb94b9b57a9dca17ebf2d779139fe935caf51a465d17fd8ae229ec4b926f3f7025264f37243432075e5583925bb77b7 -libtool-checksum = a6eef35f3cbccf2c9e2667f44a476ebc80ab888725eb768e91a3a6c33b8c931afc46eb23efaee76c8696d3e4eed74ab1c71157bcb924f38ee912c8a90a6521a4 -libunistring-checksum = 01dcab6e05ea4c33572bf96cc0558bcffbfc0e62fc86410cef06c1597a0073d5750525fe2dee4fdb39c9bd704557fcbab864f9645958108a2e07950bc539fe54 -libxml2-checksum = cb7784ba4e72e942614e12e4f83f4ceb275f3d738b30e3b5c1f25edf8e9fa6789e854685974eed95b362049dbf6c8e7357e0327d64c681ed390534ac154e6810 -lzip-checksum = e2e229899002072322a5bf7fb7ef37ff7cc2d0ded7d6525679ac29cec5c1534de89f76bc01e15c3d76584099957d596b9e5b32d3f9cbfc6bc8d6370fc67d7d96 -m4-checksum = a92cad4441b3fd7c033837389ca3499494523d364a5fda043d92c517051510f1758b3b837f0477f42d2258a179ab79a4993e5d1694ef2673db6d96d1faff84fe -make-checksum = ddf0fdcb9ee1b182ef294c5da70c1275288c99bef60e63a25c0abed2ddd44aba1770be4aab1db8cac81e5f624576f2127c5d825a1824e1c7a49df4f16445526b -metastore-checksum = b2a5fdde9de5ddc1e6c368d5da1b2e97e4fdbaa138a7be281ccb40a81dd4a9bb1849d36b2d5d3f01205079bace60441f82a7002097ff3a7037340a35b0f1574a -mpc-checksum = 72d657958b07c7812dc9c7cbae093118ce0e454c68a585bfb0e2fa559f1bf7c5f49b93906f580ab3f1073e5b595d23c6494d4d76b765d16dde857a18dd239628 -mpfr-checksum = d583555d08863bf36c89b289ae26bae353d9a31f08ee3894520992d2c26e5683c4c9c193d7ad139632f71c0a476d85ea76182702a98bf08dde7b6f65a54f8b88 -nano-checksum = 
d101e7f4802c079254e79340b433749dcd699fa9adec3f96e4218ec12f066a1f6b0954c27254bb6f019bc370ee2116817717870f4e2bc782c552442f2cc75195 -ncurses-checksum = 4c1333dcc30e858e8a9525d4b9aefb60000cfc727bc4a1062bace06ffc4639ad9f6e54f6bdda0e3a0e5ea14de995f96b52b3327d9ec633608792c99a1e8d840d -openssl-checksum = 1523985ba90f38aa91aa6c2d57652f4e243cb2a095ce6336bf34b39b5a9b5b876804299a6825c758b65990e57948da532cca761aa12b10958c97478d04dd6d34 -patchelf-checksum = 39745662651cf0a9915685b2767a611ceab4286f8fa57eace342b3f44248431616e8563d4ac6709c97d8534229c73c05470239e462b7e74b36bf629a876dfbad -perl-checksum = b00f3482f6961be043349a09445227e99472a8ae804919bfa0641de5bbd683249deb607f3b5c6c44ccfcf916408eac125132f9660191d574de0a9d1f17892bc1 -pkgconfig-checksum = 4861ec6428fead416f5cbbbb0bbad10b9152967e481d4b0ff2eb396a9f297f552984c9bb72f6864a37dcd8fca1d9ccceda3ef18d8f121938dbe4fdf2b870fe75 -readline-checksum = 41759d27bc3a258fefd7f4ff3277fa6ab9c21abb7b160e1a75aa8eba547bd90b288514e76264bd94fb0172da8a4faa54aab2c07b68a0356918ecf7f1969e866f -sed-checksum = 7de25d9bc2981c63321c2223f3fbcab61d7b0df4fcf7d4394b72400b91993e1288d8bf53948ed5fffcf5a98c75265726a68ad4fb98e1d571bf768603a108c1c8 -tar-checksum = 4be18afeac54aec4af074cf2358cfade5aaebe2041c5075c5764a81114df4d002e90b28f4444bd1430783e7d6bed82abd0440ef5cb244695f2e56a9a41b42fbc -texinfo-checksum = da55a0d0a760914386393c5e8e864540265d8550dc576f784781a6d72501918e8afce716ff343e5c2a0ce09cf921bfaf0a48ecb49f6182a7d10e920ae3ea17e7 -unzip-checksum = 0694e403ebc57b37218e00ec1a406cae5cc9c5b52b6798e0d4590840b6cdbf9ddc0d9471f67af783e960f8fa2e620394d51384257dca23d06bcd90224a80ce5d -valgrind-checksum = 5695d1355226fb63b0c80809ed43bb077b6eed4d427792d9d7ed944c38b557a84fe3c783517b921e32f161228e10e4625bea0550faa4685872bb4454450cfa7f -wget-checksum = 95fb064f0d79b0a3178a83322f58a85a3a036fb300ed759eb67a538f0bbacdd552f6cbeb60d63b4f0113e8467d923a5ce7ac5570b7a4ce1733b3dfd559bb33b2 -which-checksum = 
d2f04a5c5291f2d7d1226982da7cf999d36cfe24d3f7bda145508efcfb359511251d3c68b860c0ddcedd66b15a0587b648a35ab6d1f173707565305c506dfc61 -xz-checksum = 7443674247deda2935220fbc4dfc7665e5bb5a260be8ad858c8bd7d7b9f0f868f04ea45e62eb17c0a5e6a2de7c7500ad2d201e2d668c48ca29bd9eea5a73a3ce -zip-checksum = c1c3d62bf1426476c0f9919b568013d6d7b03514912035f09ee283226d94c978791ad2af5310021e96c4c2bf320bfc9d0b8f4045c48e4667e034d98197e1a9b3 -zlib-checksum = 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff229713bd197d203edfa17c2727700fce65a2a235f07568212d820dca88b528ae +bash-checksum = 52fa7d14e5f05d7b1b5f64ca110388db119fd6b8d4cc25011e0e25848491507e610259091c56f3ca7a7688ee2547f8a98de2005626b35c29558e71f28e29b848 +binutils-checksum = efbc5957bd22804a860261886598778347e82f9cbc0728d21d26748b7296e34328fdc16252868169594a001a301babbcd0964d9c508523736bc79cac2aff8efc +bzip2-checksum = 929cb1b2d0db8a505e2bafe3ce2c893c8f132dd617c0110e86e19f116b570a85de3c8a635483b530456fc8f8b6698c8190d21313e47607f442807b10489ac86e +certpem-checksum = d1198511bf1f61a624691c182ecdd57ff468030a4af8b0f0ef248a5ce1f42ed883c684fc5aafdd791d5d3aa24504fbfbab11043231594b1c4e21fdb7247e5a33 +coreutils-checksum = 2afaee98b305f000ab1c9e25b5fec6413c3e385c685f2afe94e3676593c09efca39d007cfda2b3b122e68e94204d2ebe7f50b39300899d8720518d6f74bce019 +curl-checksum = f391f696d725ace63271fe614ab3067b8d55d04be3a160c70bbca388de93670e5fb986dd5dd45e1b62c0327b2b1374ab229bdb5c9df746f70823e0721c9f048d +dash-checksum = f21a2ac71606a5ae8dbda5639d111f385cc44c357c580a83e453b33faa32c92fd83f8ca6c5d2cf720343f8ab1cfec247ca499596f78ee19f62d67cd1d7ae0c69 +diffutils-checksum = 52582c860a4ce544f0c1a3e61bd9a1ce5f13e1c677b67d3713982439760e40b60267b222de3ef0a27065652822d9880a206899528d7ff8d60c7e64b64721b3e1 +file-checksum = b843b3c25656e8dec52e64eed6f581b29faf36540e0604a803d61c0f0eca830a01a947b81970b0d8a24a1336a37ae5c96bb2ade59daa16c544b1e82fc7db24e8 +findutils-checksum = 
826c643e7f5c5d6976a47eabcd9807e51350d09ee8fc7dc931f2d9276f938f65aa0bd97e6213aa979742234784c120e1a6850a52207c327e1c1a465feb374053 +flock-checksum = f711815035e21b46572bf80e730a55822e5abf4cb29749e476ee6cf4d5027e9a7deeacf5f6b8c37f18f17a0cc7a6d98fb0be3936e97b122707f1cb2306d1e1d9 +gawk-checksum = 13cb59a4ef43e5b0d10f13263dcf5ccd72d9344f3ecc512589ed7f6d059baf8a5ae375d38f7654695e29f0694fc33deba3c7c008d61f09e67df57bf81e573f0c +gcc-checksum = 40f239730fc0165a0ef48f252a886e9cbe7a95ed53fe2c7a16531d7f3d6faabecb2d868cc895a25f7561e7bb90892e3372652953efd8ac35359a6ac2e18a4447 +gettext-checksum = c53f3a09cbb62a3ba8870f1310da336c084bfc83534d7e9c236f7660f503d638654a91301558f1e4880ead2d629dcd92f7c3dd7e0d981d445b74215d836d19ea +git-checksum = 6e7ac2188cf3204fceeebffc46f34a8180e53a887987a8e1b9d6c90f84073e4b86d45f39488f69432b1cd0962cd8fbd434c07128fc2e430cd2ec20f1a1fc8b61 +gmp-checksum = ad65de00ecb46cf454ed6c40d2a57ce2528f5fa64df1284dfa15036f1e8cf27760a09a4ecdfcc39048faffb71339bba30d99dd365c54173dbc2ba629bee2fad9 +grep-checksum = b1ad850671290120ca0ced6d1744622d99a903c6e638dd231e89a70a2ab48c382100e218697b6fbbbda8c99316679a7c681383a5e954790bbffb9719bd0d6d8e +gzip-checksum = 19ef6f51ee8274c08aa188ebd26634a40f91c057ff1ad3253719a7fa0c7ac857de5cf289990e0b9fc67d89ca3abb50db6dbaa7664d2c58275c844712834d5e10 +isl-checksum = 615827a86e809c9645e090786008f8b37d7efc7fee6501ff81e937a299a5a72e7998a3965457d1380cb04f7e765b600276d87b7b59cae22d82aa65e71294f499 +less-checksum = 3b52347e1b779a52c89cb18da85a0963eed500e1e94cd3952c0693c56fd0eec4e8606eb02053d451ac92ec8c6e196edd1341eac54515ed84483112513f41f210 +libiconv-checksum = 1f33fcf7f617990812a26e15092dbb2f9e81400cee117eefd40ade813f7ca17c0b9a50a7a9881b4ab0b9c0be8475528204c347d940cbc667a918570e0bde051a +libtool-checksum = 2aa6d535b4d644393d7ca5c8f174b09923dbdb50d947ba40917aad75c8cecf957fec7a54717bd3670c44130331b1f08fae3694e32c79c8e187c31e909b3d401b +libunistring-checksum = 
6ca6a2dea2c09b6e8e63eb7ff73ab62b2e2f7c412766209c9f6da5b13a109e3a7cb41e67aaee1fb2b46549965b7df13c508000e40594570abe71819dc9cc2eaf +libxml2-checksum = 7c2c65ae5017be5d695ec5a0e9bf443fe130d33beec97f31bcdbe74c22ac7745ce02524ca603e701035576bdca91c4cb1cbe67ccad5525f9a4566451a8f0c935 +lzip-checksum = 513b7ecdee1d6f12298cbbb025286c887c5e6588e1268b489735955f822a8e305e7a8d4d06054a77b2f4c40f8d82e9e6d779cd73d4227ca3cdfe4002b9e42a98 +m4-checksum = 7f8845f99e64d6a45859b9d80b03352a5526b3de0311ca4d6dd6850e504d26dfc90cd21d1640b10382f786213f8fdf20183bff424b3c41ea11432315993ab829 +make-checksum = 154a0247297a7b0131ff63fa8636d651b33aacc8f4a0dad1db176995b20c2039f4fd58eeb2ec27cc0ca7a95c1853199a4ee35a14afc5084995ecc1d694203412 +mpc-checksum = 76e0720e6287d8b7a3eaa09dc5baa5a2b61dde88198c9e1bc9c458a268a44035d11fbab1c8019501a7ee2d5745c7eddfb0bb3c7297110a0fd9e027acb42d0fc3 +mpfr-checksum = c6013b0f573271a3bc41bc3eb28c61f099c6fc5409096a27624b5ec049e6305a1c48275c6f244d0cbe1fe65f649704c7d5f6487ad24308f6b3becc17032fc222 +nano-checksum = e559c09057ff75cd650f02744dff801750a159785234189e48f976bb4ab90142aca2577a283f80d8eecdc4f2b6c0a55bfe6da69e6aaecc5812f32f923075ca9c +ncurses-checksum = c94eccc1b23a9c6ca9b27881674b19802942802cf21084bd80e45e0b9e50a2d99e0e18a1ecd5cf3e686949982ca93132a7fe2f117a7c4307283db5012b11a2d5 +openssl-checksum = c4c44ab8e4a6d39e6aa69b096a831645c33d5675619d31da6e441452f67e487489b33041a34038a0f8ad596506e17ce8695a8f87bae861700481ecf6b9742b76 +patchelf-checksum = f74409c00e7e50a88590267fa0c173f71239f1471985792f7bcdab750d202a9a25383c015987608bbcd6ace5b3642645d46dba63199ba54fabbcb51ae7fca8f6 +perl-checksum = d53da403f2232b487f11df4b0b889babf58350715c7430515ba32e2e7d9996308c5d75ee626906a9f3c56872672c9cfe3880e10c7bb084c178c7c76bb2a5346f +pkgconfig-checksum = c37ab9336ac15a73bf5a23101f3fd8a04810f39b0679634e420e5d7bbd60a80cfc919122694cf83965beb071d2780681efc8d954ce99d77eb235a4dc7497e250 +podlators-checksum = 
2e1e8547b0c890101a9fb8d80807b164a0a708820617a6826127930f85115285a0e4d62ddf8ce2401df14c021521a1b3ce0cad0d308e08f7b3d0c49d1b0dd36f +readline-checksum = ca08305c5a6d5eb6656a7cdff73aab65c1234de62db8da90335e9ea140b1d4f8a867204bdc49ead3acc5e5236358130e73032bd73de5dfe1eee5bb2c76be5cb2 +sed-checksum = e801dacce45a08671e65897f85be0a37a29d97f61e846ca17ff870d3ab9f5a639dba73457491087ff637df4a81099b864d2496d59acf17a07916b43bfe0ae480 +tar-checksum = f8d0b543dc4a2a57d61d8a183d7a64d611d11f033c64683f022a7f1f956d352d364a959903a2549ab1256c1a068de51052960cf34a70f355b80e237f654bfac7 +texinfo-checksum = 06e7d95d73380c16f064e119bd8717942fb464c1cbc532fb1833d2d5dfc3cbbcab2a9001a1d03721c347f676e1db22c2a0f78d19d22df379d0393527be411ef3 +unzip-checksum = 5c1f3c417d5feed64b8c5dbc26b51dd84130b9ea43d77e810cc9e82cee6e965fa76e2636e5ba11a029eae3454a815a6081cc2828079fa3994c511d555b82f12c +valgrind-checksum = a99e09e6d957ce435e64f4ce7b1a14e7e266282578171ce4e3bb3f405ce304e4df3b43d2fca59a6024c176f60c2ac82b3992afc810386e8de03c72f262b32702 +wget-checksum = 75ba60150baf673def8ff5f2688af57d89d699f1b5105cb1a6bc0ce88778be191c74757665d773ac84fdab975851ca9015b30f12f31877fd752500efb49e0034 +which-checksum = cbda59450d35c2fa04f9072db43584ff745844cdb129bce1a382eb50bcd19815f0bbf12f2b03f748cb3f585acf0a16eb2e661b71f9066469236d28e9fa40f168 +xz-checksum = d897a55ae5ee7d778c68edff83c53e6624619f233acd529439fade305681d07b7665ba1d370cf84abb38c7f3f8f88c3d127974c4f59667843814211802501080 +zip-checksum = 433eda6a27074746a960952fa3b08028d5ba43fe976a1306dbace9209be5f89aa0554b17bba5815cd00984c0f64559c5fa0b754b620f6d646a2b145a8b599acc +zlib-checksum = 8f0b28c7d0d5d1906c2c03586f4ad3188a7c7eb73d25295a7acaa88cdf87b7c82ac609282b9b12d2b4d5a368169e4bb5f88ba3b840b57e1cf32c78f2ee3ecd86 @@ -71,150 +71,222 @@ zlib-checksum = 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff # ------------------------------------------ # # These are programs and libraries that are optional, The ones in -# 
`reproduce/software/config/TARGETS.conf' will be built as part of a -# project. To specify a software there, just remove the `-checksum' suffix +# 'reproduce/software/config/TARGETS.conf' will be built as part of a +# project. To specify a software there, just remove the '-checksum' suffix # from the list below. -apachelog4cxx-checksum = aa59ce549c2c5cbeec031361dfce09cdfc3e62ee3bc9ecbc809507b7ec878c14409b98536b7d13c27690809c8e9d5ebafc3589c9fb5e4aecd5cc064943ae7d6b -apr-checksum = daa140c83c7e2c45c3980d9dc81d34fa662bebd050653562c39572d0ddf2eaedb71767c518a59d77f59db9b32e00221ef48b9f72ec3666c4521dd511969f3706 -apr-util-checksum = 84da76e9b64da2de0996d4d6f3ab3f23db3724eb6352d218e0e8196bcc0b0a5d4fe791f41b4cc350ce3d04cce3bb3cf8bfb513d777d0cd030928368e6b55a536 -astrometrynet-checksum = 1ee3a3490fb687dc580e660a05ec21dc65972e6c1c97b9a5f648b9e4ac39bbb6b783b351d9b3fd8a4721ce01bb891166c18809f24144f6fc0d436955f0bc435e -atlas-checksum = bf17306f09f2aa973cb776e2c9eacfb2409ad4d95d19802e1c4e0597d0a099fccdb5eaafe273c2682a41e41a3c6fabc8bbba4ce03180cffea40ede5df1d1f56e -autoconf-checksum = c25e834251bfc2befe822614caf1c80d7e1314a83e7173304abc235fd15a958b8db9fbc801e8ad98328dfd6d9dbc425bfbbefec500fa268992ae7bbf4fa5bc35 -automake-checksum = 8bd5b1d698eeb1d969ca0a32184ad46014b3ec334e3b8a0caf0e462ea5e100fe4ccaa7dcc2224c5db6c87e215d594593db0bf4431649186afb821b0a711c1a82 -bison-checksum = 8763e7b08fbcdeea3200fcb8d65a2589a12b17988d088ace58d19ea09e44d32e73a34ab1d6c4a2106e7147149997aaeabaada22b07159a5a0055cf60a9db162b -boost-checksum = 2844dddd3357e76909f2f3d008f686949f8b54aed29e1c650e18becd0b50701a273bb3754f1a4e56c0c056693b27c354c9ba54ddbec92599e192c2f6736fe3be -cairo-checksum = 9eb27c4cf01c0b8b56f2e15e651f6d4e52c99d0005875546405b64f1132aed12fbf84727273f493d84056a13105e065009d89e94a8bfaf2be2649e232b82377f -cdsclient-checksum = 2d7abf0079189b9dd19cb8919061445fd19ea9f7dfd54e8ceee26b743218cf62ab00eba0147abe82d9294223927f04b4cc3328620dfc9184a7049f8d515b29e4 -cfitsio-checksum = 
08a13931726b0ee15bd4e2ad6dd4debb8268f3b0bc33adadec5c6a29295dd536bcccb3cc949721c6cebac6f43b6118e5e38332ac0ba8a07a43553416d8debae5 -cmake-checksum = e0591d5fb234f3e7b74d6d2aad44fbf3e19e69547bd428681ba6ad0461d4f3d2a154605808b4733531d2c66f0e91eb39a179ae0d89a37e92a3f20e9cae691468 -eigen-checksum = 34cf600914cce719d61511577ef9cd26fbdcb7a6fad1d0ab8396f98b887fac6a5577d3967e84a8f56225cc50de38f3b91f34f447d14312028383e32b34ea1972 -emacs-checksum = dfb26531d2c19cf9fb56505f03d799654b45e5f9528e777900e8280ed2c1d21e04c52f510528e31e015977c471ae63164cedee6174b7439ebcf479a21fc18064 -expat-checksum = 514ff2ef3c93af0b1715b7a08732db33c13a113c4c72422716a22ee26c09235deed71ec55510cee24c33bcd6b2347602bd71ce70a432d5583fb63765ff9e0e09 -fftw-checksum = ab918b742a7c7dcb56390a0a0014f517a6dff9a2e4b4591060deeb2c652bf3c6868aa74559a422a276b853289b4b701bdcbd3d4d8c08943acf29167a7be81a38 -flex-checksum = b4ef58d4a1d66b213e2f59df06959decf46d26b253cdc3f51cd26e2e2b505461ef23dafa974dd2005b1f0cafa5a83fe9258baf78004b2fdae6dfc299bc17bfd1 -freetype-checksum = cbb1b6bb7f99f6ecb473ce6027ec5f2868af939f793dd7b083b23e9823e18c4bcbac0b92483ebe70804ad7f4ef5bf4ea5c6b476e7f631a3e6a1b3e904a41e1a5 -gdb-checksum = 0ccd3a2a24963c848976848ba890f99a458569ff10da4d2bb6dd4782852662ab2df7fb9b3525dd4fd04f95070cea07e1bae6dbe2969e321cbb4004a033845d1c -ghostscript-checksum = 32fb2a3d4e81ac9e281202aaed2f7811e80c939cbce3ffef7ec7cf78213e5da8a2f6c13d15f0c6c8fd24566579ba8b69364d4c66f4e4b7851f6df9209d0ff046 -ghostscript-fonts-gnu-checksum = 222cb81e6956d9724e746f5f4d5c7b47b04bfd75b889f437f946c29a5bf16b5ed57b7584cf7fb0d8561287ef9f2e1ac53e4d231b6eaf23cba2612b47e8a5f919 -ghostscript-fonts-std-checksum = d4fe6b097a3bdd9694dc704a2d986e22d649fbb7ca8302f872dff573525596a3f38246bd6dd166a5970a4fbca8cce6ebfb9a7030806b4662630afdf243438e1f -gnuastro-checksum = e236814001c1740355cfb1490926c667d14c026c8e93249afb25c1fc934a8ef8c52178276b7ab5cdbeb819117f962a4c7f65df74f0fc35bf512388cbe3b26d39 -gperf-checksum = 
855ebce5ff36753238a44f14c95be7afdc3990b085960345ca2caf1a2db884f7db74d406ce9eec2f4a52abb8a063d4ed000a36b317c9a353ef4e25e2cca9a3f4 -gsl-checksum = 0be8240715f0b86aba2c63d9f12da4dba4719d4e350e9308d279e0dd3b2f0519ea26fd2e38a17f3e8cf43aacbaa2455207a7ca0d6c305f3b8725e8ece2250a74 -hdf5-checksum = f828ee9d63533effe1ad358230e5ce7b64c5016e49291d9533575f713cbfba496290fc0151fd9617898bdf36785984ddb38a9207f529d7702d4e23838fe050d8 -healpix-checksum = 29fe680d757bd94651bf029654257cb67286643aad510df4c2f0b06245174411376ec1beca64feebfac14a6fc0194525170635842916d79dcaddeddd9ac6f6c7 -help2man-checksum = 786a6bd4336c591cfeb0b4f2dc1429f6545e36514e7b238453c91368b8f531c46db2be025f02dc52e6dd8b971d6edbb4ff1a8e1b519f9253a3957ad7157790be -imagemagick-checksum = ad4325df57769f9c4edf8ac71370cb9bb19e090e588d47eb0311e3f4895abd7a7edcbd2e7a495f21acd1daca97fa224bdf1fd978577588e45c11a7799c3d67f4 +apachelog4cxx-checksum = 2c4b907a47ae00c38ae9bcd5d215be5115bfb677646cac50538eb1c75824f594aa997dafc9a7828dc6bfd5f22f4c486f13e4210b9a1d8a9c98098d96384ef9c3 +apr-checksum = d0ddf8b10dd8d3a831b94e541d387414cc4d507ad48ff752d3274fb808afe6628d7951364aa3e0f71f373c78d0081411f6a0595d01c62bfb0a54bbc86002c82a +apr-util-checksum = 6589948e3f9daf4ecd700bdbd2053fcb83005cec9d339278dda067996ba696e4a947116f066cee03214458aa15e0cb6f0df0103e7bc6f8f32327722eb7265f35 +astrometrynet-checksum = 33bf92ec1d5aad50525739f5afb3abd2abe27c8b0cce35a6923831b7b7c35930cb6f14425df47c60dc5561cd27b4af55ea6ccdabaa2d444c81a6c86c7ef9629c +atlas-checksum = 13634a63c686800bef8affcf4e8f5f89d08f485fc557eb1b9ba51a405926014e659e439268f655984bb7f59ffe497d594cd7a607103826f1046325d89796e1b0 +autoconf-checksum = 99edec992950b85f7709323247772d1ec0f6ebfd2b066d83af65e856e8ab2facfab257f43d316fc5a8905fec0ce3c24768afd2873a1b85c795c8c120ae5f9277 +automake-checksum = f4b6485c7358554860b370d587bb5cd6c6c1050529eceb2bb0a0aecd408347405b3d1aa2c63e4a0aa38dc394178ea1e05599f1b5b6951c0e3d8f2f85639ffc71 +bison-checksum = 
08c3e9bcf9fc01bc8b3a6c5e5f8ecdf628e07d6ce0874341e9df6d7b2925db7720a29b3be9a98d644f05a9e55e1130e5d182cb764e1481891414df93aeb2794f +boost-checksum = df252f6aabd9b1d5421afc478fa288e947a7c5e11f26e54f0a8619c9ac5e16688c20f44106a429ae798b20a4c97cbf800ee0ac9b2aa7febe1bd2755c88802ac1 +cairo-checksum = 5de8528196b450d01da5cd4562dec0dd8c31028b77640b6c611172f7c5ad1dc9a4d30bd4e38259344c3ee42b979e29be702a3190aae5fbf47c82c79ee0c14a8e +cdsclient-checksum = 24584eedeb84ab4666cbd2ed0b7264c92c63c9cf090595b75b01dd46f848419d6a7e5cff4db3946695f879d0ee2bfbeb527e88c2be85e769b577302d9b2ad2b7 +cfitsio-checksum = fd40e0cec22bf3a35a7c48ad61a6ebad5a54d3230606c7492161c067b7ccd303ed34d149427bf1d3365e7a3082d89b51df46fc8a9c3d3c0b1eef8756374d711e +cmake-checksum = 3904964eeb256ae4443d000a24ebdbfd76c677aec08d006f8665020f1ebf2008c65b6a1cffd6fb5b8a29cdb52420415ed2e0e2ea258db2e9888dc48068fc3941 +eigen-checksum = 39c1944e0daca50bb01e929edc98e2bfc234accb30ca019dcccfc7b02db5cc81035592be219ae93cc3ec16fe0255e7e4d1f29ca2ad3dfee7e63546c47cb2f807 +emacs-checksum = 91313dced8cd74f9e4a996eef42806aad00fb2576fc77b3694bb7b670624b6a1551ff49c2a3326dbeeba916e5732ca5975b5dc40b3b063f6e73a22839009abe9 +expat-checksum = c006dff4ee90ee3722df94ef227cfaab3ee4ebf7ee8193a65b904798b4d185e89c863c75a1e8eae9c120bfbf79336c1e31a8b6aa3042f4131b9a1eb0e97c6643 +fftw-checksum = 28bd2f620399a415181027d30d7ea193aa487c7a277c9943d0051488908fd87e2731de0dfc3bebd22a6121d1deaa46037be8296a8a9cdb711f9fde4510c3d368 +flex-checksum = a18e0dcdd21bd51779fd70e5efc2d77fa7b2a56d4d0e929d8abb437f86d7fcfa2cd09b8268185698200866dac8bec4444c5a7cff3140c515eaa49fc77c5aea66 +freetype-checksum = fb2d89421633a56d4276b192106d930c9f29cf2b70c52d9e13ba0c6a2679b6f69c20ab9f14480522992c263b493f198f24593bdeddb5806a4d801ab7604a5926 +gdb-checksum = 4afd6660d2bbc4c48ce726062fb42217bdaa5974774407358b2624036a2ace48ed853746e4d6b83b9a409cd8fefc622b0727c79da8826e28e3d1444fa3a9d148 +ghostscript-checksum = 
2d6422d434cbe6612d4e995022322e17d0167d0d92fe1f6fe122da73558a81fa43025458411e38363a0e30a573cbc6cf997ae7aa562501787d0af88bf1b5b387 +ghostscript-fonts-gnu-checksum = d8de17e5d920bd3803ecdc07aca224fa5b9a26db847ddae0ecc79488dc3e867ab3155a8565f20df7855e5228bc7f36a549b082ba0b8b56c9b948d6cfb655fdc0 +ghostscript-fonts-std-checksum = 40e01f88dd113c3120686c11da1c4cf4ee77c8db315b21cc936c3ccfb42cad4949e207298ec6d78d03327ff05122fcd75ac82d5e7cdc9698d4bbf1f72b19756a +gnuastro-checksum = d27399993ab65d1864c915fe107efb298c758e1f24e3f300830f8f120768dd3a54daea1d2af5537e9dd3b50a13a51edd237f434bfbb442ab992828b2dab1386a +gperf-checksum = 854cdb24337f14b0d4199cc97c4a1d2ec7f953ad8125a47a932d93f79bdc067e2de231859ac0e842f14b1a8d80c5606fdc2c295a370df5e85ea15023b478805b +gsl-checksum = 9a46b431eed536182f31e1ecad781ade214ac8bd885ac88bbfd02b6e08482c294775a529e561d358c1f0211d8d20db4ec99b448b9006c3bf7599c076d3fbbcc3 +hdf5-checksum = 58d6d9a1c201efa0863d9c31d189b4d40b1a9b1fb1ab9cb3fdeb4463f508c043e467aa4d44484c2af111d4e2fb775223286da69568774e6edef285208290c67b +healpix-checksum = c11949e92aae9919fd41de91cbab72beffe2519c1203ab153ba217c6b6c81f2c10ceb07b683da0d04fb53ef5c80a9b208bdbb5379f379b7da9d0611d2430cc8e +help2man-checksum = 83dca38c2020c85a66da882cd994b4e291eb6a0584149b7b3a74fec1444399ec5ecfc6296080fbc34071cd9f8ec5c931e249fde1f755c7f40930cecafb3efab0 +icu-checksum = ab14b52a3fdf2dcde6b5160ab7218eac381b850d3c278324379741c49d71fa6040fbacca94c6937e6c9fc15843761121deff302ca6854da5ca1cd5b26a34e839 +imagemagick-checksum = 2132614540b7422c9772fcebe7e8e358994efcfb53d8e48fa52992313b09b191847e395bad305322c377a4697014353bb8c15adc4edfd712e038504fc7f17c5e imfit-checksum = 15edd2349232c1c8e611b31d3a46b0700112d274515f54d0a0085bb4bfa6d3d5f8a15cd926516e043a29ce841accf3534ae58dbfb952d858dc9445199c957096 -lapack-checksum = 17786cb7306fccdc9b4a242de7f64fc261ebe6a10b6ec55f519deb4cb673cb137e8742aa5698fd2dc52f1cd56d3bd116af3f593a01dcf6770c4dcc86c50b2a7f -libffi-checksum = 
980ca30a8d76f963fca722432b1fe5af77d7a4e4d2eac5144fbc5374d4c596609a293440573f4294207e1bdd9fda80ad1e1cafb2ffb543df5a275bc3bd546483 -libidn-checksum = 0d66e10bf7a8de4b27f692a427d2c8e901b8bed73b0a36268d8f939205df81f6a30f0634fd3b87370d4e81c1327c0b10391fa122a0a5459c32a3541b8a2149ad -libgit2-checksum = 7c307822b22e3771e5e908b115600310f7901b3250287532c498003b25a5b1e007bfa23592f16ec4d83c1567a9213710526f78cab7c120316e9a8fc74c5e57a9 -libjpeg-checksum = 74ea5af3545657d4ac03f8f7933913112cc2d982f0e379d0e5647f1acac21931468e53806297c30ebe180c7bcf84919a0ac20a4195afb03db03060d57904ef6c +lapack-checksum = ff670e194a1d8c998f05e6143e01a09e6b43176c511217ea3c77742afd9f2566251c50fc23aeb916442401f7118c1d1fe21f0172382a7f4f2c516c1d7d873e24 +libbsd-checksum = 5c7d98474000af1271a36ab769e54aba41578e0b0f06e47af2986d6821b6586ac430ec04cc51b7836823834dd9d0aec9f4ab3af088b94f963b89729fa2cc95d8 +libffi-checksum = 027416da7066bd36ac0c9ed8228693e5eeeb5e11dc17afcd7ed7faa66a9e57f505e94dffaf18f8978e6b40964938d4289aa3b5fe8778abe8fb97a68138b8b120 +libgit2-checksum = dd8a2acb214a2f6ba05a0a51a05fa7c04e09f482fe166a6d74f072c8335f23a1bbd3358395c3fa6cbb0811369940be26e3463f9c8fe48d70fce062b69ac996b2 +libidn-checksum = 0ffb80ca195ef0b1b4aec7bb45499d64fbf6b45eb0f68c4ea83193148dc3e761a0098d996722c2ac6e54f18d25057fa8c54b55a9d15593924eb733a30cf8137c +libjpeg-checksum = 312b0abf986dc937d57dad31b49472258cb6175f9dbc8796a67ed0971a4410bba40d87fae136e42677521dc14df376771982f9acdccf5143ac7edc9d2e91a34d +libmd-checksum = a598b61dcbd7c3daf25a4affccfcdb2f6d58a0d2057b4f24e8fb92cddbeb0d62f1f5e3dd55195dd83be405d187184a331ea080b936b6c336ad4271be03b0beb0 libnsl-checksum = a3c8f674357674b7ed4b26c05adde607f39be8d6dc9ff715448e1fcc5fc23d11fbb4ce85a6e493b79bdb0bb450dc3ffb1fb480715779f738d7bc016fae91621d -libpaper-checksum = 3bf6ebb0af89931d2f72ea4a09a7fa958b2facda5f238983ec7bac39652e08614b33f0de3af74a03457b2a4203eee4950bf18a4b726e79aa64093ace6a1fb0bc -libpng-checksum = 
59e8c1059013497ae616a14c3abbe239322d3873c6ded0912403fc62fb260561768230b6ab997e2cccc3b868c09f539fd13635616b9fa0dd6279a3f63ec7e074 -libtiff-checksum = d213e5db09fd56b8977b187c5a756f60d6e3e998be172550c2892dbdb4b2a8e8c750202bc863fe27d0d1c577ab9de1710d15e9f6ed665aadbfd857525a81eea8 +libpaper-checksum = 8532e5e95b53e0dcb2b2c4c161d3840a34087b5870d449950a853dc312a4016fb1443851385ddfaf4196ad3b2d431e0d861efe82e1a0442392c435c4455acd76 +libpng-checksum = c4eff9a427302c6f228b93a5a6b74d1e667439ce77f20c086d91ec6efa932a0e7f5752b976f4af5cd07574c5e4999c86e2b9cae3e82cc448ee61f3d0dde9253d +libtiff-checksum = 72ce12ce317ab2a5c188d3aeb575004eacf890dcb2729e9cb46f73c15cde2132624b4e229539af01626e59ea6a4a7380b4b5f90d5fecad787197b8b901615d20 libtirpc-checksum = bcb6b5c062c1301aa1246ec93ae0a5c1d221b8421126d020863517cb814b43ed038fb6c0c2faf4e68ff133b69abefe4f4d42bfc870671da6c27ca941a30b155a +metastore-checksum = ac1230686535a652e95024abaf6c5585bdab36f4e092bee5fa4deff2a913cd60f3a6bd8020c6887ccab97f0da3a284a0d4619fad5464a269e7b2040d6f7e6aaf missfits-checksum = 32727f5eb30573a1cedacb8900e2536867e4815059eee32e64e3db65be9291b8a91b9f45b2c9f3cf6fc2a8cc448012ea3d502bdd9dee516008e17d5086aee795 -netpbm-checksum = 064720f8a9d0a502488e1af4daecdbf3936910996507ca6f311073a0ad842346692a148eb1ddf7b717f7b108f60500246cb4b83f4d3665f5fc285a84ae1d63d6 -openblas-checksum = 64a5f983b2f6e02cdb6e0f14433498cc5daa1ccfb49246f7a2dcd38f9982fa608f2abea069fe0e35012af8c1441c43d1f6418eaccd40795f5002fed1c36ce05d -openmpi-checksum = aea02a66dc67820c86172fae80f5aa4c71e918525abbfaf06df8ba6898ff78ec3b0ef9638952b2c1d1a9c878fc0999d6f6d06e840679a68779d6b81f0336eb76 +netpbm-checksum = aef81d2e46850fae1fb34a38fa9f634be3c47c7c4e80f300c61aadd5532bcbccdec636e6c701e54caca67ac2d6ce3a7b6a9f090deb00cd4db718439986f89d8e +ninjabuild-checksum = 3a6bfeef3ff1143d427a658d2d4119e4c9d85ebba37393760dd7a6dcce1c4a9e217009b6600b5a72338edb34680f0c45dc07b6b09afe1b0a9eb163993f2bc41a +openblas-checksum = 
c87485bb2b74cee175a6e631c63cb52fab174a2275d375c00680c4cbcd8d12a116223fda0349fc677cc9fe49794ce7b0342740855840df491b6141c8f2f4784a +openmpi-checksum = 88c73fd708dd5fe3a893d5517425a8a4c3e252fae4715df9a9b0f2311abe94cf5f71be92f153820fbaeb32c7d0ac0175d5165bdadcadc731427309102182c1bd openssh-checksum = e280fa2d56f550efd37c5d2477670326261aa8b94d991f9eb17aad90e0c6c9c939efa90fe87d33260d0f709485cb05c379f0fd1bd44fc0d5190298b6398c9982 patch-checksum = 75d4e1544484da12185418cd4a1571994398140a91ac606fa08dd067004187dad77d1413f0eb3319b3fe4df076714615c98b29df06af052bb65960fa8b0c86bf pcre-checksum = abac4c4f9df9e61d7d7761a9c50843882611752e1df0842a54318f358c28f5953025eba2d78997d21ee690756b56cc9f1c04a5ed591dd60654cc78ba16d9ecfb -pixman-checksum = 1b0205dbe9d9185c68813ce577a889f3c83e83fbd9955c3a72d411c3b476e6be93fc246b5b6ef4ee17e2bb8eb6fb5559e01dff7feb6a6c4c6314f980e960d690 -python-checksum = 392459354b8438f76670ec2086b4618ee21f615ca4b22e5dbb366273665a470de5047a3f241925a62b60bf3cddde5984a05144a55d0c5ced43342f5b94576952 -R-checksum = b7330613ee9795f54cde3dd9f7509be83d9156fb8577c17179727ee01450db27704249f68bd48e0331e2df09c2d9833d8bb019c4f9ce9ba669df74650ff2e842 +pixman-checksum = 333732b99994f7ea636d647e0b6123075351b27601b5b6370c9bc821a1ab3921386ddb92b51e015f3fc35104ba09be1e0d7bef47f0b4f73036b01d1d70396dd9 +plplot-checksum = 2c5a36d84ebd948402c924d304427b5702bc75bdd22753c9b3b910b382dfdcc26910382aae452f02d86b25c1c813bbe1f40c9305de1d2809d575975f610fdf10 +python-checksum = de940d7eceee69b8a04f12c613753f61db53ddac1c5cde5388dd342e88a09bae8da1f56b71b96f20997f6203d169e92e9e2558c43b38dbc323221d7bb362abfc +r-cran-checksum = 54cc07956a70c09b5a533188eb063d2a9dc67a8aa5648ec7f9c107f626220c9f6e17ab7175e65dd54a8d608a1ca4106c8ad2856709eb995ab66439b5f033e725 rpcsvc-proto-checksum = c3011d7d7ef97a4a751f6921df1a23e1dd8ac50fb0690c759d37010ed7be27968a2130e3b8872cb48d5914216f9d539096a424f1ec38a75f7ed899748151c6f4 -scamp-checksum = 
35034a367d2cd09dc51e727e0f23ef6234edc0d978fd71cda1e80391d86af160138cb57281f7f7f9047e35b1246a0de6b235414086a62524413ed423f498583f +scamp-checksum = c7a192f357c6808acd16d1c5d2657c5c8c2f61e4fecb8a4b18d39b07f4c444f85d5029c21571d41ec3ba9c8d075de8bd660b0e029bf5dd63e3819e963430a938 scons-checksum = 0477038b014674049f12899b64584d44a85283d521b2422561e42020a5ae296a5af005684087c3ff410ed3fcbdcc5ff61998bc429eb29513f2a864138ffc4945 sextractor-checksum = 4035710f9b8a20a0bb1a3913dab2dadd8444c179bf6dee425e0e8bb66a772944ea189bfce89fd791d316a790fc4b2cb15a62633b19d1d5331b1803dec2e70af7 -swarp-checksum = 80f4ade59738df3d4c9b47bda04148b53c6ba995d523fa8d1e02fb5d952b6078a53cc7d273849a033505de127a4f318b95adf2bf5a2dc38e8cc9bdaf5658487a -swig-checksum = 5eaa2e06d8e4197fd02194051db1e518325dbb074a4c55a91099ad9c55193874f577764afc9029409a41bd520a95154095f26e33ef5add5c102bb2c1d98d33eb +swarp-checksum = 810af6ea0a2dfc7b78b0f6aaa486b80c29f517b90c29f5037d05f31f761e96a55c8b5dcc259e1b435b25f45f01dac45ddba61da84000a69676cce119880bb05d +swig-checksum = ecb1d940f584c23df036a8f77288916003f861851dfdf836ddc5bc45b95312f9259150084191c1d9f086c006e64e12a4db22f7ea382fbb71667b811b8f99995d tides-checksum = c3360ff0d023b43749ba09a33302ca059f017a157b3ce7cdcf4f1a1578e90d3e7fa420077043adbee6b1ebf94bd698c8d6b279012f36d2a05b4de5351e30e108 -util-linux-checksum = c95d26b9037d6b877a247e6aeb58d17aa80f7e1bd6b523a4e0fde559fe07b3d924ece6d373300fefb65d1f206b3f990aeddb3a03605040e72ce6d6ee88591021 -vim-checksum = 06ba43386fcf308520d88d7a68e9bc1fabd824b05078b8f9112500a2ad4e50a91f1a1c2925889b7c06dbce34307f12abf508e2172b05fd283f965cc06552eb6d -wcslib-checksum = 8c98c4b575056e2d966b77a4bc951256d02ecee3a11847e140fd38d93afd0f76b3e906d590c952dc9fc58ceeb1ba062b19d8e1e676ee0032f5b7ed13a9dfa892 +util-linux-checksum = 9d421976a344f982f2bd910f0c888a7df4370a1e367c7d2582d03a58cb8500f1fe65cf6a7164c492a9ebd76bff04560c344b3a0f63ecdb372bfea2379b383030 +vim-checksum = 
85d083fcf3638f2c3a049b88da46d569d6a250ae132f3821d440c07c4792befffc09e4235241ca96d7b0307a1bb96cd2222ac8a63fb41c6add8c2bdc6c17aabb +wcslib-checksum = cc72804611df1b0087faed2ec19edbc17352cc631559edc8ceb03792162837cdaa6bbf144897c69a58c9fab7e569a1958149be7eaea6552d72190406e4755d12 xlsxio-checksum = 22870fda7bd4eefd5fea2a9ad7530c9049135129d9b69805091777e6b54b2fc6c3f0e69c6954f36bce54eebbfeccaf637cce9e271a593221a4296d6632470a6c yaml-checksum = dadd7d8e0d88b5ebab005e5d521d56d541580198aa497370966b98c904586e642a1cd4f3881094eb57624f218d50db77417bbfd0ffdce50340f011e35e8c4c02 -zlib-checksum = 73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff229713bd197d203edfa17c2727700fce65a2a235f07568212d820dca88b528ae # Xorg related packages -util-macros-checksum = 3f51504b27f0478c136126f15110cf3cdbba218c4d74a8e974cca1381c6e8364609bd0c444f2fb19aa86a7f4e848dfce4f4da940463b224036f75a60b3d88619 -xorgproto-checksum = 2d10533e54987f293e1c8578aa742c38cf2fac8551db1d317cf5ee4a6a9600eced50f59c57c38b35dd210fe02d350294c967512f04cb9876d6c0c1f7e89cb199 -libxau-checksum = 3ca454ba466a807ea28b0f715066d73dc76ad312697b121d43e4d5766215052e9b7ffb8fe3ed3e496fa3f2a13f164ac692ff85cc428e26731b679f0f06a1d562 -libxdmcp-checksum = cb1d4650f97d66e73acd2465ec7d757b9b797cce2f85e301860a44997a461837eea845ec9bd5b639ec5ca34c804f8bdd870697a5ce3f4e270b687c9ef74f25ec -xcb-proto-checksum = de66d568163b6da2be9d6c59984f3afa3acd119a781378638045fd68018665ef5c9af98f024e9962ba3eb7c7a4d85c27ba70ffafceb2324ccc6940f34de16690 -libxcb-checksum = b90a23204b0d2c29d8b115577edb01df0465e02d6a8876550fecd62375d24a5d5f872ddd5946772ddba077cadce75b12c7a6d218469dc30b5b92bc82188e8bc6 -fontconfig-checksum = f97f2a9db294fd72d416a7d76dd7db5934ade2cf76903764b09e7decc33e0e2eed1a1d35c5f1c7fd9ea39e2c7653b9e65365f0c6205e047e95e38ba5000dd100 -xtrans-checksum = 4fea89a3455c0e13321cbefa43340016dbb59bdd0dbdb5b796c1a6d2a6b1fd63cf1327b769ab426286b9c54b32ec764a50cd2b46228e4e43b841bda6b94de214 -libx11-checksum = 
fc18f0dc17ade1fc37402179f52e1f2b9c7b7d3a1a9590fea13046eb0c5193b4796289431cd99388eac01e8e59de77db45d2c9675d4f05ef8cf3ba6382c3dd31 -libxext-checksum = 09146397d95f80c04701be1cc0a9c580ab5a085842ac31d17dfb6d4c2e42b4253b89cba695e54444e520be359883a76ffd02f42484c9e2ba2c33a5a40c29df4a -libice-checksum = 2f1ef2c32c833c71894a08fa7e7ed53f301f6c7bd22485d71c12884d8e8b36b99f362ec886349dcc84d08edc81c8b2cea035320831d64974edeba021b433c468 -libsm-checksum = 74c42e27029db78475e62025b4711dbac5e22d2f8e8a24be98a1c31b03c0fc4afe859928f851800ea0b76854f12147900dc4f27bbfd3d8ea45daaaf24b70a903 -libxt-checksum = 06248508b6fe5dfba8ceb4518475f656162351d78136eeb5d65086d680dabe9aca7bba3c94347f9c13ef03f82dab3ac19d0952ee610bc8c51c14cee7cf65f0b1 -libpthread-stubs-checksum = 5293c847f5d0c47a6956dd85b6630866f717e51e1e9c48fa10f70aa1e8268adc778eaf92504989c5df58c0dcde656f036248993b0ea5f79d4303012bfeff3c72 +fontconfig-checksum = e71c684ffcc75015b1aa13918d6598a213b926630317e4babf876fc503e0444ed3211277080a08505cc26da033ee8e673bf5e246cb9d75d2fbe18a43160a0dad +libice-checksum = c38b5f14d8ac92c5c2507f4369a70c5e2738ee547765ea7a160fc6a0ebc217deb042bad65e16b76a1874ebd131eacc5f53e9630bded5ae1805a7c4c61ff5f357 +libpthread-stubs-checksum = 7895a95e4674fb0c4f206abf744818f58272597f0dd84480a1a217cb4358ff413d153f5e13f748867904e104cf26c43ff8d59a33e0f0a7cfaa81ee21d5d847fd +libsm-checksum = 7b03a81ae81704ed97426e596849ec1209771f6ca4db296aedd7a698750b586cd56d149690922686c52c64c1c582672474fce00816dddca2c90ad9bded51a269 +libx11-checksum = c5ee35af06a90bfa0e9c5f42feea9887c5beb88960a7658f8465524a360e400a5640e5dcda741549d7ecf470e7142d3728a32659e12f95a316203655a408db42 +libxau-checksum = db9c8ce453a650a493f83e30d4fa3b50aabaf01e98b5e24f666c32238f46a8c2c120cd18ac8fda447c3c7f9bf300f9e3a08a1cd2bd1c4ef4ace72a54cfb8243c +libxcb-checksum = e360337777e578c76251b793dc4e7e9203271f4dd0e1032abbc092f96fa228717fbb702965dff061264cca52f2540c983f4085fa486ced0145ebcc4b9e5b766a +libxdmcp-checksum = 
3c25a8d37f2ea553ebadfd9d886592f9c181e7da0a73b5e7bc853f15f0cddbae39483e55dc417c47ed55b45b8401e633efcb02d692af715b1ec1549b75a67378 +libxext-checksum = 99b2dc7b1e65e78b3335138176a1031a8259820b60abe0ebc87b0c8cc1d3bc9f65aca8b43942811ff78b4e85640e0d4cd63b092423b1351daf32bc4109debf88 +libxt-checksum = 776457c702a8685cdda0c2c0b1d1c441f8e20513b89f83b031c3037ae132b56dec53027465c15b2c2d7d5dd0e6933840b5325de8a7242ef0daa76ae5f28016a5 +util-macros-checksum = a9facbdcf973bfb202832b57207731152e7d58ca4ba034b4de31b931256af55daa651d0463309bed5ad84ed892b19b319c919aab77c849e08deac083d1445529 +xcb-proto-checksum = ea92650cacf0a864381fa29ec5e0c9562435cdbd8604ada5639a79e6aa359b3ef0d7ad0a10c5b1599fa1ddd49d86fade4048526445a9525c2978fa984a34f7fb +xorgproto-checksum = 1b52be01bea4c22c0bd92ea073d1a11e08e9233e93e28d578a7cb247385f3a270b4fb4ce463cb853f3e1a9c6acb45da456e5f6caf4cfa7676a9e316e98bd22cc +xtrans-checksum = 84f1142994f93882c9c68a4a85a218290c1c979162dbd4c5ee6b6becf0f96b69685d292cae1793ce65db8d7f1fe3f8955499f4b2ced771081894921512e2e16a # Python packages # --------------- # # Similar to optional programs and libraries above. # -# IMPORTANT: If you intend to change the version of any of the Python -# modules/libraries below, please fix the hash strings of the respective -# URL in `reproduce/software/make/python.mk'. +# The sha512sum hash strings for the Python modules/libraries below should +# correspond to the version numbers in +# 'reproduce/software/conf/versions.conf'. If you update the version and are +# confident that the new version is safe to use, then you can update the +# hash here. 
asn1crypto-checksum = 44d442a6ddfa971e31e24712fe084368356deb5e1c4c3b3e813e0910931860215bc1c4f9eb2c4bd4fdef607c324086c096e9357068646efd28c97f2d4f85c62f asteval-checksum = 4d64900b2f7dfdd098d6c8c102f9d9fd46f9ec265a54330e7d94479ba41f0ee0698855658e18b8b32b9c255159eb9a085af5f0306eb6508663d3fea7d2e00b4a -astropy-checksum = c32e874d208f312f894643ab5b3d71dc37630e544da0ceb5ee998d752f9a055d32f6e4319f2cb6928637aaf8573bac58d2882bd636b6a89f5501e3ac7e5ab681 +astropy-checksum = a91e327784a6ce0bb55119639e52037db0d802305dafedd46f27d9de96db819b43e336a4d43558a57e57459c4d1b7d6d3fe290771006e061d3779034f089aa9b +astropy-iers-data-checksum = b8bb1aacf67fdab241e95952919a8691db09488493d0e4ea6eb7a65822c6e4bfb7fc55d4ee2f6a0e9e563fb671d16b06474d38f0ad4d3b0f41d6dc341dc7ec7a astroquery-checksum = 43846791d8469a26cf6bb8819db58b830cfe50a34bc0091c2e843dd7dc78b1317530855d432a3a567a9f6a6f4d2682382a32edc91ea01716246b99b3625ec521 -beautifulsoup4-checksum = 7aa77bc6008bbcbbbe91b0a850007ab237d2832b63a787fbd94b7cbf47d4276b185e0c61c134df73221406458edff2b75b6b8c2b53b543aa3bb1b0e2202dac5a +beautifulsoup4-checksum = bf8fd3e54da63a506f294f0e5f1201fd46bf2edcc2db23e99eda995313b8a8d24db3cd2d1903853c539a9320bfb9eb4b29e311772bfddc0d125f0b4aec71e384 +beniget-checksum = 32a19d77323a0a21544ce7fbbb71cc5f4c66949dba280d81deb36f38364544d9fdfb3d24fc48a1ff6d251fb22c24357e81dd7fa4a7e6c11d8b931723e150a182 certifi-checksum = 6a6bf1ff98caefcdbf78a8c83e11e155368bacdd806f0ae0c6afa8f513667df6598e594b3584de61acdca3d6049f4a776937f2aa8672b602bd6db7b737f6074e -cffi-checksum = af4fe47cf5d6f1126222898365cfa21e9f11d0e71b87d869014dbb37af30dca9ddf50c989030d0f610f50e8099e8dfd08a688d8c3629abbcc4f0294f5f91b817 +cffi-checksum = 50e2b8215ddedfa8f1d569680cc0023cf061c2c52b7b86b6b26ced56e5107b362aaf6dc10a77b9dbbbfca2e5611f7cfb42c59501f9ab7da29fb3ad3ccf418cb4 chardet-checksum = 61a03b23447a2bfe52ceed4dd1b9afdb5784da1933a623776883ee9f297e341f633e27f0ce0230bd5fdc5fdb5382105ab42736a74a417ddeb9f83af57455dba5 +contourpy-checksum = 
dab72dc7b9c64b1c06674ac4c7ba4d1058bfa5922763087dbe9d30237fe38045201e3801423541cc65ef0cd6c92eb5f4ffb4b6ff8da21d79f2f23a36ddc4bcf3 corner-checksum = ebd625ab1e4591b4c21d25ec706c35d37f560b727e1e0d6a79948c4a112ee6f21d3ca30162901a27715074e1345f3bdee1a0345c63e5fec24113e495fb094127 -cryptography-checksum = f14319e24d9dca52e74548cada5b78a6235f089ef875dbff4799e862f94da8b087f1b6e03e84dcef9fc7d7693c4a349c5f0cd54b8535806da777420ce8757d39 -cycler-checksum = b7d2ba19861ffaf4dea0444bfe68b5a6264a022d7b3f02c9ff5e5859e3901de12a90f8dc7469e995e09c418515b3df55dbf05a0cfe5368d40790a2c878a74819 -cython-checksum = 6216e63996e83b887cdcee6cd912d42e7da853640336b9190f5115d687848a902ee5a8edd6bfaef645c066b89e17dcd80ca1387688eb80a527ec23a0a4636e8f +cppy-checksum = 562b41f22d2819fa7be65ad7d05b18afa9646999277df8a5e6399c69d66320ed0119a5d2653de5711ccaf65360d55fadd85611bf2f7c42348e2f74fbf4eed45e +cryptography-checksum = 051b5007512521638981a8e975c7bbcb688b1a1c384babc81ba105c7bc2a7e776a8bf46af5939627528b2b57d99aaacf215a041018c47f6d3ae6d7a0e8eafccf +cycler-checksum = c958a7c732432cac9dd81948aa451a8cc91e5c828591b1f935d80f5fc2d23481ffa1c1be716c44037f487ec2f4dd1be11c5318f5786f4accab6bbb84dcbcb291 +cython-checksum = cabcaad9c009680b262780030b08298f9785c98e7b2af043a7c85c1d57dbafd08d20d03de2b9298bd229da7c816babdfa2838229e275330ac1fa1bdcdb974725 eigency-checksum = 1e7cdfc43071da5edba30a0d32cd655442b516f15c166b049a195d151dec8c20a2177ad69bed3bba4788a668fa25a4c551ef4990717ff98d5b2f407bffb214c8 emcee-checksum = dd60aace8879525fd3fe42b747d82170b24b2ed21f538f9186ba96b9d04c084812e3303f5d2e04119dabd2f9d3286d510b4d4a5324c71dd24b1c7e5f0a9a0ac6 entrypoints-checksum = aa1274362d3a4b00266103319ca51aa266605b4999c89a9d0673eb61bfae9e646cb0ec6b86c95544493f6fe048385a2c7641d64adca8f45815546fb1e663c858 -esutil-checksum = c1cf8e7912b16dc675fdc7195ecc2bbffcd156c1257b2df45afdf09cc487c49f2e18a66cd5520ab30c77e60abb8ca802e327075bdd0f23eb634cc4f5a7e974f3 +esutil-checksum = 
7f1f22b0a05855819e3bd9bc2eee14942c2f536fb99f4af03575eb41d3d3fd1d2e6091ad076bfab4d51a8f30e213b607af35ed00d063fb2895b58f7a57cde5ae +extension-helpers-checksum = 3a8d971e33542931754baf1626141b6407c11e98d58088f9898aaffcb2dcd9592adc094b236a7fdd74837b6aa4d38e7191763d7a414fb34d6e923985c7db3534 +features-checksum = 91b88e02ebcccfaac9a9cf4b8f00e177880e119287514c781fd591c06231428f4f7aa479bb9ba74f59b2544f12ec08061486f75bf91aac8480d23bdc91e2c511 flake8-checksum = a25076bb7f978ca23af0f2016b30bf5fe4680371b4c0939361a6f8c496f3c846cf98ecadee35fc2dedd588116c8cd982529972dd2411ab139621912b69132a1e +flit-core-checksum = 59912228fbab70435a438c978c2cfdf7c1fd4e14f45ea223c9b1cb58a56b86ef932b42e1fcc69cadf3fd5c8ca52d407a99428aa874d58029fcaeb2b6acc6bbd5 +fonttools-checksum = 3bec5dbc51d93d439a9cfffdb45e1de4d1b79d6bd91c5510eaf4b21fed077648deb6c4858c95222f93bfe5da1f9c82cc57e8b3df67f214ee29425037f1261e22 future-checksum = c70565a660cf87c5e7e994bae0d4eb0b2b8b607ecb5ce65521c027bf0a39ce5699f0578413bd3c7edd5d01aeb1617de48dcea098a9e9021d8487a73007573030 -galsim-checksum = 11ecbfb9628cf85a4c25024c6f3a34bbdadf522f64e4e93e6428b545c65a48906a05eb7774d0faaacf52a48dddfd599bfc7cc7252ca854a18c3930f116533a3c +galsim-checksum = 65393c4bcf52a3e723604a956aac5f58f490e41c7d5ddbd5d61660658738e2496aa30f632d4d83b765aaf1e46ac691fcd02fc827380e07c8f921c160a9f6a6fd +gast-checksum = 8311f48788daf060bee54ad8986eae054c2a426adcf97f1503cde37c4da2827cd0f664188e51f5c2f932a0a4ded78166a8466465595a640106060c8b6c7072a5 +gpep517-checksum = e937f18c9593c76c15f8b2e77143aa11a0a822b1ff686860d5539f4418b02ca7764d71dbef817e8fe8453f9bf3c45fc2f97af1244cc30068701347f584ace91f h5py-checksum = c49b04f7dcddf03f36d4f063d79ecbe544e0b1daee432d4a76cfa83dac3a1f2cb144f40a74fb85ea17cb5b778f57f709969ea5d1a2afc5bdd5aecbc9d732898d healpy-checksum = installed-with-healpix html5lib-checksum = 35939b4450893864da04e735ee5e0addacf1dd34bae6a6909c76572abf6bfded446a78a713dfde91c1485ba45867d7abeb6a45cf0545c16ea968707be7de5dd2 idna-checksum = 
8ca5cfe6350c51250bafdac7c6e4ddd54c4a5d6bf7acbcef896760a759868c8e9df1fdf550121d8512fa3eb316dcf031ec6058e03b4f66eadee21b63e2187d33 jeepney-checksum = 43083994a7c6af84a5a68d3ff8f6dc4d9129ce9fa55517838fb62d9f62bb78bdf52067649d0b95d08d689b7d7475cb9b2a956662e265a776ad42dcf4ccc0ab63 +jinja2-checksum = db19498dbc9bd6419b474fbb465ce8c689b96f72d33ed1a31cee3c42e2c545dfd0fd5bfca610075e2e126fbdd27300a73efd76fb7ea449727ed61a6127806dcd keyring-checksum = 3863f2cd89131d7e927e55691b4eb4b7c2599af189525293d0948aef5215efbbbd74d45db3cd4885ba5b18a79f0fa0b0edc3783a020a6702f6a6fb7ed86b2cf2 -kiwisolver-checksum = 80ed3d7429a2ed4944aa09766bebac2709a3a0e0a184ff414f04bf3c7ec3034ea7d0f70dff319922b870dedaf13d64fc87eff86a82e720ec93a2510885ad738d +kiwisolver-checksum = baa1e6b8b31b8000e51cc9bdacd57be20c3030551782fd7989fd473b15d225f1cad6309fc846d2ba07d7a7abb729151260a873d6beafa46d881f4fdddcde0f3f lmfit-checksum = 8435328fd8baf92b424299c341aafb995dc45245e17b1774e9eaabdd994dd6ec3b2e70f7506d67b587631cebd2dc584fcd1a9902286925a61e2135b7e11ece21 lsstdesccoord-checksum = a66b8b702cd8122f0c8aee05abe0fc0c6299f8bdb99b151f54bd7ad7430d6bc9f843f7294a7f08f3ce9f8606ea0ced0796a63b79cc8a53ae73068b691e2e6871 -matplotlib-checksum = 2ffd9e79e300a37092ab4b5a7156c2f57e408975818abd413c74477ad622030c7636695467aab821c9996564cddec57e26ba6a8004f6639070964fcb7a510a75 -mpi4py-checksum = 041768f753c8188b2560fe92711861780f0d77eda3281433520c98bb1e9b4da6a89c364f2d1c4623868ffbbcfde34ef556198b1bef6fc1c4a9c19cd5e71b546c +markupsafe-checksum = 4bc1171f1dc86516d6002d6dc3df2ccf78481480812025114b3a1511779b152a605904f0dc3aa0a6c8c282111a74564891a2b02650f90c44b667e5e0622c62c0 +matplotlib-checksum = 2911fa5bc27d082aac00131f012503572f21228e0da49622b57d1d1a53eed942fa73a3457ae09efd03d09cb80172e2c344eacbb85234287be085a749622f9505 +meson-checksum = 6ba8af3a835f24b12e105b90948177964930274fed7d134405b3852cf333a4cbfc11d955778b87994cd9ece57a0de424483b40cf6e43ffe3fd6d9d249257f04c +meson-python-checksum = 
08ce6b89f4dd898ef4a92f9b358080aa8984047c3c8b1991347496a2e1283f357424d2d19a1cf5e03054e9dc0750773f2936f5eee2838523db2cedb783546477 +mpi4py-checksum = 68b1d4ffaab8425ed3eb6c4adf2c008a489d4086eba1dd042c29cba29c95f790d49db9c4bf2870c677f5d27e6e5103781a8113ade6e28fe6cea655487e5e07b5 mpmath-checksum = 58c69a801f65d73cc0eeb0d2c79277ed638568c656e7213d06ab4709c218aac908b2752377139010bed0e91bbfff01d129c60835ff0a928ba1185aded6de7c0a -numpy-checksum = ddc5f5e262d6f7f6e1a8879165860427baa237b6aad5b09d5b943a36bb0d5922424aac4c0e32a2cc3e14087bf6eb671988b6ff6df0d9ab8c89724e093220eead +numpy-checksum = 139ee0c38aa9acf4ddd6d5fd14b3c6c6ff3275b89cd3925ce5c528f58d9dd0e367edf77827685026fe10f686ff662bac95b367249891d4f2360fae8a60a8dec8 +packaging-checksum = 0f0be603c444b20917c7a36e1356bf161a958d7326bc47198bb06d23283017e04f4b3be60eaec66a865bb9704c717342a2ef55c9069bd8ea48bd1810d1d424bb pexpect-checksum = 4cea4229332c1f3de26dfcad596877665b3c02e91d51ca3c45c1f9b44462adb7c82abc7b76eb09a73822c2d1ccc9d812574cf79bf6bc8fb0b7d2f1093962cc3f +pillow-checksum = 5134db8c35fa66c559a16e7aa3f341e00c7d21e90d4906f5d95f3c912d804aa10fc8d8ca889677299d1f5431b421c4963d3420a614c7587bb43e2d7079278913 pip-checksum = c6e13da3a57462371d32982c80575c5181592f5c6a8e70d60ec879e689442f4ad468e7aef97eb58c9da50a5a770385aa35e701eefd713a8e9fafeb12e11d956b -pybind11-checksum = a863b92a03a23395ba67f6e4916c479ba800060e89a6d80e586533a23603df111b5cba9fd03b3ed500956169edfd8cb1b32a0bc05c42f928740beeea5bd63352 +ply-checksum = 29d3c4de42a74497083dc6780fbfccd319bd3be8098a2a59ea733988a159e760bdd0eead3482fda33a4e36d2b9b88916f9f9408db2fd3a6b7a0c51269b024485 +pybind11-checksum = b877d8d66b2c47b076224bf8e8414fe889f4fc3281c440d6d097edfad0b7a8edd9fb15e04cc60932358232ebe6c14b89863f4736aa43edfd739f4605b6fa6f44 pycodestyle-checksum = 84e751a7d00048393b02ca743de5d71d1641e948ee1b4daebbdf2d07e0cd8f087ca4e81f826061114b40ef41920bbcd680c9f479e7cc1a159a70188425717208 pycparser-checksum = 
7f830e1c9066ee2d297a55e2bf6db4bf6447b6d9da0145d11a88c3bb98505755fb7986eafa6e06ae0b7680838f5e5d6a6d188245ca5ad45c2a727587bac93ab5 +pyerfa-checksum = 44a1d3d714a72b9aa66c9f72f692ff46b273b25d3af2b05ddced62305727f23891b6c63a2ff85fd99f2e2d1663da6da995fef0091c541523867100c72c6a18ca pyflakes-checksum = 7ebf5843b38146305c1063e070480fea8ec3b47fa1be546b1fafaeb242a688a5a001f978e7257fd71d5905b9a338b466ef17c7330725191587e9c40ba632c3f8 -pyparsing-checksum = fc8342780c38fd08c07d0b885c4d6c77d59f38098aa13c3abd05a790ee6caf2065a0bdab33bc290a6e2da123f4ac7e38c049e389b740546499dc48eba7ae431f +pyparsing-checksum = 9d10454faaa0608eeb739a36a001f76d433c8f2ac9c7f40e9d1de5a9431b99e66a00fbd1618901d2e938ff36f752b77fc6782328ec08413f6d018bf6e5a420e7 pypkgconfig-checksum = 1fd9aa973bd20a8fab864722598f1d19b94c23c7f2b522556b3182b19fe016bda7aa2be5e48a1b8fefa70a069611007d6d790e24defcb462e4594a382de85b00 -python-dateutil-checksum = ec7da86203572582f883a4686acf8a732a2de4f396d809057eb51b2c60dbca5623a7fa90c2c0618c281a2282c60841739bd837731a51cc876f4ff369297f2f81 -pyyaml-checksum = 8f27f92bdfa310a99dd6d83947332cc033fa18f0011998bb585ad5c4340a2da20d8c20bfdb53beaae15651198d1240c986818379b0a05b230f74d1f30f53e7fd +pyproject-metadata-checksum = ff36bf73b6d10331d0b58af473a6af4fb4c67b7d2f4d567c36dc75030b6765e4640261041497b7660ce538a5a5baf11cd24acad6752ff45179b50162987a4005 +python-dateutil-checksum = a329abd60db1b3f2ab4cd516c1728c3596831ba7f7ba21003da7a39f7fe571ef38338125a210dc2931a0f1aae34b15f942be148095e0ec0d9a4c10389673c388 +python-installer-checksum = df34826ffc192547e5c87ff03e33e6a431d98eb6b9af6d7783963795204318fb69dce8ff49b757379e88ae6163c4802e43f58213f33b19d1d257bcdebe1d5e27 +pythran-checksum = 5d2d14480149b044ffc159990d2a9b02a236c54c348a899929f11637d0545fee7bb16806660aab6e096a22e1f2e220e344084e5d9c96b2318e51c16767b26c59 +pyyaml-checksum = 3f19057713a73586fdc73d54a6bd96400bce4edb4939381cbbe4be9bb3294dbc124403f16d880a4d21442df14ea4354231489cc2e97103360407f3936d41781b requests-checksum = 
385e1d80993a21c09e7c4682500ca8c24155962ba41ecd8e73612722b2ff6618b736e827fc48ad1683b0d2bc7a420cfe680f5107860aca52656ef777f1d60104 -scipy-checksum = 45463df30a0f6270d9f4cf52235f31607904a6ae1375e12600e7f1ab2d27b1dc25a6211b49dceb71506be22c756890adaf9f81d9e6be7455def86c9caf0dc923 +scipy-checksum = fe7c3ac114190782ebf2b155182597a78f36fc24a5de841380957d8f78faf865f3cb6b4feff2f601547240a7d09cf7756b7f9f0eb87d40e3a805e4dbc0714bb4 secretstorage-checksum = 295e0f75c772edf153480730dc20051f06e60e040a18f169620cb4aaa37b8c1a254a496464d3794a38cf49fa884a7b561fe364816f0d12a5b2622c77259f03cf -setuptools-checksum = 1bed75e7173ea0399f078c96cf5c1818dada7a4b86a93e61ff72da6a25694e321b9047e024617aa775b71a25fe8cb0848323902e1fb9c8c8a1f39434f0de6434 -setuptools_scm-checksum = 196d4785a1802875d89b9e54ae788e791a9c5cb685109784059955b691242984e42b96d77075116790935f56be82259bc2588d95d65ecbb101261d76daddb83c +semantic-version-checksum = c0a354ade9761168a7e3a22b5edb10fab21abe527996266b02e1dc4d631b7f80440fc58a04c71708c1dc24317cb880f9629bff926b55517f798b680c8eb207e1 +setuptools-checksum = 9c1f68e63fda8e65654f9b04423f35e20c1a854cc12d012a3cc9d48bf14aff0bfff37e038dc98675cc246997cfc33af6ee80bde36fd96b58c7cdf917c68a5c67 +setuptools-rust-checksum = fb440e1c619c1b50b67686cf8fc214c88beb01b189f06d43848a31dac910c9c6a8bf915a621299371d8a5148469665f6c2704c4853d218f5dc52fff2b014b6f2 +setuptools-scm-checksum = c00990fd7ea58fc246874c8216e746c2914a6adac2780e7596382a771cfb48a65a0114e04eae0ec12f44dbfb79ccb29a35ebc3c46a03caaa7399b57cd79c5e64 sip_tpv-checksum = 5e03279cb3650dd506332dfcb31aa4a20f23f55b8a29fd18da5c6d422d1b7dc49e12362ceae2ff7417c874401b5e87a73ca1ac0f3c8747c8984e4269cad56c3f -six-checksum = 937728372edf1e0ac13bbd706723d0de35e015c30d0ae41f789c5ed2e3669bb0db70cdc6e036ec2d437a6c4aa0d5d1e727b6d09ac34cca7e4e92e5d3b4775151 +six-checksum = abc5da98a94fc20b8f4edbd881497ebdd5d86f30867d7838bfe388fc35ff415b8973884e24509ddda7de1b4eb8af6ac5fe22e6408826b9297b217d3c53a5a6bb soupsieve-checksum = 
abdcbb6a13563e7afadd3056141587fdc3d7d644e346f789bca0a16242d860219e462491b0c624b287300af960fb8e3f85c79f5137580939a9fc8c3d6961478c sympy-checksum = 6ae09be7260b1624b4f92d39c68d5cdf54e6e33010d9215f46d62d989c04cdbee6f9f9c8b11ebeda53257d154954fb926b3ab7335b738e33ad248764875b6ddb -uncertainties-checksum = 420fa4f58ac8dff17875029bc3dcd6539c638e8c8ffa5bcc273b486d05f0d1cc71b1db140a5098c1cd6472f93b6869303f57f48675296e859814197d899327bd +uncertainties-checksum = 5f09e75db2e4b2838858f2640a8427cd01cf691a6bec66e58f75125cb80515e2a7c164ef6d414dab2cb175a2cc696f42259729d0ebe4b4603f98e3f3ef9e262d urllib3-checksum = 4c12d08076b0f260727d5aac780f5e9a24e0164755ff05b02a1f5a697876741ff13ba278fdd6e46ef678e8e1146bc39de1fc49ee10ee839229a70540a9424a99 virtualenv-checksum = 3306f59bbcb48ceec225c07083e0b6831379b3e632e4a23c376849559449de1a04db66e0e7ceabb40ddcd3ae984a2a18dfdc4f1c38777d4bc04537f85a0137ac webencodings-checksum = b727b01bac6ec79bca517960d27b4c0668b295f25559471b9641c2c33dab55db6dac9c990952177964c6418382c22831b14d57df5e632d51d7abf97b61f24326 +wheel-checksum = df45f00e9eaeae2f27f813f31591590c961da2f6bff15bba6fb2a14d529c221f39b29894b8da408fe49cd4b760840a0e05c4baef377ccfacd9983c0bba83d6d8 + +# R-CRAN packages +# --------------- +# +# Similar to optional programs and libraries above. +# +# The sha512sum hash strings for the R modules/libraries below should +# correspond to the version numbers in +# 'reproduce/software/conf/versions.conf'. If you update the version and are +# confident that the new version is safe to use, then you can update the +# hash here. 
+r-cran-MASS-checksum = 0d22ff69cff1414bf52d11aefd75b442c9c8fecb343fe3733c7b28a6b881f3c87ac8b6e4b0d76709c36e612523ef386d207d403c2292881c083e03d21b3709ec +r-cran-R6-checksum = 5986510ff19c1e0129bb0b88d72020d30255e26d6da9a40c123b7b0c42c3e121188c8de34b79ad5eb0e0390eb4d59f2b7f0224b58679459bec0c4d677dec6ea9 +r-cran-RColorBrewer-checksum = 62aae85a0cf349a58ff51922886f71e2a9dc7b9548b061390f3b370f240bcec72340b3ebc03077a0374ce18ccf16121247e0500eca875ef2ab2f755c809e8f28 +r-cran-cli-checksum = 29782b4cb00a94536c871276835a5f762f3deca8355c35b9366434cd2873604ea20f2746ef624eb4e6ec1514cb9079e8c101e50ace85fb1336f3befcf77be244 +r-cran-colorspace-checksum = b6c24e1dd104d444e09049df61fcaac680aafc1f530bcff07fc8ce5f2bcfc9a3521aac032a506b61dd10de885a914ed82c4095ee95113e407ffd0cd43662f098 +r-cran-cowplot-checksum = 39e3b29078ffa67e03911c21b4cc09a8dcfe0bf38fb180691e82f2ff87c993e78f89173a750dcf08424fa1e0ab39ae5d07fc59af792ac0333ef02cd9ea8afc03 +r-cran-crayon-checksum = 7034a7ac446361c90088d53f2079144a561e2d62095884edb85700512decf4dab9fb4a85fa6d0fcdddf3d5d8a3d219d56384d1d1af9c6351d81abb5778147cde +r-cran-digest-checksum = 44a2978be00b7a90f88f05ad8ec114fb07a8e3816fa263749d44aa61f7706c3f802feaa7b7e9267cce1d0dd5c49e7f7ae3ccd9f968a7025aeef16181f7c0d70f +r-cran-ellipsis-checksum = b82fab404c3e1bb9df2f7ee5ae6e62b237c15c661a417adabbd86f984a2128970e0ad5581e9a8b7541b9f9b07418de469398e6bc06775bd4a3c8bc1472947f62 +r-cran-fansi-checksum = 50a7a8e597b89b49fa33fa93861a906c38508a42980072027a5f2746f047b82333074248e5c7724faf4559eb6a4cdfe7a8ccfd2dc5b637a8f3d5a8584afdbc75 +r-cran-farver-checksum = 66918ddd268b3044625d9b691fa0a2251301400026bdd12524927ccd9f3976ae55189a11ecbbc042ceef0c61e7692577cbf630f3a45fee455a4f3bd5e19e6937 +r-cran-ggplot2-checksum = f515a4710c13f69d3ed8a8d59195aa3d2b52c345ed94a5f36ebe04e1bb621e07d0b5b79290ea42c81a764fc94b17573041a27c1fb06e11b08834ea9e3e89b39a +r-cran-glue-checksum = 
5e679884f45a00a011e284f59370cabc2d470515351324ba2748b06ec780d65b6f64901eb5589713e9b122b103e58cd040eb342c88d7d4a3734a6e23064552d1 +r-cran-gridExtra-checksum = c92c9cb46312300ad9477cfe849c11a9d79fcce1f909c5bdc76e46a6741a636f598a457883fd278e0b00a47eb385bce4489ad04d3cf93ce7def9ae8771dffc71 +r-cran-gtable-checksum = e3cc4328bbb42a59ff1b315f0b030a9fe240bc6b6844f460835c25497179c3a5d9ea10fa2af1aec4b7384e78a564eb99eb4c51067f98a57ef919f2e11f6fbafe +r-cran-isoband-checksum = 253d0b768492b64d5c32ab20f3b896cfe206305b8a53b31055813bc6957069eb76ea94a403b931fdada06ab33ee91f7105ef080fcd88af0c05860865950d99d1 +r-cran-labeling-checksum = 0ba0cb33863f79868ec6f2289f29ddc742cc620b7ed85364099448d39d71a51cce45028097100aeeaf8a8a7e4ad0982b459ef20a15faed9c5c7db79e9f9b01fe +r-cran-lifecycle-checksum = 1ac7aafb41925d19abfd78f454d1acd5102a72ee7017840f14cf54d035cf3999d5b6dc2979b00214358e3c26dfac3407c8b525b434e8882c123a92a132bb07df +r-cran-magrittr-checksum = b0bca6000602b90c996efc3681799a8a965a5bb90eb5e3436b7a26be15e5a219ef6300f4086f0b6efda734272775688a8ca77a4094eda429926a9179125b87c7 +r-cran-mgcv-checksum = 83df262152d9ee5081b3d16711b5cae3860f7dd879f97db014b987edd6b110b11219bac0e07f8ca04169181291aea76399d465b9bf0e96a1b6aaf83692fdf5bc +r-cran-munsell-checksum = 59536096c637d84b80f097bf400bcbb9fcb7ddf6267ce6b026e3c7b3cbcf548caef118439a1928dd8833359a18b14e49f38fe90ea7919c28c215b3b297c036a3 +r-cran-pillar-checksum = dd2bc53539ec6838cb8d5bb5ddf30cff9c43c5de0f8d148374d5a394385429a06243db48e7cf6bdb463bad7a130fb47b172ceaac8d04ca3205b4c4210ab672ff +r-cran-pkgconfig-checksum = 0b636d9b1233a00974ac93eb2a1e0ad6b552fa043bfe9743ec74ef884c1d01a73c372586516abca8772a836c50dcd9eed4d10c50f6883fc68ba2494e79872aaf +r-cran-rlang-checksum = 8dc647e59dc767471345f9cfda7b77316215b0bf5a96e28f63e216d80f239ba9ed260b90926d175080e256d645f9a1b621d7602bfe27c32d0151758753b6cac8 +r-cran-scales-checksum = 
a1c693f993010613a33b9a165d5f853ad5f4fbbd790deae2bd5b19adc6ea0adbf27962e0f56807297ec56a71a6a5f57e3aa0ef78c266e74dc600da34c24d16e8 +r-cran-tibble-checksum = 4a4953b9083064ba10e240e304617f0b2c65039eb188f56761cfac228e99186054b543aef045607b63e5ebf9da2d20255f0ae724fd974c497507706d21a92864 +r-cran-utf8-checksum = 51b98a3fd07dd82fba249ff78f6e8b1318592151be49f1fe98c061051dca4d4506f7c7c8f1ebccadab4312cef7e01f700b39f4d5a6ba0b7227acd27e028f515d +r-cran-vctrs-checksum = 23cde893f027f876f37b704a5074fafbbc376b85a053ccdc2862a3e6dc0b68ae2ac3511e2c61ad7a0a0d55524d3f3b724f39a4eeaf1a855ace62634ddea04b4f +r-cran-viridisLite-checksum = f59a771b4cdca74df641c936812a008bdec99578cd1c82ee8f3119087dbdc72e307b2f3093df971ee7166c010a246fb5ef6c05cec4e51f7cfe721f3343e0fd6a +r-cran-withr-checksum = 97877fa1ec21244285366818ad51dbe9c5086b377c1217d4ffc250a1f83cd142cc28d35f9aac0527f26bb2e3f0b399537b386a27de2fe546c301029f59614f65 diff --git a/reproduce/software/config/numpy-scipy.cfg b/reproduce/software/config/numpy-scipy.cfg index 3a3171e..c3cea11 100644 --- a/reproduce/software/config/numpy-scipy.cfg +++ b/reproduce/software/config/numpy-scipy.cfg @@ -7,8 +7,8 @@ # appropriate sections. Not all packages will use all sections so you # should leave out sections that your package does not use. # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and # this notice are preserved. This file is offered as-is, without any # warranty. @@ -18,7 +18,7 @@ # IMPORTANT NOTE # -------------- # -# The `ALL' grouping does't apply to ATLAS!!!!! +# The 'ALL' grouping doesn't apply to ATLAS!!!!! 
[ALL] library_dirs = @LIBDIR@ diff --git a/reproduce/software/config/servers-backup.conf b/reproduce/software/config/servers-backup.conf index 8db0ce1..f0e040e 100644 --- a/reproduce/software/config/servers-backup.conf +++ b/reproduce/software/config/servers-backup.conf @@ -3,12 +3,12 @@ # is irrelevant). Note that this is not a to be read as a variable but will # be parsed as a list. # -# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2020-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and # this notice are preserved. This file is offered as-is, without any # warranty. +http://gitlab.cefca.es/maneage/tarballs-software/-/raw/master/ http://gitlab.com/maneage/tarballs-software/-/raw/master http://git.maneage.org/tarballs-software.git/plain -http://akhlaghi.org/maneage-software diff --git a/reproduce/software/config/software_acknowledge_context.sh b/reproduce/software/config/software_acknowledge_context.sh index f1454c0..deae01b 100755 --- a/reproduce/software/config/software_acknowledge_context.sh +++ b/reproduce/software/config/software_acknowledge_context.sh @@ -10,8 +10,8 @@ # your project to make a smoothly readable English text. Afterwards, please # feel free to modify them as you wish. 
# -# Copyright (C) 2021 Boud Roukema <boud@cosmo.torun.pl> -# Copyright (C) 2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2021-2025 Boud Roukema <boud@cosmo.torun.pl> +# Copyright (C) 2021-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This script is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the diff --git a/reproduce/software/config/texlive-packages.conf b/reproduce/software/config/texlive-packages.conf index 2cca87a..e68a771 100644 --- a/reproduce/software/config/texlive-packages.conf +++ b/reproduce/software/config/texlive-packages.conf @@ -1,24 +1,57 @@ # Necessary packages to install in TeX Live. # # If any extra TeX package is necessary to build your paper, just add its -# name to this variable (you can check in `ctan.org' to find the official +# name to this variable (you can check in 'ctan.org' to find the official # name). # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2022-2025 Boud Roukema <boud@astro.uni.torun.pl> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and # this notice are preserved. This file is offered as-is, without any # warranty. +# Hints: +# +# - For debugging: after a partial or successful build, look through +# '.build/software/installed/texlive/maneage/tlpkg/texlive.tlpdb.main.*' +# to see what packages and files were looked at during the install. -# Note on `tex' and `fancyhdr': These two packages are installed along with -# the basic installation scheme that we used to install tlmgr, they will be -# ignored in the `tlmgr install' command, but will be used later when we -# want their versions. 
-texlive-packages = tex fancyhdr ec newtx fontaxes xkeyval etoolbox xstring \ - xcolor setspace caption footmisc datetime fmtcount \ - titlesec preprint ulem biblatex biber logreq pgf pgfplots \ - fp courier tex-gyre txfonts times csquotes kastrup \ - trimspaces pdftexcmds pdfescape letltxmacro bitset \ - mweights +# Notes: +# +# - tex and fancyhdr: These two packages are installed along with the basic +# installation scheme that we used to install tlmgr, they will be ignored +# in the 'tlmgr install' command, but will be used later when we want +# their versions. +# +# - fancyvrb: needed by R. +texlive-packages = biber \ + biblatex \ + caption \ + courier \ + csquotes \ + datetime \ + fancyvrb \ + fmtcount \ + fontaxes \ + footmisc \ + fp \ + kastrup \ + logreq \ + mweights \ + newtx \ + pgf \ + pgfplots \ + preprint \ + setspace \ + tex-gyre \ + times \ + titlesec \ + trimspaces \ + txfonts \ + ulem \ + xcolor \ + xkeyval \ + xpatch \ + xstring diff --git a/reproduce/software/config/texlive.conf b/reproduce/software/config/texlive.conf index 94a4c89..0babd69 100644 --- a/reproduce/software/config/texlive.conf +++ b/reproduce/software/config/texlive.conf @@ -2,7 +2,7 @@ # # installdir: Install directory # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and diff --git a/reproduce/software/config/urls.conf b/reproduce/software/config/urls.conf index a311524..8bf7b23 100644 --- a/reproduce/software/config/urls.conf +++ b/reproduce/software/config/urls.conf @@ -3,8 +3,8 @@ # If un-commented the URLs of this file will be used. Otherwise default # servers (primarily on 'zenodo.org') will be checked for the tarball. 
# -# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2020-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2020-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2020-2025 Raul Infante-Sainz <infantesainz@gmail.com> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and @@ -19,15 +19,15 @@ # ------------------------------------------------------------- #bash-url = http://akhlaghi.org/src #binutils-url = http://ftp.gnu.org/gnu/binutils -#bzip2-url = http://akhlaghi.org/maneage-software -#cert-url = http://akhlaghi.org/maneage-software +#bzip2-url = https://sourceware.org/pub/bzip2 +#certpem-url = http://akhlaghi.org/maneage-software #coreutils-url = http://ftp.gnu.org/gnu/coreutils #curl-url = https://curl.haxx.se/download -#dash-url = http://akhlaghi.org/maneage-software +#dash-url = http://gondor.apana.org.au/~herbert/dash/files #diffutils-url = http://ftp.gnu.org/gnu/diffutils #file-url = ftp://ftp.astron.com/pub/file #findutils-url = http://ftp.gnu.org/gnu/findutils -#flock-url = http://akhlaghi.org/src +#flock-url = https://github.com/discoteq/flock/releases #gawk-url = http://ftp.gnu.org/gnu/gawk #gcc-url = http://ftp.gnu.org/gnu/gcc/gcc-$(gcc-version) #gettext-url = https://ftp.gnu.org/gnu/gettext @@ -36,24 +36,23 @@ #grep-url = http://ftp.gnu.org/gnu/grep #gzip-url = http://akhlaghi.org/src #isl-url = ftp://gcc.gnu.org/pub/gcc/infrastructure -less-url = http://www.greenwoodsoftware.com/less/ -#libbsd-url = http://libbsd.freedesktop.org/releases +#less-url = http://www.greenwoodsoftware.com/less/ #libiconv-url = https://ftp.gnu.org/pub/gnu/libiconv #libtool-url = http://ftp.gnu.org/gnu/libtool #libunistring-url = http://ftp.gnu.org/gnu/libunistring #libxml2-url = ftp://xmlsoft.org/libxml2 -#lzip-url = http://akhlaghi.org/src +#lzip-url = https://download.savannah.gnu.org/releases/lzip/ #m4-url = 
http://akhlaghi.org/maneage-software #make-url = http://akhlaghi.org/src -#metastore-url = http://akhlaghi.org/maneage-software #mpc-url = http://ftp.gnu.org/gnu/mpc #mpfr-url = http://www.mpfr.org/mpfr-current #nano-url = https://www.nano-editor.org/dist/v$(word 1, $(subst ., ,$(nano-version))) #ncurses-url = http://ftp.gnu.org/gnu/ncurses #openssl-url = http://www.openssl.org/source -#patchelf-url = http://nixos.org/releases/patchelf/patchelf-$(patchelf-version) +#patchelf-url = https://github.com/NixOS/patchelf/releases/download/$(patchelf-version) #perl-url = $(shell echo https://www.cpan.org/src/$$(echo $(perl-version) | sed -e's/\./ /g' | awk '{printf("%d.0", $$1)}')) #pkgconfig-url = http://pkg-config.freedesktop.org/releases +#podlators-url = https://www.eyrie.org/~eagle/software/podlators/ #readline-url = http://ftp.gnu.org/gnu/readline #sed-url = http://ftp.gnu.org/gnu/sed #tar-url = http://ftp.gnu.org/gnu/tar @@ -63,7 +62,7 @@ less-url = http://www.greenwoodsoftware.com/less/ #which-url = http://ftp.gnu.org/gnu/which #xz-url = http://tukaani.org/xz #zip-url = $(shell echo ftp://ftp.info-zip.org/pub/infozip/src/zip$$(echo $(zip-version) | sed -e's/\.//').tgz) -#zlib-url = http://www.zlib.net +#zlib-url = https://zlib.net/current/zlib.tar.gz # The version is in the tar @@ -73,30 +72,31 @@ less-url = http://www.greenwoodsoftware.com/less/ # ------------------------------------------ # # These are programs and libraries that are optional, The ones in -# `reproduce/software/config/TARGETS.conf' will be built as -# part of a project. To specify a software there, just remove the -# `-url' suffix from the list below. +# 'reproduce/software/config/TARGETS.conf' will be built as +# part of a project. To specify a software package there, use +# the name in the list below, without the '-url' suffix. 
+ #apachelog4cxx-url = http://akhlaghi.org/maneage-software -#apr-url = https://www-us.apache.org/dist/apr -#apr-util-url = https://www-us.apache.org/dist/apr +#apr-url = https://archive.apache.org/dist/apr/ +#apr-util-url = https://archive.apache.org/dist/apr/ #astrometrynet-url = http://astrometry.net/downloads #atlas-url = https://sourceforge.net/projects/math-atlas/files/Stable/$(atlas-version)/atlas$(atlas-version).tar.bz2/download #autoconf-url = http://akhlaghi.org/maneage-software #automake-url = http://ftp.gnu.org/gnu/automake #bison-url = http://ftp.gnu.org/gnu/bison -#boost-url = $(shell vstr=$$(echo $(boost-version) | sed -e's/\./_/g'); echo https://dl.bintray.com/boostorg/release/$(boost-version)/source) +#boost-url = https://archives.boost.io/release/$(boost-version)/source #cairo-url = https://www.cairographics.org/releases #cdsclient-url = http://cdsarc.u-strasbg.fr/ftp/pub/sw #cfitsio-url = https://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c #cmake-url = $(shell majv=$$(echo $(cmake-version) | sed -e's/\./ /' | awk '{printf("%d.%d", $$1, $$2)}'); echo https://cmake.org/files/v$$majv) -#eigen-url = http://bitbucket.org/eigen/eigen/get/$(eigen-version).tar.gz +#eigen-url = https://eigen.tuxfamily.org #emacs-url = http://ftp.gnu.org/gnu/emacs -#expat-url = $(shell vstr=$$(echo $(expat-version) | sed -e's/\./_/g'); echo https://github.com/libexpat/libexpat/releases/download/R_$$vstr/expat-$(expat-version).tar.lz) -#fftw-url = ftp://ftp.fftw.org/pub/fftw +#expat-url = https://github.com/libexpat/libexpat/releases +#fftw-url = https://fftw.org #flex-url = https://github.com/westes/flex/files/981163 #freetype-url = https://download.savannah.gnu.org/releases/freetype #gdb-url = http://ftp.gnu.org/gnu/gdb -#ghostscript-url = $(shell v=$$(echo $(ghostscript-version) | sed -e's/\.//'); echo https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs$$v) +#ghostscript-url = https://www.ghostscript.com/releases/gsdnld.html #gnuastro-url = 
http://ftp.gnu.org/gnu/gnuastro #gperf-url = http://ftp.gnu.org/pub/gnu/gperf #gs-fonts-gnu-url = https://downloads.sourceforge.net/gs-fonts @@ -105,28 +105,37 @@ less-url = http://www.greenwoodsoftware.com/less/ #hdf5-url = $(shell majorver=$$(echo $(hdf5-version) | sed -e 's/\./ /g' | awk '{printf("%d.%d", $$1, $$2)}'); echo https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-$$majorver/hdf5-$(hdf5-version)/src) #healpix-url = http://akhlaghi.org/maneage-software #help2man-url = http://ftp.gnu.org/gnu/help2man -#ibpaper-url=http://ftp.debian.org/debian/pool/main/libp/libpaper/ -#imagemagick-url = http://akhlaghi.org/maneage-software +#libpaper-url = http://ftp.debian.org/debian/pool/main/libp/libpaper/ +#icu-url = https://github.com/unicode-org/icu/releases +#imagemagick-url = https://download.imagemagick.org/ImageMagick/download #imfit-url = http://www.mpe.mpg.de/~erwin/resources/imfit #lapack-url = http://www.netlib.org/lapack -#libffi-url = ftp://sourceware.org/pub/libffi +#libbsd-url = http://libbsd.freedesktop.org/releases +#libffi-url = https://github.com/libffi/libffi #libgit2-url = http://akhlaghi.org/src #libidn-url = https://ftp.gnu.org/gnu/libidn #libjpeg-url = http://ijg.org/files +#libmd-url = https://archive.hadrons.org/software/libmd/ #libnsl-url = http://akhlaghi.org/maneage-software +#libpaper-url = http://ftp.debian.org/debian/pool/main/libp/libpaper/ #libpng-url = https://download.sourceforge.net/libpng #libtiff-url = https://download.osgeo.org/libtiff #libtirpc-url = https://downloads.sourceforge.net/libtirpc +#metastore-url = http://akhlaghi.org/maneage-software #minizip-url = #missfits-url = https://www.astromatic.net/download/missfits -#netpbm-url = http://akhlaghi.org/maneage-software +#netpbm-url = https://sourceforge.net/projects/netpbm/files/super_stable/ +#ninjabuild-url = https://github.com/ninja-build/ninja/archive/refs/tags #openblas-url = https://github.com/xianyi/OpenBLAS/archive +#openblas-url = 
https://github.com/OpenMathLib/OpenBLAS/releases/download #openmpi-url = $(shell majorver=$$(echo $(openmpi-version) | sed -e 's/\./ /g' | awk '{printf("%d.%d", $$1, $$2)}'); echo https://download.open-mpi.org/release/open-mpi/v$$majorver) #openssh-url = https://artfiles.org/openbsd/OpenSSH/portable #patch-url = http://ftp.gnu.org/gnu/patch #pixman-url = https://www.cairographics.org/releases -#python-url = http://akhlaghi.org/src -#R-url = $(shell majver=$$(echo $(R-version) | sed -e's/\./ /g' | awk '{print $$1}'); echo https://cran.r-project.org/src/base/R-$$majver) +#python-url = https://www.python.org/downloads/ +#https://www.python.org/ftp/python/3.13.2/Python-3.13.2.tar.xz +#python-url = https://www.python.org/ftp/python +#r-cran-url = $(shell majver=$$(echo $(r-cran-version) | sed -e's/\./ /g' | awk '{print $$1}'); echo https://cran.r-project.org/src/base/R-$$majver) #rpcsvc-proto-url = https://github.com/thkukuk/rpcsvc-proto/releases/download #scamp-url = http://akhlaghi.org/maneage-software #scons-url = https://sourceforge.net/projects/scons/files/scons/$(scons-version)/scons-$(scons-version).tar.gz/download @@ -134,21 +143,27 @@ less-url = http://www.greenwoodsoftware.com/less/ #swarp-url = https://www.astromatic.net/download/swarp #swig-url = https://sourceforge.net/projects/swig/files/swig/swig-$(swig-version) #tides-url = http://akhlaghi.org/maneage-software -#util-linux-url = https://mirrors.edge.kernel.org/pub/linux/utils/util-linux/v2.35/ +#util-linux-url = https://mirrors.edge.kernel.org/pub/linux/utils/util-linux/v2.40 #valgrind-url = https://sourceware.org/pub/valgrind #vim-url = ftp://ftp.vim.org/pub/vim/unix #wcslib-url = ftp://ftp.atnf.csiro.au/pub/software/wcslib #xlsxio-url = https://github.com/brechtsanders/xlsxio/archive #yaml-url = pyyaml.org/download/libyaml + # Xorg packages # ------------- #fontconfig-url = https://www.freedesktop.org/software/fontconfig/release +#libice-url = https://www.x.org/archive/individual/lib/ 
#libpthread-stubs-url = https://xcb.freedesktop.org/dist +#libsm-url = https://www.x.org/archive/individual/lib/ #libx11-url = https://www.x.org/pub/individual/lib #libxau-url = https://www.x.org/pub/individual/lib +#libxcb-url = https://xcb.freedesktop.org/dist/ #libxdmcp-url = https://www.x.org/pub/individual/lib #libxext-url = https://www.x.org/pub/individual/lib +#libxext-url = https://www.x.org/archive/individual/lib/ +#libxt-url = https://www.x.org/archive/individual/lib #util-macros-url = https://www.x.org/pub/individual/util #xcb-proto-url = https://xorg.freedesktop.org/archive/individual/proto #xorgproto-url = https://xorg.freedesktop.org/archive/individual/proto @@ -173,9 +188,41 @@ less-url = http://www.greenwoodsoftware.com/less/ # (with full filename of 'numpy-1.18.5.zip') the link will be like this. # # numpy-url = https://files.pythonhosted.org/packages/01/1b/d3ddcabd5817be02df0e6ee20d64f77ff6d0d97f83b77f65e98c8a651981 -# + # Hence, if the version changes, the checksum, and its two top host # directories also change. This is why we aren't putting any URL here by -# default, if you need to add one, simply follow the numpy example above. +# default, if you need to add one, follow the numpy example above. # But be sure to send us a link to the tarball so we upload it to Maneage's # backup servers. 
+#astropy-url = https://files.pythonhosted.org/packages/45/12/a1c582b3f9df5e2680eae0ed82c8057ae06d6130a1f7a5c0770fa899737e +#astropy-iers-data-url = https://files.pythonhosted.org/packages/a1/4d/b9511aba29d4330437497166a7049ab9bac53e344c54e44a35390724ca37 +#contourpy-url = https://files.pythonhosted.org/packages/25/c2/fc7193cc5383637ff390a712e88e4ded0452c9fbcf84abe3de5ea3df1866 +#cppy-url = https://files.pythonhosted.org/packages/45/ed/b35645a1b285bce356f30cc0fe77a042375c385660ccd61e0cdc4c1f7c44 +#cycler-url = https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8 +#cython-url = https://files.pythonhosted.org/packages/5a/25/886e197c97a4b8e254173002cdc141441e878ff29aaa7d9ba560cd6e4866 +#extension-helpers-url = https://files.pythonhosted.org/packages/7f/12/da64a4492f1330f09e7f42c3acd9e6395f74e840dc4a5c3fe1fa4db272b5 +#features-url = https://files.pythonhosted.org/packages/98/cc/dcaf1fb02d4eca207be4df3bf6a68d9af8d2af7cb37435dc76e71b389dc3 +#flit-core-url = https://files.pythonhosted.org/packages/bc/18/b9b81cab2b8f63e6e7f72e1ba2766a0454fcd563e7a77b8299cb917ba805 +#fonttools-url = https://files.pythonhosted.org/packages/1c/8c/9ffa2a555af0e5e5d0e2ed7fdd8c9bef474ed676995bb4c57c9cd0014248 +#gpep517-url = https://files.pythonhosted.org/packages/1c/53/e6ad6aa7036d62ac009986e1e5180465353344afd958ddc8ac15e3b632bc +#kiwisolver-url = https://files.pythonhosted.org/packages/82/59/7c91426a8ac292e1cdd53a63b6d9439abd573c875c3f92c146767dd33faf +#matplotlib-url = https://files.pythonhosted.org/packages/68/dd/fa2e1a45fce2d09f4aea3cee169760e672c8262325aa5796c49d543dc7e6 +#meson-url = https://files.pythonhosted.org/packages/02/98/bbcaf6caaaa0510a68834f119ac793a8abade6ff827fc2791eeb6f8b4a66 +#meson-python-url = https://files.pythonhosted.org/packages/67/66/91d242ea8dd1729addd36069318ba2cd03874872764f316c3bb51b633ed2 +#numpy-url = https://files.pythonhosted.org/packages/fb/90/8956572f5c4ae52201fdec7ba2044b2c882832dcec7d5d0922c9e9acf2de 
+#packaging-url = https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da +#pillow-url = https://files.pythonhosted.org/packages/f3/af/c097e544e7bd278333db77933e535098c259609c4eb3b85381109602fb5b +#pybind11-url = https://files.pythonhosted.org/packages/d2/c1/72b9622fcb32ff98b054f724e213c7f70d6898baa714f4516288456ceaba +#pyerfa-url = https://files.pythonhosted.org/packages/71/39/63cc8291b0cf324ae710df41527faf7d331bce573899199d926b3e492260 +#pyparsing-url = https://files.pythonhosted.org/packages/8b/1a/3544f4f299a47911c2ab3710f534e52fea62a633c96806995da5d25be4b2 +#pyproject-metadata-url = https://files.pythonhosted.org/packages/c0/79/406a9f56c435caaaca4a1c66397e4f63ecd48a72a6c4fc1d9ecdaac66acb +#python-dateutil-url = https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db +#python-installer-url = https://files.pythonhosted.org/packages/05/18/ceeb4e3ab3aa54495775775b38ae42b10a92f42ce42dfa44da684289b8c8 +#pyyaml-url = https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17 +#scipy-url = https://files.pythonhosted.org/packages/b7/b9/31ba9cd990e626574baf93fbc1ac61cf9ed54faafd04c479117517661637 +#semantic-version-url = https://files.pythonhosted.org/packages/7d/31/f2289ce78b9b473d582568c234e104d2a342fd658cc288a7553d83bb8595 +#setuptools-url = https://files.pythonhosted.org/packages/92/ec/089608b791d210aec4e7f97488e67ab0d33add3efccb83a056cbafe3a2a6 +#setuptools-rust-url = https://files.pythonhosted.org/packages/d3/6b/99a1588d826ceb108694ba00f78bc6afda10ed5d72d550ae8f256af1f7b4 +#setuptools-scm-url = https://files.pythonhosted.org/packages/4b/bd/c5d16dd95900567e09744af92119da7abc5f447320d53ec1d9415ec30263 +#six-url = https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2 +#wheel-url = 
https://files.pythonhosted.org/packages/8a/98/2d9906746cdc6a6ef809ae6338005b3f21bb568bea3165cfc6a243fdc25c diff --git a/reproduce/software/config/versions.conf b/reproduce/software/config/versions.conf index 102ebbd..166e8ff 100644 --- a/reproduce/software/config/versions.conf +++ b/reproduce/software/config/versions.conf @@ -1,7 +1,8 @@ # Versions of the various dependencies # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2022-2025 Pedram Ashofteh Ardakani <pedramardakani@pm.me> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice and @@ -15,51 +16,48 @@ # Basic/low-level programs and libraires (installed in any case) # -------------------------------------------------------------- # -# CLASS:BASIC (important identifier, don't modify this line) -bash-version = 5.0.18 -binutils-version = 2.35 -coreutils-version = 8.32 -curl-version = 7.71.1 -dash-version = 0.5.10.2 -diffutils-version = 3.7 -file-version = 5.39 -findutils-version = 4.7.0 -flock-version = 0.2.3 -gawk-version = 5.1.0 -gcc-version = 10.2.0 -gettext-version = 0.21 -git-version = 2.28.0 -gmp-version = 6.2.0 -grep-version = 3.4 -gzip-version = 1.10 -isl-version = 0.18 -less-version = 563 -libbsd-version = 0.10.0 -libiconv-version = 1.16 -libtool-version = 2.4.6 -libunistring-version = 0.9.10 -libxml2-version = 2.9.9 -lzip-version = 1.22-rc2 -m4-version = 1.4.18-patched -make-version = 4.3 -metastore-version = 1.1.2-23-fa9170b -mpc-version = 1.1.0 -mpfr-version = 4.0.2 -nano-version = 5.2 -ncurses-version = 6.2 -patchelf-version = 0.10 -perl-version = 5.32.0 -pkgconfig-version = 0.29.2 -readline-version = 8.0 -sed-version = 4.8 -tar-version = 1.32 -texinfo-version = 
6.7 +# CLASS:BASIC (important identifier for 'awk'; don't modify this line) +binutils-version = 2.43.1 +bison-version = 3.8.2 +coreutils-version = 9.6 +curl-version = 8.11.1 +dash-version = 0.5.12 +diffutils-version = 3.10 +file-version = 5.46 +findutils-version = 4.10.0 +flock-version = 0.4.0 +gawk-version = 5.3.1 +gcc-version = 14.2.0 +gettext-version = 0.23.1 +git-version = 2.48.1 +gmp-version = 6.3.0 +grep-version = 3.11 +gzip-version = 1.13 +help2man-version = 1.49.3 +isl-version = 0.27 +less-version = 668 +libiconv-version = 1.18 +libtool-version = 2.5.4 +libunistring-version = 1.3 +libxml2-version = 2.13.5 +lzip-version = 1.25 +make-version = 4.4.1 +mpc-version = 1.3.1 +mpfr-version = 4.2.1 +nano-version = 8.3 +openssl-version = 3.4.0 +perl-version = 5.40.1 +podlators-version = 6.0.2 +readline-version = 8.2.13 +sed-version = 4.9 +tar-version = 1.35 +texinfo-version = 7.2 unzip-version = 6.0 -wget-version = 1.20.3 -which-version = 2.21 -xz-version = 5.2.5 +wget-version = 1.25.0 +which-version = 2.23 +xz-version = 5.6.3 zip-version = 3.0 -zlib-version = 1.2.11 +zlib-version = 1.3.1 # Version-dependent build # ----------------------- @@ -67,7 +65,40 @@ zlib-version = 1.2.11 # When updating the version of these programs/libraries, please look into # the build rule first: In one way or another, the version string becomes # necessary during their build and must be accounted for. -bzip2-version = 1.0.6 +bzip2-version = 1.0.8 + +# PEM Certificate File +# -------------------- +# +# This file should be shipped with operating systems and can be found in +# '/etc/ssl/cert.pem' (which may be a symbolic link to its actual +# location). The current version was taken from an up-to-date ArchLinux at +# the date that is also equal to the version. +certpem-version = 2025-02-10 + +# PatchELF +# -------- +# +# PatchELF 0.15 requires a C++ compiler with the 2017 standard. 
There are +# still systems that don't have such compilers and there hasn't been any +# Maneage-related bug in PatchELF that would need newer versions. So we'll +# stick to this version until the stdc++17 compilers are more commonly +# supported. +patchelf-version = 0.13 + +# Not working with C23 +# -------------------- +# +# As of GCC 15.1, the default C standard has been changed from C17 to C23 +# and the following software cannot be built with C23. So we have added +# '-std=c17' to the CFLAGS environment variable in their build rules. After +# updating their version (and if you have GCC 15.1 or later) first remove +# '-std=c17' and then try the build. If it works, move the software back up +# to the main list before the commit. +ncurses-version = 6.5 +bash-version = 5.2.37 +m4-version = 1.4.19 +pkgconfig-version = 0.29.2 @@ -77,112 +108,111 @@ bzip2-version = 1.0.6 # ------------------------------------------ # # These are programs and libraries that are optional, The ones in -# `reproduce/software/config/TARGETS.conf' will be built as +# 'reproduce/software/config/TARGETS.conf' will be built as # part of a project. To specify a software there, just remove the -# `-version' suffix from the list below. +# '-version' suffix from the list below. # -# CLASS:HIGHLEVEL (important identifier, don't modify this line.) 
-apachelog4cxx-version = 0.10.0-603-014954db -apr-version = 1.7.0 +# CLASS:HIGHLEVEL (important identifier for 'awk'; don't modify this line) +apachelog4cxx-version = 0.12.1 apr-util-version = 1.6.1 -astrometrynet-version = 0.80 +apr-version = 1.7.0 +astrometrynet-version = 0.91 atlas-version = 3.10.3 -autoconf-version = 2.69.200-babc -automake-version = 1.16.2 -bison-version = 3.7 -boost-version = 1.73.0 +autoconf-version = 2.72 +automake-version = 1.17 +boost-version = 1.87.0 cairo-version = 1.16.0 cdsclient-version = 3.84 -cfitsio-version = 3.48 -cmake-version = 3.18.1 -eigen-version = 3.3.7 -emacs-version = 27.1 -expat-version = 2.2.9 -fftw-version = 3.3.8 -flex-version = 2.6.4-91-ga631f5d -freetype-version = 2.10.2 -gdb-version = 9.2 -ghostscript-version = 9.52 +cfitsio-version = 4.5.0 +cmake-version = 3.31.5 +eigen-version = 3.4.0 +emacs-version = 28.1 +expat-version = 2.6.4 +fftw-version = 3.3.10 +flex-version = 2.6.4-410-74a89fd +freetype-version = 2.13.3 +gdb-version = 11.1 ghostscript-fonts-gnu-version = 6.0 ghostscript-fonts-std-version = 8.11 -gnuastro-version = 0.12 +ghostscript-version = 10.04.0 +gnuastro-version = 0.23 gperf-version = 3.1 -gsl-version = 2.6 -healpix-version = 3.50 -help2man-version = 1.47.11 -imagemagick-version = 7.0.8-67 +gsl-version = 2.8 +icu-version = 70.1 +imagemagick-version = 7.1.0-13 imfit-version = 1.6.1 -libffi-version = 3.2.1 -libidn-version = 1.36 -libjpeg-version = v9b +libbsd-version = 0.11.3 +libffi-version = 3.4.7 +libidn-version = 1.42 +libjpeg-version = 9f +libmd-version = 1.0.4 libnsl-version = 1.2.0-4a062cf -libpaper-version = 1.1.28 -libpng-version = 1.6.37 -libtiff-version = 4.0.10 +libpaper-version = 1.1.29 +libpng-version = 1.6.46 +libtiff-version = 4.7.0 libtirpc-version = 1.2.6 +metastore-version = 1.1.2-23-fa9170b missfits-version = 2.8.0 -openblas-version = 0.3.10 -openmpi-version = 4.0.4 +ninjabuild-version = 1.12.1 +openblas-version = 0.3.29 +openmpi-version = 4.1.1 openssh-version = 8.0p1 patch-version 
= 2.7.6 pcre-version = 8.44 -pixman-version = 0.38.0 -python-version = 3.8.5 -R-version = 4.0.2 +pixman-version = 0.40.0 +plplot-version = 5.15.0 +r-cran-version = 4.1.2 rpcsvc-proto-version = 1.4 -scamp-version = 2.6.7 +scamp-version = 2.10.0 scons-version = 3.0.5 sextractor-version = 2.25.0 -swarp-version = 2.38.0 -swig-version = 3.0.12 +swarp-version = 2.41.5 +swig-version = 4.0.2 tides-version = 2.0 -util-linux-version = 2.35 -vim-version = 8.2 +util-linux-version = 2.40.4 +valgrind-version = 3.18.1 +vim-version = 9.0 +wcslib-version = 8.4 xlsxio-version = 0.2.21 yaml-version = 0.2.5 # Xorg packages -util-macros-version = 1.19.2 -xorgproto-version = 2020.1 -libxau-version = 1.0.9 -libxdmcp-version = 1.1.3 -xcb-proto-version = 1.14 -libxcb-version = 1.14 -fontconfig-version = 2.13.1 -xtrans-version = 1.4.0 -libx11-version = 1.6.9 -libxext-version = 1.3.4 -libice-version = 1.0.10 -libsm-version = 1.2.3 -libxt-version = 1.2.0 -libpthread-stubs-version = 0.4 +fontconfig-version = 2.16.0 +libice-version = 1.1.2 +libpthread-stubs-version = 0.5 +libsm-version = 1.2.5 +libx11-version = 1.8 +libxau-version = 1.0.12 +libxcb-version = 1.17.0 +libxdmcp-version = 1.1.5 +libxext-version = 1.3.6 +libxt-version = 1.3.1 +util-macros-version = 1.20.2 +xcb-proto-version = 1.17.0 +xorgproto-version = 2024.1 +xtrans-version = 1.5.2 # Version-dependent build # ----------------------- lapack-version = 3.8.0 -libgit2-version = 1.0.1 -wcslib-version = 7.3 +libgit2-version = 1.9.0 + +# Python should only be updated by the maintainer for Python packages; see +# https://savannah.nongnu.org/task/?16622. +python-version = 3.13.2 # Netpbm's questions in the configure steps maybe change with different or # new versions. -netpbm-version = 10.86.99 +netpbm-version = 10.73.39 -# Minizip is installed with the same `zlib' tarball, and they have the same +# Minizip is installed with the same 'zlib' tarball, and they have the same # version. 
minizip-version = $(zlib-version) -# From version 1.2 OpenSSL may not need a manual addition, as described in -# its comments and `https://savannah.nongnu.org/bugs/?58263'. If it doesn't -# cause problems, put it back in the list of "Basic/low-level" tools. -openssl-version = 1.1.1a - -# Version 3.15.0 needs two patches, please check if they are necessary on -# any future release. -valgrind-version = 3.15.0 - # Be careful with updateing hdf5 because h5py 2.10.0 doesn't work with # version 1.12.0. -hdf5-version = 1.10.5 +hdf5-version = 1.13.1 + @@ -194,65 +224,163 @@ hdf5-version = 1.10.5 # # IMPORTANT: If you intend to change the version of any of the Python # modules/libraries below, please fix the hash strings of the respective -# URL in `reproduce/software/make/python.mk'. +# URL in 'reproduce/software/make/python.mk'. # -# CLASS:PYTHON (important identifier, don't modify this line.) +# CLASS:PYTHON-START (important identifier for 'awk'; don't modify this line) asn1crypto-version = 0.24.0 asteval-version = 0.9.16 -astropy-version = 4.0 -astroquery-version = 0.4 -beautifulsoup4-version = 4.7.1 +astropy-version = 7.0.1 +astropy-iers-data-version = 0.2025.2.24.0.34.4 +beautifulsoup4-version = 4.10.0 +beniget-version = 0.4.1 certifi-version = 2018.11.29 -cffi-version = 1.12.2 +cffi-version = 1.15.0 chardet-version = 3.0.4 +contourpy-version = 1.3.1 corner-version = 2.0.1 -cryptography-version = 2.6.1 -cycler-version = 0.10.0 -cython-version = 0.29.21 +cppy-version = 1.3.1 +cycler-version = 0.12.1 +cython-version = 3.0.12 eigency-version = 1.77 emcee-version = 3.0.1 entrypoints-version = 0.3 -esutil-version = 0.6.4 +esutil-version = 0.6.9 +extension-helpers-version = 1.2.0 +features-version = 0.5.12 flake8-version = 3.7.8 +flit-core-version = 3.11.0 +fonttools-version = 4.56.0 future-version = 0.18.1 -galsim-version = 2.2.1 +galsim-version = 2.3.5 +gast-version = 0.5.3 +gpep517-version = 16 h5py-version = 2.10.0 html5lib-version = 1.0.1 idna-version = 2.8 
jeepney-version = 0.4 -keyring-version = 18.0.0 -kiwisolver-version = 1.0.1 +jinja2-version = 3.0.3 +kiwisolver-version = 1.4.8 lmfit-version = 0.9.14 lsstdesccoord-version = 1.2.0 -matplotlib-version = 3.3.0 -mpi4py-version = 3.0.3 +markupsafe-version = 2.0.1 +meson-version = 1.7.0 +meson-python-version = 0.17.1 +mpi4py-version = 3.1.3 mpmath-version = 1.1.0 -numpy-version = 1.19.1 +numpy-version = 2.2.3 +packaging-version = 24.2 pexpect-version = 4.7.0 +pillow-version = 11.1.0 pip-version = 19.0.2 +ply-version = 3.11 +pybind11-version = 2.13.6 pycodestyle-version = 2.5.0 pycparser-version = 2.19 +pyerfa-version = 2.0.1.5 pyflakes-version = 2.1.1 -pybind11-version = 2.5.0 -pyparsing-version = 2.3.1 +pyproject-metadata-version = 0.9.0 +pyparsing-version = 3.2.1 pypkgconfig-version = 1.5.1 -python-dateutil-version = 2.8.0 -pyyaml-version = 5.1 +python-dateutil-version = 2.9.0.post0 +python-installer-version = 0.7.0 +pythran-version = 0.11.0 +pyyaml-version = 6.0.2 requests-version = 2.21.0 -scipy-version = 1.5.2 -secretstorage-version = 3.1.1 -setuptools-version = 41.6.0 -setuptools_scm-version = 3.3.3 +setuptools-version = 75.8.0 +setuptools-scm-version = 8.2.0 sip_tpv-version = 1.1 -six-version = 1.12.0 +six-version = 1.17.0 soupsieve-version = 1.8 sympy-version = 1.4 -uncertainties-version = 3.1.2 +uncertainties-version = 3.1.6 urllib3-version = 1.24.1 webencodings-version = 0.5.1 +wheel-version = 0.45.1 + +# Matplotlib and Scipy are big packages that may have difficulty resolving +# their dependency hell: +matplotlib-version = 3.10.0 +scipy-version = 1.15.2 + +# CLASS:PYTHON-END (important identifier for 'awk'; don't modify this line) # Special Python modules: # # Healpy: When any Python module is requested, healpix will also build its # Python module Healpy. -healpy-version = xxxxx +# +# Bug: Healpix 3.50 doesn't yet support Python 3.10, so we are commenting +# it here. When future versions fix the problem, we'll un-comment it again. 
+#healpy-version = xxxxx + + + + + +# Problematic software that are currently ignored: + +# Healpix 3.80 has different installation scheme that is not yet +# implemented here, and unfortunately healpix 3.50 has conflicts with +# CFITSIO's new version macro and Python 3.10. So for now, we are ignoring +# it. +#healpix-version = 3.50 + +# Setuptools-rust crash (https://savannah.nongnu.org/bugs/index.php?61731), +# so it and its dependencies are being ignored: 'cryptography', and thus +# 'secretstorage' and thus 'keyring' and thus 'astroquery'. +setuptools-rust-version = 1.10.2 +semantic-version-version = 2.10.0 +#cryptography-version = 36.0.1 +#secretstorage-version = 3.1.1 +#keyring-version = 18.0.0 +#astroquery-version = 0.4 + + + + + +# R-CRAN (i.e. 'R') packages +# ------------------ +# +# Similar to optional programs and libraries above. +# +# Notation: The R package and variables associated with it are by default +# called 'R-CRAN' (or 'r-cran') in Maneage, because searching on a single +# letter string 'R' is extremely inefficient. +# +# IMPORTANT: Similar to all software, if you intend to change the version +# of any of the R-CRAN modules/libraries below, please fix the hash strings +# of the respective packages in 'reproduce/software/conf/checksums.conf'. 
+# +# CLASS:R-CRAN-START (important identifier for 'awk'; don't modify this line) +r-cran-MASS-version = 7.3-54 +r-cran-R6-version = 2.5.0 +r-cran-RColorBrewer-version = 1.1-2 +r-cran-cli-version = 2.5.0 +r-cran-colorspace-version = 2.0-1 +r-cran-cowplot-version = 1.1.1 +r-cran-crayon-version = 1.4.1 +r-cran-digest-version = 0.6.27 +r-cran-ellipsis-version = 0.3.2 +r-cran-fansi-version = 0.5.0 +r-cran-farver-version = 2.1.0 +r-cran-ggplot2-version = 3.3.4 +r-cran-glue-version = 1.4.2 +r-cran-gridExtra-version = 2.3 +r-cran-gtable-version = 0.3.0 +r-cran-isoband-version = 0.2.4 +r-cran-labeling-version = 0.4.2 +r-cran-lifecycle-version = 1.0.0 +r-cran-magrittr-version = 2.0.1 +r-cran-mgcv-version = 1.8-36 +r-cran-munsell-version = 0.5.0 +r-cran-pillar-version = 1.6.1 +r-cran-pkgconfig-version = 2.0.3 +r-cran-rlang-version = 0.4.11 +r-cran-scales-version = 1.1.1 +r-cran-tibble-version = 3.1.2 +r-cran-utf8-version = 1.2.1 +r-cran-vctrs-version = 0.3.8 +r-cran-viridisLite-version = 0.4.0 +r-cran-withr-version = 2.4.2 +# CLASS:R-CRAN-END (important identifier for 'awk'; don't modify this line) diff --git a/reproduce/software/make/README.md b/reproduce/software/make/README.md index 8d12d7a..00afef4 100644 --- a/reproduce/software/make/README.md +++ b/reproduce/software/make/README.md @@ -1,7 +1,7 @@ Software building instructions ------------------------------ -Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org>\ +Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org>\ See the end of the file for license conditions. This directory contains Makefiles that are called by the high-level diff --git a/reproduce/software/make/atlas-multiple.mk b/reproduce/software/make/atlas-multiple.mk index e782578..6e7d415 100644 --- a/reproduce/software/make/atlas-multiple.mk +++ b/reproduce/software/make/atlas-multiple.mk @@ -3,12 +3,12 @@ # ------------------------------------------------------------------------ # !!!!! IMPORTANT NOTES !!!!! 
# -# This Makefile will be run during the initial `./project configure' +# This Makefile will be run during the initial './project configure' # script. It is not included into the reproduction pipe after that. # # ------------------------------------------------------------------------ # -# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/reproduce/software/make/atlas-single.mk b/reproduce/software/make/atlas-single.mk index 0934b66..2b68677 100644 --- a/reproduce/software/make/atlas-single.mk +++ b/reproduce/software/make/atlas-single.mk @@ -3,12 +3,12 @@ # ------------------------------------------------------------------------ # !!!!! IMPORTANT NOTES !!!!! # -# This Makefile will be run during the initial `./project configure' +# This Makefile will be run during the initial './project configure' # script. It is not included into the reproduction pipe after that. # # ------------------------------------------------------------------------ # -# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/reproduce/software/make/basic.mk b/reproduce/software/make/basic.mk index 0d114db..4b18c29 100644 --- a/reproduce/software/make/basic.mk +++ b/reproduce/software/make/basic.mk @@ -4,7 +4,7 @@ # ------------------------------------------------------------------------ # !!!!! IMPORTANT NOTES !!!!! # -# This Makefile will be run by the initial `./project configure' script. It +# This Makefile will be run by the initial './project configure' script. It # is not included into the project afterwards. 
# # This Makefile builds low-level and basic tools that are necessary in any @@ -21,8 +21,9 @@ # # ------------------------------------------------------------------------ # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2022-2025 Pedram Ashofteh Ardakani <pedramardakani@pm.me> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -38,7 +39,7 @@ # along with this Makefile. If not, see <http://www.gnu.org/licenses/>. # Top level environment -include reproduce/software/config/LOCAL.conf +include .build/software/config/LOCAL.conf include reproduce/software/make/build-rules.mk include reproduce/software/config/versions.conf include reproduce/software/config/checksums.conf @@ -48,22 +49,27 @@ include reproduce/software/config/checksums.conf include reproduce/software/config/urls.conf # Basic directories -lockdir = $(BDIR)/software/locks -tdir = $(BDIR)/software/tarballs -ddir = $(BDIR)/software/build-tmp -idir = $(BDIR)/software/installed -ibdir = $(BDIR)/software/installed/bin -ildir = $(BDIR)/software/installed/lib -ibidir = $(BDIR)/software/installed/version-info/proglib +lockdir = $(BDIR)/software/locks +tdir = $(BDIR)/software/tarballs +ddir = $(BDIR)/software/build-tmp +idir = $(BDIR)/software/installed +ibdir = $(BDIR)/software/installed/bin +ildir = $(BDIR)/software/installed/lib +iidir = $(BDIR)/software/installed/include +shsrcdir = "$(shell pwd)"/reproduce/software/shell +ibidir = $(BDIR)/software/installed/version-info/proglib # Ultimate Makefile target. 
GNU Nano (a simple and very light-weight text # editor) is installed by default, it is recommended to have it in the # 'basic.mk', so Maneaged projects can be edited on any system (even when -# there is no command-line text editor is available). +# there is no command-line text editor available). +# +# The recipe is '@echo > /dev/null' so Make does not print "make: Nothing +# to be done for 'all'." targets-proglib = low-level-links \ gcc-$(gcc-version) \ nano-$(nano-version) -all: $(foreach p, $(targets-proglib), $(ibidir)/$(p)) +all: $(foreach p, $(targets-proglib), $(ibidir)/$(p)); @echo > /dev/null # Define the shell environment # ---------------------------- @@ -86,21 +92,38 @@ export SHELL := $(ibdir)/dash export PATH := $(ibdir):$(PATH) export PKG_CONFIG_PATH := $(ildir)/pkgconfig export PKG_CONFIG_LIBDIR := $(ildir)/pkgconfig -export CPPFLAGS := -I$(idir)/include $(CPPFLAGS) \ - -Wno-nullability-completeness export LDFLAGS := $(rpath_command) -L$(ildir) $(LDFLAGS) +# Disable built-in rules (which are not needed here!) +.SUFFIXES: + +# See description of '-Wno-nullability-completeness' in +# 'reproduce/software/shell/configure.sh'. +ifeq ($(on_mac_os),yes) + noccwarnings=-Wno-nullability-completeness +endif +export CPPFLAGS := -I$(idir)/include $(CPPFLAGS) $(noccwarnings) + # This is the "basic" tools where we are relying on the host operating # system, but are slowly populating our basic software envirnoment. To run -# (system or template) programs, `LD_LIBRARY_PATH' is necessary, so here, +# (system or template) programs, 'LD_LIBRARY_PATH' is necessary, so here, # we'll first tell the programs to look into any possible pre-defined -# `LD_LIBRARY_PATH', then we'll add our own newly installed libraries. We +# 'LD_LIBRARY_PATH', then we'll add our own newly installed libraries. We # will also make sure that there is no "current directory" in it (by -# removing a starting or trailing `:' and any occurance of `::'. 
+# removing a starting or trailing ':' and any occurance of '::'. +# +# But first: in case LD_LIBRARY_PATH is empty, give it the default value of +# $(sys_library_sh_path) (which was the location of the libraries needed by +# the host's shell). This is because after we add the Maneage's library +# path, on some systems, no other libraries will be checked except those +# that are in 'LD_LIBRARY_PATH'. +ifeq ($(strip $(LD_LIBRARY_PATH)),) +export LD_LIBRARY_PATH=$(sys_library_sh_path) +endif export LD_LIBRARY_PATH := $(shell echo $(LD_LIBRARY_PATH):$(ildir) \ | sed -e's/::/:/g' -e's/^://' -e's/:$$//') -# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is +# RPATH is automatically written in macOS, so 'DYLD_LIBRARY_PATH' is # ultimately redundant. But on some systems, even having a single value # causes crashs (see bug #56682). So we'll just give it no value at all. export DYLD_LIBRARY_PATH := @@ -117,17 +140,24 @@ export DYLD_LIBRARY_PATH := # Afer putting everything together, we use the first server as the # reference for all software if their '-url' variable isn't defined (in # 'reproduce/software/config/urls.conf'). -downloadwrapper = ./reproduce/analysis/bash/download-multi-try +downloadwrapper = ./reproduce/analysis/bash/download-multi-try.sh maneage_backup_urls := $(shell awk '!/^#/{printf "%s ", $$1}' \ reproduce/software/config/servers-backup.conf) backupservers_all = $(user_backup_urls) $(maneage_backup_urls) topbackupserver = $(word 1, $(backupservers_all)) backupservers = $(filter-out $(topbackupserver),$(backupservers_all)) - - - - +# When building in Apptainer containers, as of 2025-04-18, we need to +# configure Maneage as root (within the container). In such cases, we need +# to activate the 'FORCE_UNSAFE_CONFIGURE' environment variable to build +# some of the software. The 'if' statement is here to make sure we are in +# Apptainer: in other situations, the "unsafe" configure script shouldn't +# be activated. 
Note that this doesn't happen in Docker (where the Maneage +# source is in the same directory) because we build a non-root ('maneager' +# user there who executes the configure command. +unsafe-config = if [ $$(pwd) = "/home/maneager/source" ] \ + && [ $$(whoami) = root ]; then \ + export FORCE_UNSAFE_CONFIGURE=1; fi @@ -150,10 +180,10 @@ backupservers = $(filter-out $(topbackupserver),$(backupservers_all)) # # About ccache: ccache acts like a wrapper over the C compiler and is made # to avoid/speed-up compiling of identical files in a system (it is -# commonly used on large servers). It actually makes `gcc' or `g++' a +# commonly used on large servers). It actually makes 'gcc' or 'g++' a # symbolic link to itself so it can control them internally. So, for our # purpose here, it is very annoying and can cause many complications. We -# thus remove any part of PATH of that has `ccache' in it before making +# thus remove any part of PATH of that has 'ccache' in it before making # symbolic links to the programs we are not building ourselves. # # The double quotations after the starting 'export PATH' are necessary in @@ -172,9 +202,9 @@ makelink = origpath="$$PATH"; \ | tr '\n' :)"; \ if type $(1) > /dev/null 2> /dev/null; then \ if [ x$(3) = x ]; then \ - ln -sf "$$(realpath $$(which $(1)))" $(ibdir)/$(1); \ + ln -sf "$$(realpath $$(command -v $(1)))" $(ibdir)/$(1); \ else \ - ln -sf "$$(realpath $$(which $(1)))" $(ibdir)/$(3); \ + ln -sf "$$(realpath $$(command -v $(1)))" $(ibdir)/$(3); \ fi; \ else \ if [ "x$(strip $(2))" = xmandatory ]; then \ @@ -189,26 +219,27 @@ $(ibdir) $(ildir):; mkdir $@ $(ibidir)/low-level-links: $(ibidir)/grep-$(grep-version) \ | $(ibdir) $(ildir) - # Hardware specific +# Hardware specific $(call makelink,lp) # For printing, necessary for R. $(call makelink,lpr) # For printing, necessary for R. 
- # Mac OS specific +# Mac OS specific $(call makelink,mig) $(call makelink,xcrun) $(call makelink,sysctl) $(call makelink,sw_vers) + $(call makelink,codesign) $(call makelink,dsymutil) $(call makelink,install_name_tool) - # On Mac OS, libtool is different compared to GNU Libtool. The - # libtool we'll build in the high-level dependencies has the - # executable name `glibtool'. +# On Mac OS, libtool is different compared to GNU Libtool. The +# libtool we'll build in the high-level dependencies has the +# executable name 'glibtool'. $(call makelink,libtool) - # Necessary libraries: - # Libdl (for dynamic loading libraries at runtime) - # POSIX Threads library for multi-threaded programs. +# Necessary libraries: +# Libdl (for dynamic loading libraries at runtime) +# POSIX Threads library for multi-threaded programs. for l in dl pthread; do if [ -f /usr/lib/lib$$l.a ]; then for f in /usr/lib/lib$$l.*; do @@ -218,8 +249,12 @@ $(ibidir)/low-level-links: $(ibidir)/grep-$(grep-version) \ fi done - # We want this to be empty (so it doesn't interefere with the other - # files in `ibidir'. +# Useful tools: 'ldd' (list libraries linked by binary on GNU +# systems) + $(call makelink,ldd) + +# We want this to be empty (so it doesn't interefere with the other +# files in 'ibidir'. touch $@ @@ -236,11 +271,8 @@ $(ibidir)/low-level-links: $(ibidir)/grep-$(grep-version) \ # # The first set of programs to be built are those that we need to unpack # the source code tarballs of each program. We have already installed Lzip -# before calling 'basic.mk', so it is present and working. Hence we first -# build the Lzipped tarball of Gzip, then use our own Gzip to unpack the -# tarballs of the other compression programs. Once all the compression -# programs/libraries are complete, we build our own GNU Tar and continue -# with other software. +# before calling 'basic.mk', so it is present and working. So the only +# prerequisites of these (until reaching Tar) is the necessary directories. 
$(lockdir): | $(BDIR); mkdir $@ $(ibidir)/gzip-$(gzip-version): | $(ibdir) $(ildir) $(lockdir) tarball=gzip-$(gzip-version).tar.lz @@ -248,27 +280,27 @@ $(ibidir)/gzip-$(gzip-version): | $(ibdir) $(ildir) $(lockdir) $(call gbuild, gzip-$(gzip-version), static, , V=1) echo "GNU Gzip $(gzip-version)" > $@ -$(ibidir)/xz-$(xz-version): $(ibidir)/gzip-$(gzip-version) - tarball=xz-$(xz-version).tar.gz +$(ibidir)/xz-$(xz-version): | $(ibdir) $(ildir) $(lockdir) + tarball=xz-$(xz-version).tar.lz $(call import-source, $(xz-url), $(xz-checksum)) $(call gbuild, xz-$(xz-version), static) echo "XZ Utils $(xz-version)" > $@ -$(ibidir)/bzip2-$(bzip2-version): $(ibidir)/gzip-$(gzip-version) +$(ibidir)/bzip2-$(bzip2-version): | $(ibdir) $(ildir) $(lockdir) - # Download the tarball. - tarball=bzip2-$(bzip2-version).tar.gz +# Download the tarball. + tarball=bzip2-$(bzip2-version).tar.lz $(call import-source, $(bzip2-url), $(bzip2-checksum)) - # Bzip2 doesn't have a `./configure' script, and its Makefile - # doesn't build a shared library. So we can't use the `gbuild' - # function here and we need to take some extra steps (inspired - # from the GNU/Linux from Scratch (LFS) guide for Bzip2): - # 1) The `sed' call is for relative installed symbolic links. - # 2) The special Makefile-libbz2_so builds shared libraries. - # - # NOTE: the major version number appears in the final symbolic - # link. +# Bzip2 doesn't have a './configure' script, and its Makefile doesn't +# build a shared library. So we can't use the 'gbuild' function here +# and we need to take some extra steps (inspired from the GNU/Linux +# from Scratch (LFS) guide for Bzip2): +# +# 1) The 'sed' call is for relative installed symbolic links. +# 2) The special Makefile-libbz2_so builds shared libraries. +# +# NOTE: the major version number appears in the final symbolic link. 
tdir=bzip2-$(bzip2-version) if [ $(static_build) = yes ]; then makecommand="make LDFLAGS=-static" @@ -283,8 +315,9 @@ $(ibidir)/bzip2-$(bzip2-version): $(ibidir)/gzip-$(gzip-version) fi cd $(ddir) rm -rf $$tdir - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd $$tdir + $(shsrcdir)/prep-source.sh $(ibdir) sed -e 's@\(ln -s -f \)$$(PREFIX)/bin/@\1@' Makefile \ > Makefile.sed mv Makefile.sed Makefile @@ -296,38 +329,16 @@ $(ibidir)/bzip2-$(bzip2-version): $(ibidir)/gzip-$(gzip-version) cd .. rm -rf $$tdir cd $(ildir) - ln -fs libbz2.so.1.0 libbz2.so + ln -fs libbz2.so.$(bzip2-version) libbz2.so echo "Bzip2 $(bzip2-version)" > $@ -$(ibidir)/unzip-$(unzip-version): $(ibidir)/gzip-$(gzip-version) - tarball=unzip-$(unzip-version).tar.gz - v=$$(echo $(unzip-version) | sed -e's/\.//') - $(call import-source, $(unzip-url), $(unzip-checksum)) - $(call gbuild, unzip$$v, static,, \ - -f unix/Makefile generic \ - CFLAGS="-DBIG_MEM -DMMAP",,pwd, \ - -f unix/Makefile generic \ - BINDIR=$(ibdir) MANDIR=$(idir)/man/man1 ) - echo "Unzip $(unzip-version)" > $@ - -$(ibidir)/zip-$(zip-version): $(ibidir)/gzip-$(gzip-version) - tarball=zip-$(zip-version).tar.gz - v=$$(echo $(zip-version) | sed -e's/\.//') - $(call import-source, $(zip-url), $(zip-checksum)) - $(call gbuild, zip$$v, static,, \ - -f unix/Makefile generic \ - CFLAGS="-DBIG_MEM -DMMAP",,pwd, \ - -f unix/Makefile generic \ - BINDIR=$(ibdir) MANDIR=$(idir)/man/man1 ) - echo "Zip $(zip-version)" > $@ - # Some programs (like Wget and CMake) that use zlib need it to be dynamic # so they use our custom build. So we won't force a static-only build. # -# Note for a static-only build: Zlib's `./configure' doesn't use Autoconf's -# configure script, it just accepts a direct `--static' option. 
-$(ibidir)/zlib-$(zlib-version): $(ibidir)/gzip-$(gzip-version) - tarball=zlib-$(zlib-version).tar.gz +# Note for a static-only build: Zlib's './configure' doesn't use Autoconf's +# configure script, it just accepts a direct '--static' option. +$(ibidir)/zlib-$(zlib-version): | $(ibdir) $(ildir) $(lockdir) + tarball=zlib-$(zlib-version).tar.lz $(call import-source, $(zlib-url), $(zlib-checksum)) $(call gbuild, zlib-$(zlib-version)) echo "Zlib $(zlib-version)" > $@ @@ -339,16 +350,16 @@ $(ibidir)/zlib-$(zlib-version): $(ibidir)/gzip-$(gzip-version) # software to be built). $(ibidir)/tar-$(tar-version): \ $(ibidir)/xz-$(xz-version) \ - $(ibidir)/zip-$(zip-version) \ $(ibidir)/gzip-$(gzip-version) \ $(ibidir)/zlib-$(zlib-version) \ - $(ibidir)/bzip2-$(bzip2-version) \ - $(ibidir)/unzip-$(unzip-version) - # Since all later programs depend on Tar, the configuration will be - # stuck here, only making Tar. So its more efficient to built it on - # multiple threads (when the user's Make doesn't pass down the - # number of threads). - tarball=tar-$(tar-version).tar.gz + $(ibidir)/bzip2-$(bzip2-version) + +# Since all later programs depend on Tar, the configuration will hit +# a bottleneck here: only making Tar. So its more efficient to built +# it on multiple threads (even when the user's Make doesn't pass down +# the number of threads). + $(call unsafe-config) + tarball=tar-$(tar-version).tar.lz $(call import-source, $(tar-url), $(tar-checksum)) $(call gbuild, tar-$(tar-version), , , -j$(numthreads) V=1) echo "GNU Tar $(tar-version)" > $@ @@ -370,7 +381,7 @@ $(ibidir)/tar-$(tar-version): \ # a prerequisite (and forgetting in others causing bugs), we'll put it as a # dependancy of 'tar'. 
$(ibidir)/patchelf-$(patchelf-version): $(ibidir)/tar-$(tar-version) - tarball=patchelf-$(patchelf-version).tar.gz + tarball=patchelf-$(patchelf-version).tar.lz $(call import-source, $(patchelf-url), $(patchelf-checksum)) if [ x$(on_mac_os) = xyes ]; then echo "" > $@ @@ -395,75 +406,80 @@ $(ibidir)/patchelf-$(patchelf-version): $(ibidir)/tar-$(tar-version) # basic dependencies. # # Unfortunately Make needs dynamic linking in two instances: when loading -# objects (dynamically linked libraries), or when using the `getpwnam' +# objects (dynamically linked libraries), or when using the 'getpwnam' # function (for tilde expansion). The first can be disabled with -# `--disable-load', but unfortunately I don't know any way to fix the +# '--disable-load', but unfortunately I don't know any way to fix the # second. So, we'll have to build it dynamically for now. $(ibidir)/ncurses-$(ncurses-version): $(ibidir)/patchelf-$(patchelf-version) - tarball=ncurses-$(ncurses-version).tar.gz + tarball=ncurses-$(ncurses-version).tar.lz $(call import-source, $(ncurses-url), $(ncurses-checksum)) - # Delete the library that will be installed (so we can make sure - # the build process completed afterwards and reset the links). +# Delete the library that will be installed (so we can make sure the +# build process completed afterwards and reset the links). rm -f $(ildir)/libncursesw* - # Delete the (possibly existing) low-level programs that depend on - # `readline', and thus `ncurses'. Since these programs are actually - # used during the building of `ncurses', we need to delete them so - # the build process doesn't use the project's Bash and AWK, but the - # host's. +# Delete the (possibly existing) low-level programs that depend on +# 'readline', and thus 'ncurses'. Since these programs are actually +# used during the building of 'ncurses', we need to delete them so +# the build process doesn't use the project's Bash and AWK, but the +# host's. 
rm -f $(ibdir)/bash* $(ibdir)/awk* $(ibdir)/gawk* - # Standard build process. +# Standard build process. + export CFLAGS="-std=gnu17 $$CFLAGS" $(call gbuild, ncurses-$(ncurses-version), static, \ --with-shared --enable-rpath --without-normal \ --without-debug --with-cxx-binding \ --with-cxx-shared --enable-widec --enable-pc-files \ --with-pkg-config=$(ildir)/pkgconfig, -j$(numthreads)) - # Unfortunately there are many problems with `ncurses' using - # "normal" (or 8-bit) characters. The standard way that will work - # is to build it with wide character mode as you see above in the - # configuration (or the `w' prefix you see below). Also, most - # programs (and in particular Bash and AWK), first look for other - # (mostly obsolete) libraries like tinfo, which define the same - # symbols. The links below address both situations: we need to fool - # higher-level packages to find this library even if they aren't - # explicitly mentioning its name correctly (as a value to `-l' at - # link time in their configure scripts). - # - # This part is taken from the Arch GNU/Linux build script[1], then - # extended to Mac thanks to Homebrew's script [2]. - # - # [1] https://git.archlinux.org/svntogit/packages.git/tree/trunk/PKGBUILD?h=packages/ncurses - # [2] https://github.com/Homebrew/homebrew-core/blob/master/Formula/ncurses.rb - # - # Since we can't have comments, in the connected script, here is a - # summary: - # - # 1. We find the actual suffix of the library, from the file that - # is not a symbolic link (starting with `-' in the output of - # `ls -l'). - # - # 2. We make symbolic links to all the "ncurses", "ncurses++", - # "form", "panel" and "menu" libraries to point to their - # "wide" (character) library. - # - # 3. We make symbolic links to the "tic" and "tinfo" libraries to - # point to the same `libncursesw' library. - # - # 4. 
Some programs link with "curses" (not "ncurses", notice the - # starting "n"), so we'll also make links for these to point - # to the `libncursesw' library. - # - # 5. A link is made to also be able to include files from the - # `ncurses' headers. +# Unfortunately there are many problems with 'ncurses' using "normal" +# (or 8-bit) characters. The standard way that will work is to build +# it with wide character mode as you see above in the configuration +# (or the 'w' prefix you see below). Also, most programs (and in +# particular Bash and AWK), first look for other (mostly obsolete) +# libraries like tinfo, which define the same symbols. The links +# below address both situations: we need to fool higher-level +# packages to find this library even if they aren't explicitly +# mentioning its name correctly (as a value to '-l' at link time in +# their configure scripts). +# +# This part is taken from the Arch GNU/Linux build script[1], then +# extended to Mac thanks to Homebrew's script [2]. +# +# [1] https://git.archlinux.org/svntogit/packages.git/tree/trunk/PKGBUILD?h=packages/ncurses +# [2] https://github.com/Homebrew/homebrew-core/blob/master/Formula/ncurses.rb +# +# Since we can't have comments, in the connected script, here is a +# summary: +# +# 1. We find the actual suffix of the library, from the file that +# is not a symbolic link (starting with '-' in the output of 'ls +# -l'). +# +# 2. We make symbolic links to all the "ncurses", "ncurses++", +# "form", "panel" and "menu" libraries to point to their "wide" +# (character) library. +# +# 3. We make symbolic links to the "tic" and "tinfo" libraries to +# point to the same 'libncursesw' library. +# +# 4. Some programs link with "curses" (not "ncurses", notice the +# starting "n"), so we'll also make links for these to point to +# the 'libncursesw' library. +# +# 5. A link is made to also be able to include files from the +# 'ncurses' headers. +# +# 6. 
Top-level symbolic links are made for the 'include' (.h) files. +# if [ x$(on_mac_os) = xyes ]; then so="dylib"; else so="so"; fi if [ -f $(ildir)/libncursesw.$$so ]; then + unalias ls || true # avoid decorated 'ls' commands with extra characters sov=$$(ls -l $(ildir)/libncursesw* \ | awk '/^-/{print $$NF}' \ - | sed -e's|'$(ildir)/libncursesw.'||') + | sed -e "s;$(ildir)/libncursesw\.;;") cd "$(ildir)" for lib in ncurses ncurses++ form panel menu; do @@ -481,6 +497,11 @@ $(ibidir)/ncurses-$(ncurses-version): $(ibidir)/patchelf-$(patchelf-version) ln -fs $(ildir)/pkgconfig/ncursesw.pc pkgconfig/cursesw.pc ln -fs $(idir)/include/ncursesw $(idir)/include/ncurses + +# Add symbolic links for the ncursesw/*.h 'include' files +# so that they can be found in the top-level include/ directory. + cd "$(iidir)" + ln -fsv $(idir)/include/ncursesw/*.h . echo "GNU NCURSES $(ncurses-version)" > $@ else exit 1 @@ -488,7 +509,7 @@ $(ibidir)/ncurses-$(ncurses-version): $(ibidir)/patchelf-$(patchelf-version) $(ibidir)/readline-$(readline-version): \ $(ibidir)/ncurses-$(ncurses-version) - tarball=readline-$(readline-version).tar.gz + tarball=readline-$(readline-version).tar.lz $(call import-source, $(readline-url), $(readline-checksum)) $(call gbuild, readline-$(readline-version), static, \ --with-curses --disable-install-examples, \ @@ -496,12 +517,12 @@ $(ibidir)/readline-$(readline-version): \ echo "GNU Readline $(readline-version)" > $@ -# IMPORTANT: Even though we have enabled `rpath', Bash doesn't write the +# IMPORTANT: Even though we have enabled 'rpath', Bash doesn't write the # absolute adddress of the libraries it depends on! 
Therefore, if we -# configure Bash with `--with-installed-readline' (so the installed version +# configure Bash with '--with-installed-readline' (so the installed version # of Readline, that we build below as a prerequisite or AWK, is used) and -# you run `ldd $(ibdir)/bash' on the resulting binary, it will say that it -# is linking with the system's `readline'. But if you run that same command +# you run 'ldd $(ibdir)/bash' on the resulting binary, it will say that it +# is linking with the system's 'readline'. But if you run that same command # within a rule in this project, you'll see that it is indeed linking with # our own built readline. # @@ -509,11 +530,11 @@ $(ibidir)/readline-$(readline-version): \ # released as patches. Therefore we'll need to make our own fully-working # and updated tarball to build the proper version of Bash. You download and # apply them to the original tarball and make a new one with the following -# series of commands (just replace `NUMBER' with the total number of +# series of commands (just replace 'NUMBER' with the total number of # patches that you want to apply). # # $ number=NUMBER -# $ tar xf bash-5.0.tar.gz +# $ tar -xf bash-5.0.tar.gz # $ cd bash-5.0 # $ for i in $(seq 1 $number); do \ # pname=bash50-$(printf "%03d" $i); \ @@ -529,22 +550,22 @@ $(ibidir)/bash-$(bash-version): \ $(ibidir)/gettext-$(gettext-version) \ $(ibidir)/readline-$(readline-version) - # Download the tarball. +# Download the tarball. tarball=bash-$(bash-version).tar.lz $(call import-source, $(bash-url), $(bash-checksum)) - # Delete the (possibly) existing Bash executable in the project, - # let it use the default shell of the host. +# Delete the (possibly) existing Bash executable in the project, +# let it use the default shell of the host. rm -f $(ibdir)/bash - # Bash has many `--enable' features which are already enabled by - # default. 
As described in the manual, they are mainly useful when - # you disable them all with `--enable-minimal-config' and enable a - # subset using the `--enable' options. +# Bash has many '--enable' features which are already enabled by +# default. As described in the manual, they are mainly useful when +# you disable them all with '--enable-minimal-config' and enable a +# subset using the '--enable' options. if [ "x$(static_build)" = xyes ]; then stopt="--enable-static-link" else stopt="" fi; - export CFLAGS="$$CFLAGS \ + export CFLAGS="$$CFLAGS -std=gnu17 \ -DDEFAULT_PATH_VALUE='\"$(ibdir)\"' \ -DSTANDARD_UTILS_PATH='\"$(ibdir)\"' \ -DSYS_BASHRC='\"$(BASH_ENV)\"' " @@ -553,22 +574,20 @@ $(ibidir)/bash-$(bash-version): \ --with-curses=yes, \ -j$(numthreads)) - # Atleast on GNU/Linux systems, Bash doesn't include RPATH by - # default. So, we have to manually include it, currently we are - # only doing this on GNU/Linux systems (using the `patchelf' - # program). +# Atleast on GNU/Linux systems, Bash doesn't include RPATH by +# default. So, we have to manually include it, currently we are only +# doing this on GNU/Linux systems (using the 'patchelf' program). if [ -f $(ibdir)/patchelf ]; then $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/bash; fi - # To be generic, some systems use the `sh' command to call the - # shell. By convention, `sh' is just a symbolic link to the - # preferred shell executable. So we'll define `$(ibdir)/sh' as a - # symbolic link to the Bash that we just built and installed. - # - # Just to be sure that the installation step above went well, - # before making the link, we'll see if the file actually exists - # there. +# To be generic, some systems use the 'sh' command to call the +# shell. By convention, 'sh' is just a symbolic link to the preferred +# shell executable. So we'll define '$(ibdir)/sh' as a symbolic link +# to the Bash that we just built and installed. 
+# +# Just to be sure that the installation step above went well, before +# making the link, we'll see if the file actually exists there. ln -fs $(ibdir)/bash $(ibdir)/sh echo "GNU Bash $(bash-version)" > $@ @@ -584,9 +603,9 @@ $(ibidir)/bash-$(bash-version): \ # Level 4: Most other programs # ---------------------------- -# In Perl, The `-shared' flag will cause problems while building on macOS, +# In Perl, The '-shared' flag will cause problems while building on macOS, # so we'll only use this configuration option when we are GNU/Linux -# systems. However, since the whole option must be used (which includes `=' +# systems. However, since the whole option must be used (which includes '=' # and empty space), its easier to define the variable as a Make variable # outside the recipe, not as a shell variable inside it. ifeq ($(on_mac_os),yes) @@ -595,7 +614,7 @@ else perl-conflddlflags = -Dlddlflags="-shared $$LDFLAGS" endif $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version) - tarball=perl-$(perl-version).tar.gz + tarball=perl-$(perl-version).tar.lz $(call import-source, $(perl-url), $(perl-checksum)) major_version=$$(echo $(perl-version) \ | sed -e's/\./ /g' \ @@ -605,8 +624,9 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version) | awk '{printf("%d.%d", $$1, $$2)}') cd $(ddir) rm -rf perl-$(perl-version) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd perl-$(perl-version) + $(shsrcdir)/prep-source.sh $(ibdir) ./Configure -des \ -Dusethreads \ -Duseshrplib \ @@ -615,7 +635,7 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version) -Dprivlib=$(idir)/share/perl$$major_version/core_perl \ -Darchlib=$(idir)/lib/perl$$major_version/$$base_version/core_perl \ -Dsitelib=$(idir)/share/perl$$major_version/site_perl \ - -Dsitearch=$(idir)/lib/perl$$major_version/$$basever/site_perl \ + -Dsitearch=$(idir)/lib/perl$$major_version/$$base_version/site_perl \ 
-Dvendorlib=$(idir)/share/perl$$major_version/vendor_perl \ -Dvendorarch=$(idir)/lib/perl$$major_version/$$base_version/vendor_perl \ -Dscriptdir=$(idir)/bin/core_perl \ @@ -627,7 +647,7 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version) -Dcccdlflags='-fPIC' \ $(perl-conflddlflags) \ -Dldflags="$$LDFLAGS" - make -j$(numthreads) + make -j$(numthreads) V=1 make install cd .. rm -rf perl-$(perl-version) @@ -641,17 +661,17 @@ $(ibidir)/perl-$(perl-version): $(ibidir)/patchelf-$(patchelf-version) # Coreutils # --------- # -# For some reason, Coreutils doesn't include `rpath' in its installed +# For some reason, Coreutils doesn't include 'rpath' in its installed # executables (even though it says that by default its included and that -# even when calling `--enable-rpath=yes'). So we have to manually add -# `rpath' to Coreutils' executables after the standard build is +# even when calling '--enable-rpath=yes'). So we have to manually add +# 'rpath' to Coreutils' executables after the standard build is # complete. # # One problem is that Coreutils installs many very basic executables which # might be in used by other programs. So we must make sure that when # Coreutils is being built, no other program is being built in # parallel. The solution to the many executables it installs is to make a -# fake installation (with `DESTDIR'), and get a list of the contents of the +# fake installation (with 'DESTDIR'), and get a list of the contents of the # directory to find the names. # # The echo after the PatchELF loop is to avoid a crash if the last @@ -664,32 +684,29 @@ $(ibidir)/coreutils-$(coreutils-version): \ $(ibidir)/perl-$(perl-version) \ $(ibidir)/openssl-$(openssl-version) - # Import, unpack and enter the source directory. - tarball=coreutils-$(coreutils-version).tar.xz +# Import, unpack and enter the source directory. 
+ $(call unsafe-config) + tarball=coreutils-$(coreutils-version).tar.lz $(call import-source, $(coreutils-url), $(coreutils-checksum)) + +# Unpack and enter the source. cd $(ddir) rm -rf coreutils-$(coreutils-version) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd coreutils-$(coreutils-version) + $(shsrcdir)/prep-source.sh $(ibdir) - # Set the configure script to use our shell, note that we can't - # assume GNU SED here yet (it installs after Coreutils). - sed -e's|\#\! /bin/sh|\#\! $(ibdir)/bash|' \ - -e's|\#\!/bin/sh|\#\! $(ibdir)/bash|' \ - configure > configure-tmp - mv configure-tmp configure - chmod +x configure - - # Configure, build and install Coreutils. +# Configure, build and install Coreutils. ./configure --prefix=$(idir) SHELL=$(ibdir)/bash \ LDFLAGS="$(LDFLAGS)" CPPFLAGS="$(CPPFLAGS)" \ --disable-silent-rules --with-openssl=yes make SHELL=$(ibdir)/bash -j$(numthreads) make SHELL=$(ibdir)/bash install - # Fix RPATH if necessary. +# Fix RPATH if necessary. if [ -f $(ibdir)/patchelf ]; then make SHELL=$(ibdir)/bash install DESTDIR=junkinst + unalias ls || true # Not decorated 'ls' (with extra characters). instprogs=$$(ls junkinst/$(ibdir)) for f in $$instprogs; do $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/$$f @@ -697,12 +714,35 @@ $(ibidir)/coreutils-$(coreutils-version): \ echo "PatchELF applied to all programs." fi - # Come back up to the unpacking directory, delete the source - # directory and write the final target. +# Come back up to the unpacking directory, delete the source +# directory and write the final target. cd .. rm -rf coreutils-$(coreutils-version) echo "GNU Coreutils $(coreutils-version)" > $@ +# Podlators +# +# POD is short for "Plain Old Documentation", that is the format used in +# Perl's documentation. 
Podlators provies two executables pod2man and +# pod2text convert this into the roff format (used in man pages) or pod2 It +# is used by some software like OpenSSL to create their man pages. +$(ibidir)/podlators-$(podlators-version): $(ibidir)/perl-$(perl-version) + tarball=podlators-$(podlators-version).tar.lz + $(call import-source, $(podlators-url), $(podlators-checksum)) + cd $(ddir) + rm -rf podlators-$(podlators-version) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd podlators-$(podlators-version) + $(shsrcdir)/prep-source.sh $(ibdir) + perl Makefile.PL + make + make install + ln -sf $(ibdir)/site_perl/pod2man $(ibdir)/pod2man + ln -sf $(ibdir)/site_perl/pod2text $(ibdir)/pod2text + cd .. + rm -rf podlators-$(podlators-version) + echo "podlators $(podlators-version)" > $@ + # OpenSSL # # Until we find a nice and generic way to create an updated CA file in the @@ -710,26 +750,26 @@ $(ibidir)/coreutils-$(coreutils-version): \ # along with the other tarballs. $(idir)/etc:; mkdir $@ $(idir)/etc/ssl: | $(idir)/etc; mkdir $@ -$(ibidir)/openssl-$(openssl-version): $(ibidir)/perl-$(perl-version) \ +$(ibidir)/openssl-$(openssl-version): $(ibidir)/podlators-$(podlators-version) \ | $(idir)/etc/ssl - # First download the certificates and copy them into the - # installation directory. - tarball=cert.pem - $(call import-source, $(cert-url), $(cert-checksum)) - cp $(tdir)/cert.pem $(idir)/etc/ssl/cert.pem +# First download the certificates and copy them into the +# installation directory. + tarball=cert.pem-$(certpem-version) + $(call import-source, $(certpem-url), $(certpem-checksum)) + cp $(tdir)/cert.pem-$(certpem-version) $(idir)/etc/ssl/cert.pem - # Now download the OpenSSL tarball. - tarball=openssl-$(openssl-version).tar.gz +# Now download the OpenSSL tarball. 
+ tarball=openssl-$(openssl-version).tar.lz $(call import-source, $(openssl-url), $(openssl-checksum)) - # According to OpenSSL's Wiki (link bellow), it can't automatically - # detect Mac OS's structure. It will need some help. So we'll use - # the `on_mac_os' Make variable that we defined in the configure - # script and help it with some extra configuration options and an - # environment variable. - # - # https://wiki.openssl.org/index.php/Compilation_and_Installation +# According to OpenSSL's Wiki (link bellow), it can't automatically +# detect Mac OS's structure. It will need some help. So we'll use the +# 'on_mac_os' Make variable that we defined in the configure script +# and help it with some extra configuration options and an +# environment variable. +# +# https://wiki.openssl.org/index.php/Compilation_and_Installation if [ x$(on_mac_os) = xyes ]; then export KERNEL_BITS=64 copt="shared no-ssl2 no-ssl3 enable-ec_nistp_64_gcc_128" @@ -743,29 +783,19 @@ $(ibidir)/openssl-$(openssl-version): $(ibidir)/perl-$(perl-version) \ --with-zlib-include=$(idir)/include, \ -j$(numthreads), , ./config ) - # Manually insert RPATH inside the two created libraries. +# Manually insert RPATH inside the two created libraries. if [ -f $(ibdir)/patchelf ]; then patchelf --set-rpath $(ildir) $(ildir)/libssl.so patchelf --set-rpath $(ildir) $(ildir)/libcrypto.so fi - # Bug 58263 (https://savannah.nongnu.org/bugs/?58263): In OpenSSL - # Version 1.1.1a (also checked in 1.1.1g), `openssl/ec.h' fails to - # include `openssl/openconf.h' on some OSs. The SED hack below - # inserts a hardwired element of `openssl/openconf.h' that is - # needed to include sections of code `f` that are deprecated in - # 1.2.0, but not yet in 1.1.1. This problem may be solved in - # version 1.2.x, so please check again in that bug. 
- mv -v $(idir)/include/openssl/ec.h $(idir)/include/openssl/ec.h.orig - sed -e 's,\(# include .openssl/opensslconf\.h.\),\1\n#ifndef DEPRECATEDIN_1_2_0\n#define DEPRECATEDIN_1_2_0(f) f;\n#endif\n,' \ - $(idir)/include/openssl/ec.h.orig > $(idir)/include/openssl/ec.h - - # Build the final target. +# Build the final target. echo "OpenSSL $(openssl-version)" > $@ + # Downloaders # ----------- @@ -774,13 +804,13 @@ $(ibidir)/openssl-$(openssl-version): $(ibidir)/perl-$(perl-version) \ # cURL can optionally link with many different network-related libraries on # the host system that we are not yet building in the template. Many of # these are not relevant to most science projects, so we are explicitly -# using `--without-XXX' or `--disable-XXX' so cURL doesn't link with +# using '--without-XXX' or '--disable-XXX' so cURL doesn't link with # them. Note that if it does link with them, the configuration will crash # when the library is updated/changed by the host, and the whole purpose of # this project is avoid dependency on the host as much as possible. 
$(ibidir)/curl-$(curl-version): $(ibidir)/coreutils-$(coreutils-version) - tarball=curl-$(curl-version).tar.gz + tarball=curl-$(curl-version).tar.lz $(call import-source, $(curl-url), $(curl-checksum)) $(call gbuild, curl-$(curl-version), , \ @@ -792,6 +822,8 @@ $(ibidir)/curl-$(curl-version): $(ibidir)/coreutils-$(coreutils-version) --without-librtmp \ --without-libidn2 \ --without-wolfssl \ + --without-nghttp2 \ + --without-nghttp3 \ --without-brotli \ --without-gnutls \ --without-cyassl \ @@ -799,6 +831,7 @@ $(ibidir)/curl-$(curl-version): $(ibidir)/coreutils-$(coreutils-version) --without-axtls \ --disable-ldaps \ --disable-ldap \ + --without-zstd \ --without-nss, V=1) if [ -f $(ibdir)/patchelf ]; then @@ -810,8 +843,8 @@ $(ibidir)/curl-$(curl-version): $(ibidir)/coreutils-$(coreutils-version) # GNU Wget # # Note that on some systems (for example GNU/Linux) Wget needs to explicity -# link with `libdl', but on others (for example Mac OS) it doesn't. We -# check this at configure time and define the `needs_ldl' variable. +# link with 'libdl', but on others (for example Mac OS) it doesn't. We +# check this at configure time and define the 'needs_ldl' variable. # # Also note that since Wget needs to load outside libraries dynamically, it # gives a segmentation fault when built statically. @@ -824,12 +857,12 @@ $(ibidir)/wget-$(wget-version): \ $(ibidir)/libiconv-$(libiconv-version) \ $(ibidir)/coreutils-$(coreutils-version) - # Download the tarball. +# Download the tarball. tarball=wget-$(wget-version).tar.lz $(call import-source, $(wget-url), $(wget-checksum)) - # We need to explicitly disable `libiconv', because of the - # `pkg-config' and `libiconv' problem. +# We need to explicitly disable 'libiconv', because of the +# 'pkg-config' and 'libiconv' problem. 
libs="-pthread" if [ x$(needs_ldl) = xyes ]; then libs="$$libs -ldl"; fi $(call gbuild, wget-$(wget-version), , \ @@ -860,15 +893,22 @@ $(ibidir)/wget-$(wget-version): \ # process of the higher-level programs and libraries. Note that during the # building of those higher-level programs (after this Makefile finishes), # there is no access to the system's PATH. +$(ibidir)/bison-$(bison-version): $(ibidir)/help2man-$(help2man-version) + tarball=bison-$(bison-version).tar.lz + $(call import-source, $(bison-url), $(bison-checksum)) + $(call gbuild, bison-$(bison-version), static, ,V=1 -j$(numthreads)) + echo "GNU Bison $(bison-version)" > $@ + $(ibidir)/diffutils-$(diffutils-version): \ $(ibidir)/coreutils-$(coreutils-version) - tarball=diffutils-$(diffutils-version).tar.xz + tarball=diffutils-$(diffutils-version).tar.lz $(call import-source, $(diffutils-url), $(diffutils-checksum)) $(call gbuild, diffutils-$(diffutils-version), static,,V=1) echo "GNU Diffutils $(diffutils-version)" > $@ $(ibidir)/file-$(file-version): $(ibidir)/coreutils-$(coreutils-version) - tarball=file-$(file-version).tar.gz + export CFLAGS="-std=c99 $$CFLAGS" + tarball=file-$(file-version).tar.lz $(call import-source, $(file-url), $(file-checksum)) $(call gbuild, file-$(file-version), static, \ --disable-libseccomp, V=1) @@ -876,7 +916,7 @@ $(ibidir)/file-$(file-version): $(ibidir)/coreutils-$(coreutils-version) $(ibidir)/findutils-$(findutils-version): \ $(ibidir)/coreutils-$(coreutils-version) - tarball=findutils-$(findutils-version).tar.xz + tarball=findutils-$(findutils-version).tar.lz $(call import-source, $(findutils-url), $(findutils-checksum)) $(call gbuild, findutils-$(findutils-version), static,,V=1) echo "GNU Findutils $(findutils-version)" > $@ @@ -886,19 +926,19 @@ $(ibidir)/gawk-$(gawk-version): \ $(ibidir)/mpfr-$(mpfr-version) \ $(ibidir)/coreutils-$(coreutils-version) - # Download the tarball. +# Download the tarball. 
tarball=gawk-$(gawk-version).tar.lz $(call import-source, $(gawk-url), $(gawk-checksum)) - # AWK doesn't include RPATH by default, so we'll have to manually - # include it using the `patchelf' program (which was a dependency - # of Bash). Just note that AWK produces two executables (for - # example `gawk-4.2.1' and `gawk') and a symbolic link `awk' to one - # of those executables. +# AWK doesn't include RPATH by default, so we'll have to manually +# include it using the 'patchelf' program (which was a dependency of +# Bash). Just note that AWK produces two executables (for example +# 'gawk-4.2.1' and 'gawk') and a symbolic link 'awk' to one of those +# executables. $(call gbuild, gawk-$(gawk-version), static, \ --with-readline=$(idir)) - # Correct the RPATH on systems that have installed patchelf. +# Correct the RPATH on systems that have installed patchelf. if [ -f $(ibdir)/patchelf ]; then if [ -f $(ibdir)/gawk ]; then $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/gawk @@ -909,31 +949,38 @@ $(ibidir)/gawk-$(gawk-version): \ fi fi - # Build final target. +# Build final target. 
echo "GNU AWK $(gawk-version)" > $@ +$(ibidir)/help2man-$(help2man-version): \ + $(ibidir)/coreutils-$(coreutils-version) + tarball=help2man-$(help2man-version).tar.lz + $(call import-source, $(help2man-url), $(help2man-checksum)) + $(call gbuild, help2man-$(help2man-version), static, ,V=1) + echo "Help2man $(Help2man-version)" > $@ + $(ibidir)/libiconv-$(libiconv-version): \ $(ibidir)/pkg-config-$(pkgconfig-version) - tarball=libiconv-$(libiconv-version).tar.gz + tarball=libiconv-$(libiconv-version).tar.lz $(call import-source, $(libiconv-url), $(libiconv-checksum)) $(call gbuild, libiconv-$(libiconv-version), static) echo "GNU libiconv $(libiconv-version)" > $@ $(ibidir)/libunistring-$(libunistring-version): \ $(ibidir)/libiconv-$(libiconv-version) - tarball=libunistring-$(libunistring-version).tar.xz + tarball=libunistring-$(libunistring-version).tar.lz $(call import-source, $(libunistring-url), $(libunistring-checksum)) $(call gbuild, libunistring-$(libunistring-version), static,, \ -j$(numthreads)) echo "GNU libunistring $(libunistring-version)" > $@ $(ibidir)/libxml2-$(libxml2-version): $(ibidir)/patchelf-$(patchelf-version) - # The libxml2 tarball also contains Python bindings which are built - # and installed to a system directory by default. If you don't need - # the Python bindings, the easiest solution is to compile without - # Python support: `./configure --without-python'. If you really need - # the Python bindings, use `--with-python-install-dir=DIR' instead. - tarball=libxml2-$(libxml2-version).tar.gz +# The libxml2 tarball also contains Python bindings which are built +# and installed to a system directory by default. If you don't need +# the Python bindings, the easiest solution is to compile without +# Python support: './configure --without-python'. If you really need +# the Python bindings, use '--with-python-install-dir=DIR' instead. 
+ tarball=libxml2-$(libxml2-version).tar.lz $(call import-source, $(libxml2-url), $(libxml2-checksum)) $(call gbuild, libxml2-$(libxml2-version), static, \ --without-python, V=1) @@ -956,7 +1003,7 @@ $(ibidir)/git-$(git-version): \ $(ibidir)/curl-$(curl-version) \ $(ibidir)/gettext-$(gettext-version) \ $(ibidir)/libiconv-$(libiconv-version) - tarball=git-$(git-version).tar.xz + tarball=git-$(git-version).tar.lz if [ x$(on_mac_os) = xyes ]; then export LDFLAGS="$$LDFLAGS -lcharset" fi @@ -971,27 +1018,40 @@ $(ibidir)/gmp-$(gmp-version): \ $(ibidir)/coreutils-$(coreutils-version) tarball=gmp-$(gmp-version).tar.lz $(call import-source, $(gmp-url), $(gmp-checksum)) + export CFLAGS="-std=gnu17 $$CFLAGS" $(call gbuild, gmp-$(gmp-version), static, \ --enable-cxx --enable-fat, \ - -j$(numthreads) ,make check) + -j$(numthreads)) echo "GNU Multiple Precision Arithmetic Library $(gmp-version)" > $@ # Less is useful with Git (to view the diffs within a minimal container) # and generally to view large files easily when the project is built in a # container with a minimal OS. $(ibidir)/less-$(less-version): $(ibidir)/ncurses-$(ncurses-version) - tarball=less-$(less-version).tar.gz + tarball=less-$(less-version).tar.lz $(call import-source, $(less-url), $(less-checksum)) - $(call gbuild, less-$(less-version), static,,-j$(numthreads)) + +# Without the '--with-regex=posix' option, the build will depend on +# PCRE (perl compatible regular expressions) which are not available +# on some systems/compilers and can cause a crash. Maneage was +# successfully built with the POSIX regular expression (regex), and +# 'less' is generally, an interactive software, not a batch-mode +# software (it is just added in 'basic.mk' because Git uses it to +# display things. Again, this is an interactive meta-operation in +# maneage (operations you only do when you are developing Maneage +# within Maneage interactively, and will not affect into the actual +# reproducible analysis!) 
+ $(call gbuild, less-$(less-version), static, \ + --with-regex=posix,-j$(numthreads)) if [ -f $(ibdir)/patchelf ]; then $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/less; fi echo "Less $(less-version)" > $@ # On Mac OS, libtool does different things, so to avoid confusion, we'll -# prefix GNU's libtool executables with `glibtool'. +# prefix GNU's libtool executables with 'glibtool'. $(ibidir)/libtool-$(libtool-version): $(ibidir)/m4-$(m4-version) - tarball=libtool-$(libtool-version).tar.xz + tarball=libtool-$(libtool-version).tar.lz $(call import-source, $(libtool-url), $(libtool-checksum)) $(call gbuild, libtool-$(libtool-version), static, \ --program-prefix=g, V=1 -j$(numthreads)) @@ -999,172 +1059,67 @@ $(ibidir)/libtool-$(libtool-version): $(ibidir)/m4-$(m4-version) echo "GNU Libtool $(libtool-version)" > $@ $(ibidir)/grep-$(grep-version): $(ibidir)/coreutils-$(coreutils-version) - tarball=grep-$(grep-version).tar.xz + tarball=grep-$(grep-version).tar.lz $(call import-source, $(grep-url), $(grep-checksum)) - $(call gbuild, grep-$(grep-version), static,,V=1) + $(call gbuild, grep-$(grep-version), static,, \ + -j$(numthreads) V=1) echo "GNU Grep $(grep-version)" > $@ -$(ibidir)/libbsd-$(libbsd-version): $(ibidir)/coreutils-$(coreutils-version) - tarball=libbsd-$(libbsd-version).tar.xz - $(call import-source, $(libbsd-url), $(libbsd-checksum)) - if [ x$(on_mac_os) = xyes ]; then - echo "" > $@ - else - $(call gbuild, libbsd-$(libbsd-version), static,,V=1) - echo "Libbsd $(libbsd-version)" > $@ - fi - -# We need to apply a patch to the M4 source to be used properly on macOS. -# The patch [1] was inspired by Homebrew's build instructions [1]. 
-# -# [1] https://raw.githubusercontent.com/macports/macports-ports/edf0ee1e2cf/devel/m4/files/secure_snprintf.patch -# [2] https://github.com/Homebrew/homebrew-core/blob/master/Formula/m4.rb -# # M4 doesn't depend on PatchELF, but just to be consistent with the # levels/phases introduced here (where the compressors are level 1, # PatchELF is level 2, and ...), we'll set it as a dependency. +# +# The '--with-syscmd-shell' is used as the default shell and if not given, +# 'm4' will use '/bin/sh' (which is not under Maneage control and can cause +# problems in 'high-level.mk' because it closes off the system's +# LD_LIBRARY_PATH and if the system's '/bin/sh' needs a special system +# library, the high-level programs will not be built). We are setting this +# default shell to Dash because M4 is built before our own Bash. Recall +# that Dash is built before we enter this Makefile. $(ibidir)/m4-$(m4-version): $(ibidir)/patchelf-$(patchelf-version) - tarball=m4-$(m4-version).tar.gz + tarball=m4-$(m4-version).tar.lz $(call import-source, $(m4-url), $(m4-checksum)) - cd $(ddir) - unpackdir=m4-$(m4-version) - rm -rf $$unpackdir - tar xf $(tdir)/$$tarball - mv m4-* $$unpackdir - cd $$unpackdir - if [ x$(on_mac_os) = xyes ]; then - sed 's|if !(((__GLIBC__ > 2|if !defined(__APPLE__) \&\& !(((__GLIBC__ > 2|' \ - lib/vasnprintf.c > lib/vasnprintf_edited.c - mv lib/vasnprintf_edited.c lib/vasnprintf.c - fi - ./configure --prefix=$(idir) LDFLAGS="$(LDFLAGS)" \ - CPPFLAGS="$(CPPFLAGS)" - make V=1 -j$(numthreads) - make V=1 install - cd .. - rm -rf $$unpackdir + export CFLAGS="-std=gnu17 $$CFLAGS" + $(call gbuild, m4-$(m4-version), static, \ + --with-syscmd-shell=$(ibdir)/dash, \ + -j$(numthreads) V=1) echo "GNU M4 $(m4-version)" > $@ -# Metastore is used (through a Git hook) to restore the source modification -# dates of files after a Git checkout. Another Git hook saves all file -# metadata just before a commit (to allow restoration after a -# checkout). 
Since this project is managed in Makefiles, file modification -# dates are critical to not having to redo the whole analysis after -# checking out between branches. -# -# Note that we aren't using the standard version of Metastore, but a fork -# of it that is maintained in this repository: -# https://gitlab.com/makhlaghi/metastore-fork -# -# Note that the prerequisites `coreutils', `gawk' and `sed' are not -# metastore oficial dependencies, but they are necessaries to run our steps -# before and after the installation. -# -# Libbsd is not necessary on macOS systems, because macOS is already a -# BSD-based distribution. But on GNU/Linux systems, it is necessary. -$(ibidir)/metastore-$(metastore-version): \ - $(ibidir)/sed-$(sed-version) \ - $(ibidir)/git-$(git-version) \ - $(ibidir)/gawk-$(gawk-version) \ - $(ibidir)/libbsd-$(libbsd-version) \ - $(ibidir)/coreutils-$(coreutils-version) - - # Download the tarball. - tarball=metastore-$(metastore-version).tar.gz - $(call import-source, $(metastore-url), $(metastore-checksum)) - - # Metastore doesn't have any `./configure' script. So we'll just - # call `pwd' as a place-holder for the `./configure' command. - # - # File attributes are also not available on some systems, since the - # main purpose here is modification dates (and not attributes), - # we'll also set the `NO_XATTR' flag. - # - # After installing Metastore, write the relevant hooks into this - # system's Git hooks, while setting the system-specific - # directories/files. - # - # Note that the metastore -O and -G options used in this template - # are currently only available in a fork of `metastore' hosted at: - # https://github.com/mohammad-akhlaghi/metastore - # - # Checking for presence of `.git'. When the project source is - # downloaded from a non-Git source (for example from arXiv), there - # is no `.git' directory to work with. So until we find a better - # solution, avoid the step to to add the Git hooks. 
- current_dir=$$(pwd); \ - $(call gbuild, metastore-$(metastore-version), static,, \ - NO_XATTR=1 V=1,,pwd,PREFIX=$(idir)) - - # Correct RPATH when necessary. - if [ -f $(ibdir)/patchelf ]; then - $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/metastore - fi - - # If this project is being built in a directory version controlled - # by Git, copy the hooks into the Git configuation. - if [ -f $(ibdir)/metastore ]; then - if [ -d .git ]; then - user=$$(whoami) - group=$$(groups | awk '{print $$1}') - cd $$current_dir - for f in pre-commit post-checkout; do - sed -e's|@USER[@]|'$$user'|g' \ - -e's|@GROUP[@]|'$$group'|g' \ - -e's|@BINDIR[@]|$(ibdir)|g' \ - -e's|@TOP_PROJECT_DIR[@]|'$$current_dir'|g' \ - reproduce/software/shell/git-$$f > .git/hooks/$$f - chmod +x .git/hooks/$$f - done - fi - echo "Metastore (forked) $(metastore-version)" > $@ - else - echo; echo; echo - echo "*****************" - echo "metastore couldn't be installed!" - echo - echo "Its used for preserving timestamps on Git commits." - echo "Its useful for development, not simple running of " - echo "the project. So we won't stop the configuration " - echo "because it wasn't built." - echo "*****************" - echo "" > $@ - fi - $(ibidir)/mpfr-$(mpfr-version): $(ibidir)/gmp-$(gmp-version) - tarball=mpfr-$(mpfr-version).tar.xz + tarball=mpfr-$(mpfr-version).tar.lz $(call import-source, $(mpfr-url), $(mpfr-checksum)) - $(call gbuild, mpfr-$(mpfr-version), static, , , make check) + $(call gbuild, mpfr-$(mpfr-version), static) echo "GNU Multiple Precision Floating-Point Reliably $(mpfr-version)" > $@ $(ibidir)/pkg-config-$(pkgconfig-version): $(ibidir)/patchelf-$(patchelf-version) - # Download the tarball. - tarball=pkg-config-$(pkgconfig-version).tar.gz +# Download the tarball. 
+ tarball=pkg-config-$(pkgconfig-version).tar.lz $(call import-source, $(pkgconfig-url), $(pkgconfig-checksum)) - # An existing `libiconv' can cause a conflict with `pkg-config', - # this is why `libiconv' depends on `pkg-config'. On a clean build, - # `pkg-config' is built first. But when we don't have a clean build - # (and `libiconv' exists) there will be a problem. So before - # re-building `pkg-config', we'll remove any installation of - # `libiconv'. +# An existing 'libiconv' can cause a conflict with 'pkg-config', this +# is why 'libiconv' depends on 'pkg-config'. On a clean build, +# 'pkg-config' is built first. But when we don't have a clean build +# (and 'libiconv' exists) there will be a problem. So before +# re-building 'pkg-config', we'll remove any installation of +# 'libiconv'. rm -f $(ildir)/libiconv* $(idir)/include/iconv.h - # Some Mac OS systems may have a version of the GNU C Compiler - # (GCC) installed that doesn't support some necessary features of - # building Glib (as part of pkg-config). So to be safe, for Mac - # systems, we'll make sure it will use LLVM's Clang. +# Some Mac OS systems may have a version of the GNU C Compiler (GCC) +# installed that doesn't support some necessary features of building +# Glib (as part of pkg-config). So to be safe, for Mac systems, we'll +# make sure it will use LLVM's Clang. 
if [ x$(on_mac_os) = xyes ]; then export compiler="CC=clang" else export compiler="" fi + export CFLAGS="-std=gnu17 $$CFLAGS" $(call gbuild, pkg-config-$(pkgconfig-version), static, \ $$compiler --with-internal-glib \ --with-pc-path=$(ildir)/pkgconfig, V=1) echo "pkg-config $(pkgconfig-version)" > $@ $(ibidir)/sed-$(sed-version): $(ibidir)/coreutils-$(coreutils-version) - tarball=sed-$(sed-version).tar.xz + tarball=sed-$(sed-version).tar.lz $(call import-source, $(sed-url), $(sed-checksum)) $(call gbuild, sed-$(sed-version), static,,V=1) echo "GNU Sed $(sed-version)" > $@ @@ -1172,7 +1127,18 @@ $(ibidir)/sed-$(sed-version): $(ibidir)/coreutils-$(coreutils-version) $(ibidir)/texinfo-$(texinfo-version): \ $(ibidir)/perl-$(perl-version) \ $(ibidir)/gettext-$(gettext-version) - tarball=texinfo-$(texinfo-version).tar.xz + +# Setting for the XS sub-package. "This is because in theory, the XS +# module could be built with a different compiler to the rest of the +# project, needing completely different flags" (part of [1]) +# +# [1] https://lists.gnu.org/archive/html/bug-texinfo/2022-08/msg00068.html + export PERL="$(ibdir)/perl" + export PERL_EXT_LDFLAGS="-L$(ildir)" + export PERL_EXT_CPPFLAGS="-I$(iidir)" + +# Basic build commands. + tarball=texinfo-$(texinfo-version).tar.lz $(call import-source, $(texinfo-url), $(texinfo-checksum)) $(call gbuild, texinfo-$(texinfo-version), static) if [ -f $(ibdir)/patchelf ]; then @@ -1182,14 +1148,14 @@ $(ibidir)/texinfo-$(texinfo-version): \ echo "GNU Texinfo $(texinfo-version)" > $@ $(ibidir)/which-$(which-version): $(ibidir)/coreutils-$(coreutils-version) - tarball=which-$(which-version).tar.gz + tarball=which-$(which-version).tar.lz $(call import-source, $(which-url), $(which-checksum)) $(call gbuild, which-$(which-version), static) echo "GNU Which $(which-version)" > $@ # GNU ISL is necessary to build GCC. 
$(ibidir)/isl-$(isl-version): $(ibidir)/gmp-$(gmp-version) - tarball=isl-$(isl-version).tar.bz2 + tarball=isl-$(isl-version).tar.lz $(call import-source, $(isl-url), $(isl-checksum)) if [ $(host_cc) = 1 ]; then echo "" > $@ @@ -1201,13 +1167,13 @@ $(ibidir)/isl-$(isl-version): $(ibidir)/gmp-$(gmp-version) # GNU MPC is necessary to build GCC. $(ibidir)/mpc-$(mpc-version): $(ibidir)/mpfr-$(mpfr-version) - tarball=mpc-$(mpc-version).tar.gz + tarball=mpc-$(mpc-version).tar.lz $(call import-source, $(mpc-url), $(mpc-checksum)) if [ $(host_cc) = 1 ]; then echo "" > $@ else $(call gbuild, mpc-$(mpc-version), static, , \ - -j$(numthreads), make check) + -j$(numthreads)) echo "GNU Multiple Precision Complex library" > $@ fi @@ -1224,33 +1190,35 @@ $(ibidir)/mpc-$(mpc-version): $(ibidir)/mpfr-$(mpfr-version) # ----------------------- # # The installation of Binutils can cause problems during the build of other -# programs (http://savannah.nongnu.org/bugs/?56294), but its necessary for +# programs since it provides the linker that is used to build them +# (http://savannah.nongnu.org/bugs/?56294). However, it is necessary for # GCC. Therefore, we'll set all other basic programs as Binutils -# prerequisite and GCC (the final basic target) ultimately just depends on -# Binutils. +# prerequisites, so GCC (the almost-final basic target) ultimately just +# depends on Binutils. 
$(ibidir)/binutils-$(binutils-version): \ - $(ibidir)/sed-$(sed-version) \ + $(ibidir)/git-$(git-version) \ $(ibidir)/isl-$(isl-version) \ $(ibidir)/mpc-$(mpc-version) \ - $(ibidir)/wget-$(wget-version) \ - $(ibidir)/grep-$(grep-version) \ + $(ibidir)/sed-$(sed-version) \ $(ibidir)/file-$(file-version) \ $(ibidir)/gawk-$(gawk-version) \ + $(ibidir)/grep-$(grep-version) \ + $(ibidir)/wget-$(wget-version) \ + $(ibidir)/bison-$(bison-version) \ $(ibidir)/which-$(which-version) \ - $(ibidir)/texinfo-$(texinfo-version) \ $(ibidir)/libtool-$(libtool-version) \ - $(ibidir)/metastore-$(metastore-version) \ - $(ibidir)/findutils-$(findutils-version) \ + $(ibidir)/texinfo-$(texinfo-version) \ + $(ibidir)/coreutils-$(coreutils-version) \ $(ibidir)/diffutils-$(diffutils-version) \ - $(ibidir)/coreutils-$(coreutils-version) + $(ibidir)/findutils-$(findutils-version) - # Download the tarball. +# Download the tarball. tarball=binutils-$(binutils-version).tar.lz $(call import-source, $(binutils-url), $(binutils-checksum)) - # Binutils' assembler (`as') and linker (`ld') will conflict with - # other compilers. So if we don't build our own compiler, we'll use - # the host opertating system's equivalents by just making links. +# Binutils' assembler ('as') and linker ('ld') will conflict with +# other compilers. So if we don't build our own compiler, we'll use +# the host opertating system's equivalents by just making links. if [ x$(on_mac_os) = xyes ]; then $(call makelink,as) $(call makelink,ar) @@ -1262,20 +1230,21 @@ $(ibidir)/binutils-$(binutils-version): \ echo "" > $@ else - # Build binutils with the standard 'gbuild' function. +# Build binutils with the standard 'gbuild' function. $(call gbuild, binutils-$(binutils-version), static, \ - --with-lib-path=$(sys_library_path), \ - -j$(numthreads) ) - - # The `ld' linker of Binutils needs several `*crt*.o' files from - # the host's GNU C Library to run. On some systems these object - # files aren't installed in standard places. 
We defined - # `LIBRARY_PATH' and that fixed the problem for many - # systems. However, some software (for example ImageMagick) - # over-write `LIBRARY_PATH', therefore there is no other way than - # to put a link to these necessary files in our local build - # directory. IMPORTANT NOTE: later, when we build the GNU C - # Library in the project, we should remove this step. + --with-lib-path=$(sys_library_path) \ + --enable-gprofng=no, \ + -j$(numthreads) V=1) + +# The 'ld' linker of Binutils needs several '*crt*.o' files from +# the host's GNU C Library to run. On some systems these object +# files aren't installed in standard places. We defined +# 'LIBRARY_PATH' and that fixed the problem for many +# systems. However, some software (for example ImageMagick) +# over-write 'LIBRARY_PATH', therefore there is no other way than +# to put a link to these necessary files in our local build +# directory. IMPORTANT NOTE: later, when we build the GNU C Library +# in the project, we should remove this step. if ! [ x"$(sys_library_path)" = x ]; then for f in $(sys_library_path)/*crt*.o; do b=$$($(ibdir)/basename $$f) @@ -1283,11 +1252,11 @@ $(ibidir)/binutils-$(binutils-version): \ done fi - # Write the final target. +# Write the final target. echo "GNU Binutils $(binutils-version)" > $@ fi -# We are having issues with `libiberty' (part of GCC) on Mac. So for now, +# We are having issues with 'libiberty' (part of GCC) on Mac. So for now, # GCC won't be built there. Since almost no natural science paper's # processing depends so strongly on the compiler used, for now, this isn't # a bad assumption, but we are indeed searching for a solution. @@ -1296,13 +1265,13 @@ $(ibidir)/binutils-$(binutils-version): \ # environment. So, we'll build GCC after building all the basic tools that # are often used in a configure and build scripts of GCC components. # -# Objective C and Objective C++ is necessary for installing `matplotlib'. 
+# Objective C and Objective C++ is necessary for installing 'matplotlib'. # # We are currently having problems installing GCC on macOS, so for the time # being, if the project is being run on a macOS, we'll just set a link. $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) - # Function to let the users know what to do if build fails. +# Function to let the users know what to do if build fails. error_message() { echo; echo echo "_________________________________________________" @@ -1323,45 +1292,41 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) echo; exit 1 } - # Download the tarball. - tarball=gcc-$(gcc-version).tar.xz +# Download the tarball. + tarball=gcc-$(gcc-version).tar.lz $(call import-source, $(gcc-url), $(gcc-checksum)) - # To avoid any previous build in '.local/bin' causing problems in - # this build/links of this GCC, we'll first delete all the possibly - # built/existing compilers in this project. Note that GCC also - # installs several executables like this 'x86_64-pc-linux-gnu-gcc', - # 'x86_64-pc-linux-gnu-gcc-ar' or 'x86_64-pc-linux-gnu-g++'. +# To avoid any previous build in '.local/bin' causing problems in +# this build/links of this GCC, we'll first delete all the possibly +# built/existing compilers in this project. Note that GCC also +# installs several executables like this 'x86_64-pc-linux-gnu-gcc', +# 'x86_64-pc-linux-gnu-gcc-ar' or 'x86_64-pc-linux-gnu-g++'. rm -f $(ibdir)/*g++ $(ibdir)/cpp $(ibdir)/gfortran rm -rf $(ildir)/gcc $(ildir)/libcc* $(ildir)/libgcc* rm -f $(ibdir)/*gcc* $(ibdir)/gcov* $(ibdir)/cc $(ibdir)/c++ rm -rf $(ildir)/libgfortran* $(ildir)/libstdc* rm $(idir)/x86_64* - # GCC builds is own libraries in '$(idir)/lib64'. But all other - # libraries are in '$(idir)/lib'. Since this project is only for a - # single architecture, we can trick GCC into building its libraries - # in '$(idir)/lib' by defining the '$(idir)/lib64' as a symbolic - # link to '$(idir)/lib'. 
+# Build (or set links) to GCC. if [ $(host_cc) = 1 ]; then - # Put links to the host's tools in '.local/bin'. Note that some - # macOS systems have both a native clang *and* a GNU C Compiler - # (note that this is different from the "normal" macOS situation - # where 'gcc' actually points to clang, here we mean when 'gcc' - # is actually the GNU C Compiler). - # - # In such cases, the GCC isn't complete and using it will cause - # problems when building high-level tools (for example openBLAS, - # rpcsvc-proto, CMake, xlsxio, Python or Matplotlib among - # others). To avoid such situations macOSs are configured like - # this: we'll simply set 'gcc' to point to 'clang' and won't set - # 'gcc' to point to the system's 'gcc'. - # - # Also, note that LLVM's clang doesn't have a C Pre-Processor. So - # we will only put a link to the host's 'cpp' if the system is - # not macOS. On macOS systems that have a real GCC installed, - # having GNU CPP in the project build directory is known to cause - # problems with 'libX11'. +# Put links to the host's tools in '.local/bin'. Note that some +# macOS systems have both a native clang *and* a GNU C Compiler +# (note that this is different from the "normal" macOS situation +# where 'gcc' actually points to clang, here we mean when 'gcc' is +# actually the GNU C Compiler). +# +# In such cases, the GCC isn't complete and using it will cause +# problems when building high-level tools (for example openBLAS, +# rpcsvc-proto, CMake, xlsxio, Python or Matplotlib among +# others). To avoid such situations macOSs are configured like +# this: we'll simply set 'gcc' to point to 'clang' and won't set +# 'gcc' to point to the system's 'gcc'. +# +# Also, note that LLVM's clang doesn't have a C Pre-Processor. So +# we will only put a link to the host's 'cpp' if the system is not +# macOS. On macOS systems that have a real GCC installed, having +# GNU CPP in the project build directory is known to cause problems +# with 'libX11'. 
$(call makelink,gfortran) if [ x$(on_mac_os) = xyes ]; then $(call makelink,clang) @@ -1374,52 +1339,121 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) $(call makelink,g++) fi - # We also want to have the two 'cc' and 'c++' in the build - # directory that point to the selected compiler. With the checks - # above, 'gcc' and 'g++' will point to the proper compiler, so - # we'll use them to define 'cc' and 'c++'. +# We also want to have the two 'cc' and 'c++' in the build +# directory that point to the selected compiler. With the checks +# above, 'gcc' and 'g++' will point to the proper compiler, so +# we'll use them to define 'cc' and 'c++'. $(call makelink,gcc,,cc) $(call makelink,g++,,c++) - # Get the first line of the compiler's '--version' output and put - # that into the target (so we know want compiler was used). +# Get the first line of the compiler's '--version' output and put +# that into the target (so we know want compiler was used). ccinfo=$$(gcc --version | awk 'NR==1') echo "C compiler (""$$ccinfo"")" > $@ else - # Mark the current directory. +# Mark the current directory. current_dir=$$(pwd) - # We don't want '.local/lib' and '.local/lib64' to be separate. - ln -fs $(ildir) $(idir)/lib64 - - # By default we'll build GCC in the RAM to avoid building so many - # files and possibly harming the hard-drive or SSD. But if the - # RAM doesn't have enough space, we can't use it. - in_ram=$$(df $(ddir) \ - | awk 'NR==2{print ($$4>10000000) ? "yes" : "no"}'); \ +# By default 'ddir' (where GCC is decompressed and built) is in the +# RAM (on systems that support a '/dev/shm' RAM disk). This is done +# to avoid building so many small/temporary files and possibly +# harming the hard-drive or SSD. But if the RAM doesn't have enough +# space, we should use the hard-drive or SSD. During its build, +# GCC's build directory will become several gigabytes and the build +# also needs RAM. 
You can track the RAM usage of the system with a +# 1-second resolution (if no other RAM consuming program is running +# while building GCC) with the command below (example outputs can +# be seen in https://savannah.nongnu.org/task/index.php?16623). +# +# c=1; while true; do POSIXLY_CORRECT=1 df -P /dev/shm/maneage-* | awk 'NR==2{print '$c', $3}'; c=$((c+1)); sleep 1; done > mem-usage.txt +# asttable mem-usage.txt -c1,'arith $2 512 x 1024 / 1024 / 1024 /' -o mem.fits +# +# For POSIX portability and longevity (default sizes might change), +# we use the '-P' option, and we use the environment variable +# POSIXLY_CORRECT=1, so the 'block size' is 512 bytes. In this way, +# to get the actual GiB amount, multiply the value returned above +# by 512 (B/block), then divide by 1024^3 (B/GiB). +# +# To get the final value to use here, get the maximum used value +# after GCC is fully built and you have stopped the 'while true' +# command above. You can do this with the command below (assumes +# you have Gnuastro). +# +# aststatistics mem-usage.txt -c2 --maximum | asttable -c'arith $1 7000000 +' -Afixed -B0 +# +# The extra space is because we will assume an extra 3 GiB = 3GiB * +# 1024^3 (B/GiB) / 512 (B/block) = 6291456 blocks are necessary for +# the building (let's round it to 7000000!). +# +# Therefore, we need to make sure that the running system more than +# the necessary amount of space in the RAM. To do this, we use 'df' +# below. +# +# The 4th column of 'df' is the "available" space at the time of +# running, not the full space. So the 'RAM disk' that the OS +# will be using as "pretend" disk space (e.g. using 'tmpfs'; this +# is physically RAM, but appears as if it is disk space) +# during this stage of Maneage is accounted for. GCC is built +# alone - no other Maneage software is built at the same time as +# GCC - so this amount of RAM should be enough. + in_ram=$$(POSIXLY_CORRECT=1 df -P $(ddir) \ + | awk 'NR==2{print ($$4>26613216) ? 
"yes" : "no"}'); \ if [ $$in_ram = "yes" ]; then odir=$(ddir) else - odir=$(BDIR)/software/build-tmp-gcc + odir=$(BDIR)/software/build-tmp-gcc-due-to-lack-of-space if [ -d $$odir ]; then rm -rf $$odir; fi mkdir $$odir fi - # Go into the proper directory, unpack GCC and prepare the - # 'build' directory inside it for all the built files. +# Go into the directory to uncompress GCC. cd $$odir + +# Unpack GCC and prepare the 'build' directory inside it for all +# the built files. rm -rf gcc-$(gcc-version) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions if [ $$odir != $(ddir) ]; then ln -s $$odir/gcc-$(gcc-version) $(ddir)/gcc-$(gcc-version) fi cd gcc-$(gcc-version) + $(shsrcdir)/prep-source.sh $(ibdir) + +# Unfortunately binutils installs headers like 'ansidecl.h' that +# have been seen to conflict with GCC's internal versions of those +# headers. For example in the 'ansidecl.h' of Binutils 2.39, the +# 'PTR' macro isn't defined, while the same file in GCC 12.1.0 has +# defined it. Therefore, without this change, GCC will include the +# file installed from Binutils, not find what it needs and crash! +# Therefore, with the 'CPPFLAGS' modification below, we tell GCC to +# first look into its own 'include' directory before anything else. + export CPPFLAGS="-I$$(pwd)/include $(CPPFLAGS)" + +# In the GNU C Library 2.36 (which is more recent than GCC 12.1.0), +# the 'linux/mount.h' (loaded by 'linux/fs.h', which is loaded by +# 'libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp' +# in GCC) conflicts with 'sys/mount.h' which is directly loaded by +# the same file! This is a known conflict in glibc 2.36 (see +# [1]). As described in [1], one solution is the final job done in +# [2]. We therefore do this process here: 1) Not loading +# 'linux/fs.h', and adding the necessary macros directly. 
+# +# [1] https://sourceware.org/glibc/wiki/Release/2.36#Usage_of_.3Clinux.2Fmount.h.3E_and_.3Csys.2Fmount.h.3E +# [2] https://reviews.llvm.org/D129471 + sed -e's|\#include <linux/fs.h>||' \ + -e"s|FS_IOC_GETFLAGS;|_IOR('f', 1, long);|" \ + -e"s|FS_IOC_GETVERSION;|_IOR('v', 1, long);|" \ + -e"s|FS_IOC_SETFLAGS;|_IOW('f', 2, long);|" \ + -e"s|FS_IOC_SETVERSION;|_IOW('v', 2, long);|" \ + -i libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp + +# Set the build directory for the processing. mkdir build cd build - # Configure, build and install GCC, if any of three steps fails, - # the error message will be printed. +# Configure, build and install GCC, if any of three steps fails, +# the error message will be printed. if ! ../configure SHELL=$(ibdir)/bash \ --prefix=$(idir) \ --with-mpc=$(idir) \ @@ -1438,26 +1472,25 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) --enable-languages=c,c++,fortran,objc,obj-c++ \ --disable-nls \ --disable-libada \ - --disable-multilib \ - --disable-multiarch; then error_message; fi + --disable-multilib; then error_message; fi if ! make SHELL=$(ibdir)/bash -j$(numthreads); then error_message; fi if ! make SHELL=$(ibdir)/bash install; then error_message; fi - # We need to manually fix the RPATH inside GCC's libraries, the - # programs built by GCC already have RPATH. +# We need to manually fix the RPATH inside GCC's libraries, the +# programs built by GCC already have RPATH. tempname=$$odir/gcc-$(gcc-version)/build/rpath-temp-copy if [ -f $(ibdir)/patchelf ]; then - # Go over all the installed GCC libraries (its executables are - # fine!). +# Go over all the installed GCC libraries (its executables are +# fine!). for f in $$(find $(idir)/libexec/gcc -type f) $(ildir)/libstdc++*; do - # Make sure this is a static library, copy it to a temporary - # name (to avoid any possible usage of the file while it is - # being corrected), and add RPATH inside of it and put the - # corrected file back in its place. 
In the case of the - # standard C++ library, we also need to manually insert a - # linking to libiconv. +# Make sure this is a static library, copy it to a temporary +# name (to avoid any possible usage of the file while it is +# being corrected), and add RPATH inside of it and put the +# corrected file back in its place. In the case of the standard +# C++ library, we also need to manually insert a linking to +# libiconv. if file $$f | grep -q "dynamically linked"; then cp $$f $$tempname patchelf --set-rpath $(ildir) $$tempname @@ -1471,8 +1504,8 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) done fi - # Come back up to the un-packing directory and delete the GCC - # source directory. +# Come back up to the un-packing directory and delete the GCC +# source directory. cd ../.. rm -rf gcc-$(gcc-version) cd $$current_dir @@ -1481,11 +1514,11 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) rm $(ddir)/gcc-$(gcc-version); fi - # Set 'cc' to point to 'gcc'. +# Set 'cc' to point to 'gcc'. ln -sf $(ibdir)/gcc $(ibdir)/cc ln -sf $(ibdir)/g++ $(ibdir)/c++ - # Write the final target. +# Write the final target. echo "GNU Compiler Collection (GCC) $(gcc-version)" > $@ fi @@ -1493,29 +1526,58 @@ $(ibidir)/gcc-$(gcc-version): $(ibidir)/binutils-$(binutils-version) +# Level 6: need re-compilation +# ---------------------------- +# +# The initial build of these was done with the host's settings, which will +# cause problems later when we completely close-off the host environment. 
+$(ibidir)/make-$(make-version): $(ibidir)/gcc-$(gcc-version) + tarball=make-$(make-version).tar.lz + $(call import-source, $(make-url), $(make-checksum)) + $(call gbuild, make-$(make-version), static, \ + --disable-dependency-tracking --without-guile) + echo "GNU Make $(make-version)" > $@ + +$(ibidir)/lzip-$(lzip-version): $(ibidir)/gcc-$(gcc-version) + tarball=lzip-$(lzip-version).tar + unpackdir=lzip-$(lzip-version) + cd $(ddir) + rm -rf $$unpackdir + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) + ./configure --build --check --installdir="$(ibdir)" + if [ -f $(ibdir)/patchelf ]; then + $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/lzip; + fi + cd .. + rm -rf $$unpackdir + echo "Lzip $(lzip-version)" > $@ -# Level 6: Basic text editor +# Level 7: Basic text editor # -------------------------- # # If the project is built in a minimal environment, there is no text # editor, making it hard to work on the project. By default a minimal # (relatively user-friendly: GNU Nano) text editor will thus also be built -# at the end of the "basic" tools. More advanced editors are available as -# optional high-level programs. GNU Nano is a very light-weight and small -# command-line text editor (around 3.5 Mb after installation!). +# at the end of the "basic" tools. More advanced editors (for example Emacs +# and Vim) are available as optional high-level programs. GNU Nano is a +# very light-weight and small command-line text editor (around 3.5 Mb after +# installation!). # # The editor is a top-level target in the basic tools (given to # 'targets-proglib' above). Hence nothing depends on it, and it just # depends on GCC. This is done because some projects may choose to not have -# nano (and use their own optional high-level text editor). To do this, -# they just have to manually remove 'nano' from 'targets-proglib' above and +# nano (and use their own optional high-level text editor). 
To do this, you +# can just have to manually remove 'nano' from 'targets-proglib' above and # add their optional text editor in 'TARGETS.conf'. -$(ibidir)/nano-$(nano-version): $(ibidir)/gcc-$(gcc-version) - tarball=nano-$(nano-version).tar.xz +$(ibidir)/nano-$(nano-version): $(ibidir)/lzip-$(lzip-version) \ + $(ibidir)/make-$(make-version) + tarball=nano-$(nano-version).tar.lz $(call import-source, $(nano-url), $(nano-checksum)) $(call gbuild, nano-$(nano-version), static) echo "GNU Nano $(nano-version)" > $@ diff --git a/reproduce/software/make/build-rules.mk b/reproduce/software/make/build-rules.mk index 66c77bc..463fbbf 100644 --- a/reproduce/software/make/build-rules.mk +++ b/reproduce/software/make/build-rules.mk @@ -3,7 +3,7 @@ # imported into 'basic.mk' and 'high-level.mk'. They should be activated # with Make's 'Call' function. # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,8 +29,13 @@ # its checksum and if it is correct, remove the extra suffix. # # Arguments: -# 1: The optional URL to use for this tarball. -# 2: The expeced checksum of the tarball. +# 1: The optional base URL (directory) to use for this tarball. +# 2: The expected checksum of the tarball. +# 3: The upstream name of the tarball file, if not automatically derived +# from the version number. +# 4: [Optional]: Alternative upstream base URL (directory) for the +# tarball, to be used in preference to user or Maneage backup +# servers. 
# # Necessary shell variables # 'tarball': This is the name of the actual tarball file without a @@ -56,7 +61,14 @@ import-source = final=$(tdir)/$$tarball; \ tarballurl=$(topbackupserver)/$$tarball; \ else \ bservers="$(backupservers_all)"; \ - tarballurl=$$url/$$tarball; \ + if [ "x$(strip $(3))" = "x" ]; then \ + tarballurl=$$url/$$tarball; \ + else \ + tarballurl=$$url/$(strip $(3)); \ + fi; \ + fi; \ + if [ x"$(4)" != x ]; then \ + bservers="$(strip $(4)) $$bservers"; \ fi; \ if [ -f $(ibdir)/wget ]; then \ downloader="wget --no-use-server-timestamps -O"; \ @@ -89,6 +101,48 @@ import-source = final=$(tdir)/$$tarball; \ +# Double-check an already downloaded R source +# ------------------------------------------- +# +# It is probably too late to protect the system if you have already +# installed an insecure or wrong R package. However, it's still useful +# to check that the source package is the one that you think it is. +# +# Calculate the checksum and exit with a non-zero error code if +# there's a mismatch, after informing the user. +# +# Arguments: +# 1: The expected checksum of the tarball. +# +# Necessary shell variables +# 'tarball': This is the name of the actual tarball file without a +# directory. 
+double-check-R-source = final=$(tdir)/R-project/$$tarball; \ + exp_checksum="$(strip $(1))"; \ + if [ x"$$exp_checksum" = x"NO-CHECK-SUM" ]; then \ + result=0; \ + else \ + if type sha512sum > /dev/null 2>/dev/null; then \ + checksum=$$(sha512sum "$$final" | awk '{print $$1}'); \ + if [ x"$$checksum" = x"$$exp_checksum" ]; then \ + result=0; \ + else \ + echo "ERROR: Non-matching checksum: $$final"; \ + echo "Checksum should be: $$exp_checksum"; \ + echo "Checksum is: $$checksum"; \ + result=1; \ + exit 1; \ + fi; \ + else \ + echo "ERROR: sha512sum is unavailable."; \ + exit 1; \ + fi; \ + fi + + + + + # Unpack a tarball # ---------------- # @@ -106,7 +160,7 @@ uncompress = csuffix=$$(echo $$utarball \ intarrm=0; \ intar=$$utarball; \ fi; \ - if tar xf $$intar; then \ + if tar -xf $$intar --no-same-owner --no-same-permissions; then \ if [ x$$intarrm = x1 ]; then rm $$intar; fi; \ else \ echo; echo "Tar error"; exit 1; \ @@ -153,23 +207,19 @@ gbuild = if [ x$(static_build) = xyes ] && [ "x$(2)" = xstatic ]; then \ else confscript="$(strip $(6))"; \ fi; \ \ - if [ -f $(ibdir)/bash ]; then \ - if [ -f "$$confscript" ]; then \ - sed -e's|\#\! /bin/sh|\#\! $(ibdir)/bash|' \ - -e's|\#\!/bin/sh|\#\! 
$(ibdir)/bash|' \ - $$confscript > $$confscript-tmp; \ - mv $$confscript-tmp $$confscript; \ - chmod +x $$confscript; \ - fi; \ + $(shsrcdir)/prep-source.sh $(ibdir); \ + if [ -f $(ibdir)/bash ]; then \ shellop="SHELL=$(ibdir)/bash"; \ - elif [ -f /bin/bash ]; then shellop="SHELL=/bin/bash"; \ - else shellop="SHELL=/bin/sh"; \ + else shellop="SHELL=$(ibdir)/dash"; \ fi; \ \ + if [ x$$gbuild_prefix = x ]; then prefixdir="$(idir)"; \ + else prefixdir="$$gbuild_prefix"; fi; \ + \ if [ -f "$$confscript" ]; then \ if [ x"$(strip $(1))" = x"zlib-$(zlib-version)" ]; then \ - configop="--prefix=$(idir)"; \ - else configop="$$shellop --prefix=$(idir)"; \ + configop="--prefix=$$prefixdir"; \ + else configop="$$shellop --prefix=$$prefixdir"; \ fi; \ fi; \ \ @@ -190,7 +240,7 @@ gbuild = if [ x$(static_build) = xyes ] && [ "x$(2)" = xstatic ]; then \ make "$$shellop" install $(7); \ cd ..; \ fi; \ - rm -rf $(1) + rm -rf $(1); @@ -198,10 +248,7 @@ gbuild = if [ x$(static_build) = xyes ] && [ "x$(2)" = xstatic ]; then \ # CMake # ----- # -# According to the link below, in CMake '/bin/sh' is hardcoded, so there is -# no way to change it unfortunately! -# -# https://stackoverflow.com/questions/21167014/how-to-set-shell-variable-in-makefiles-generated-by-cmake +# Used by packages that are built with CMake. cbuild = if [ x$(static_build) = xyes ] && [ $(2)x = staticx ]; then \ export LDFLAGS="$$LDFLAGS -static"; \ opts="-DBUILD_SHARED_LIBS=OFF"; \ @@ -211,13 +258,18 @@ cbuild = if [ x$(static_build) = xyes ] && [ $(2)x = staticx ]; then \ utarball=$(tdir)/$$tarball; \ $(call uncompress); \ cd $(1); \ - rm -rf project-build; \ - mkdir project-build; \ - cd project-build; \ + $(shsrcdir)/prep-source.sh $(ibdir); \ + if [ -f $(ibdir)/bash ]; then \ + shellop="SHELL=$(ibdir)/bash"; \ + else shellop="SHELL=$(ibdir)/dash"; \ + fi; \ + rm -rf maneage-build; \ + mkdir maneage-build; \ + cd maneage-build; \ cmake .. 
-DCMAKE_LIBRARY_PATH=$(ildir) \ -DCMAKE_INSTALL_PREFIX=$(idir) \ -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON $$opts $(3); \ - make; \ - make install; \ + make $$shellop; \ + make $$shellop install; \ cd ../..; \ rm -rf $(1) diff --git a/reproduce/software/make/high-level.mk b/reproduce/software/make/high-level.mk index 6ea782c..67ca8b6 100644 --- a/reproduce/software/make/high-level.mk +++ b/reproduce/software/make/high-level.mk @@ -3,7 +3,7 @@ # ------------------------------------------------------------------------ # !!!!! IMPORTANT NOTES !!!!! # -# This Makefile will be run by the initial `./project configure' script. It +# This Makefile will be run by the initial './project configure' script. It # is not included into the project afterwards. # # This Makefile builds the high-level (optional) software in Maneage that @@ -12,8 +12,8 @@ # # ------------------------------------------------------------------------ # -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,7 +29,7 @@ # along with this Makefile. If not, see <http://www.gnu.org/licenses/>. 
# Top level environment (same as 'basic.mk') -include reproduce/software/config/LOCAL.conf +include .build/software/config/LOCAL.conf include reproduce/software/make/build-rules.mk include reproduce/software/config/versions.conf include reproduce/software/config/checksums.conf @@ -49,6 +49,7 @@ ddir = $(BDIR)/software/build-tmp idir = $(BDIR)/software/installed ibdir = $(BDIR)/software/installed/bin ildir = $(BDIR)/software/installed/lib +iidir = $(BDIR)/software/installed/include ibidir = $(BDIR)/software/installed/version-info/proglib # Basic directories (specific to this Makefile). @@ -60,6 +61,15 @@ patchdir = "$(shell pwd)"/reproduce/software/patches itidir = $(BDIR)/software/installed/version-info/tex ictdir = $(BDIR)/software/installed/version-info/cite ipydir = $(BDIR)/software/installed/version-info/python +ircrandir = $(BDIR)/software/installed/version-info/r-cran +ilibrcrandir = $(BDIR)/software/installed/lib/R/library + +# Special files. +makewshell = $(ibdir)/make-with-shell + + + + # Targets to build. ifeq ($(strip $(all_highlevel)),1) @@ -75,7 +85,7 @@ ifeq ($(strip $(all_highlevel)),1) # included here because there is no explicit target for them: they will # be built as part of the other package. targets-proglib := $(filter-out minizip-% lapack-% ghostscript-fonts-%, \ - $(shell awk '/^# CLASS:PYTHON/{good=0} \ + $(shell awk '/^# CLASS:(PYTHON|R-CRAN)/{good=0} \ good==1 && !/^#/ && $$1 ~ /-version$$/ { \ printf("%s %s ", $$1, $$3)} \ /^# CLASS:HIGHLEVEL/{good=1}' \ @@ -85,22 +95,40 @@ ifeq ($(strip $(all_highlevel)),1) # List all existing Python packages. 
targets-python := $(shell \ - awk '/^# CLASS:PYTHON/{good=1} \ - good==1 && !/^#/ && $$1 ~ /-version$$/ {printf("%s %s ",$$1,$$3)}' \ - reproduce/software/config/versions.conf | sed 's/version //g') + awk '/^# CLASS:PYTHON-START/{good=1} good; \ + /^# CLASS:PYTHON-END/{good=0}' \ + reproduce/software/config/versions.conf \ + | awk '!/^#/' \ + | sed 's/-version = /-/g') + + # List all existing R-CRAN packages. + targets-r-cran := $(shell \ + awk '/^# CLASS:R-CRAN-START/{good=1} good; \ + /^# CLASS:R-CRAN-END/{good=0}' \ + reproduce/software/config/versions.conf \ + | awk '!/^#/' \ + | sed 's/-version = /-/g') else - # Append the version of each software to its name. We are using a Make + # Append the version of each software package to its name. We are using a Make # feature where a variable name is defined with another variable. targets-python := $(foreach p,$(top-level-python),$(p)-$($(p)-version)) + targets-r-cran := $(foreach p,$(top-level-r-cran),$(p)-$($(p)-version)) targets-proglib := $(foreach p,$(top-level-programs),$(p)-$($(p)-version)) endif -# Ultimate Makefile target. +# Disable the TeXLive target if `--offline` +ifneq ($(strip $(offline)),1) + target-texlive := $(itidir)/texlive +endif + +# Ultimate Makefile target. The recipe is '@echo > /dev/null' so Make does +# not print "make: Nothing to be done for 'all'." 
all: $(foreach p, $(targets-proglib), $(ibidir)/$(p)) \ $(foreach p, $(targets-python), $(ipydir)/$(p)) \ - $(itidir)/texlive + $(foreach p, $(targets-r-cran), $(ircrandir)/$(p)) \ + $(target-texlive); @echo > /dev/null # Define the shell environment # ---------------------------- @@ -112,14 +140,18 @@ all: $(foreach p, $(targets-proglib), $(ibidir)/$(p)) \ # # To investigate: # -# 1) Set SHELL to `$(ibdir)/env - NAME=VALUE $(ibdir)/bash' and set all -# the parameters defined bellow as `NAME=VALUE' statements before +# 1) Set SHELL to '$(ibdir)/env - NAME=VALUE $(ibdir)/bash' and set all +# the parameters defined bellow as 'NAME=VALUE' statements before # calling Bash. This will enable us to completely ignore the user's # native environment. # -# 2) Add `--noprofile --norc' to `.SHELLFLAGS' so doesn't load the +# 2) Add '--noprofile --norc' to '.SHELLFLAGS' so doesn't load the # user's environment. # +# 3) Add the '-u' flag so that an error occurs if an environment +# variable is empty; this reduces the chance of catastrophic +# file removal with 'rm -fr ../../$${FORGOT_TO_DEFINE_THIS}'. +# # Shell settings similar to 'basic.mk': .ONESHELL: export PATH := $(ibdir) @@ -128,9 +160,11 @@ export SHELL := $(ibdir)/bash .SHELLFLAGS := --noprofile --norc -ec export LDFLAGS := $(rpath_command) -L$(ildir) export PKG_CONFIG_LIBDIR := $(ildir)/pkgconfig -export CPPFLAGS := -I$(idir)/include -Wno-nullability-completeness export PKG_CONFIG_PATH := $(ildir)/pkgconfig:$(idir)/share/pkgconfig +# Disable built-in rules (which are not needed here!) +.SUFFIXES: + # Settings specific to this Makefile. export CC := $(ibdir)/gcc export CXX := $(ibdir)/g++ @@ -138,6 +172,13 @@ export F77 := $(ibdir)/gfortran export LD_RUN_PATH := $(ildir):$(il64dir) export LD_LIBRARY_PATH := $(ildir):$(il64dir) +# See description of '-Wno-nullability-completeness' in +# 'reproduce/software/shell/configure.sh'. 
+ifeq ($(on_mac_os),yes) + noccwarnings=-Wno-nullability-completeness +endif +export CPPFLAGS := -I$(idir)/include $(noccwarnings) + # In macOS, if a directory exists in both 'C_INCLUDE_PATH' and 'CPPFLAGS' # it will be ignored in 'CPPFLAGS' (which has higher precedence). So, we # should not define 'C_INCLUDE_PATH' on macOS. This happened with clang @@ -147,16 +188,16 @@ export C_INCLUDE_PATH := $(iidir) export CPLUS_INCLUDE_PATH := $(iidir) endif -# Recipe startup script, see `reproduce/software/shell/bashrc.sh'. +# Recipe startup script, see 'reproduce/software/shell/bashrc.sh'. export PROJECT_STATUS := configure_highlevel export BASH_ENV := $(shell pwd)/reproduce/software/shell/bashrc.sh # Until we build our own C library, without this, our GCC won't be able to # compile anything! Note that on most systems (in particular -# non-Debian-based), `sys_cpath' will be empty. +# non-Debian-based), 'sys_cpath' will be empty. export CPATH := $(sys_cpath) -# RPATH is automatically written in macOS, so `DYLD_LIBRARY_PATH' is +# RPATH is automatically written in macOS, so 'DYLD_LIBRARY_PATH' is # ultimately redundant. But on some systems, even having a single value # causes crashs (see bug #56682). So we'll just give it no value at all. export DYLD_LIBRARY_PATH := @@ -164,8 +205,8 @@ export DYLD_LIBRARY_PATH := # On Debian-based OSs, the basic C libraries are in a target-specific # location, not in standard places. Until we merge the building of the C # library, it is thus necessary to include this location here. On systems -# that don't need it, `sys_library_path' is just empty. This is necessary -# for `ld'. +# that don't need it, 'sys_library_path' is just empty. This is necessary +# for 'ld'. # # If this variable is not defined, it will be interpretted as the current # directory. 
In this case, when the program source has a 'specs' directory, @@ -179,12 +220,18 @@ endif # Building flags: # # C++ flags: when we build GCC, the C++ standard library needs to link with -# libiconv. So it is necessary to generically include `-liconv' for all C++ +# libiconv. So it is necessary to generically include '-liconv' for all C++ # builds. ifeq ($(host_cc),0) export CXXFLAGS := -liconv endif +# Custom installation prefix for software that can cause conflicts with +# others, to avoid crowding the to Maneage installed software directory, +# we'll put them all in a 'custom' directory. +idircustom = $(idir)/custom +$(idircustom):; mkdir $@ + # Servers to use as backup. Maneage already has some fixed servers that can # be used to download software tarballs. They are in a configuation # file. But we give precedence to the "user" backup servers. @@ -197,7 +244,7 @@ endif # Afer putting everything together, we use the first server as the # reference for all software if their '-url' variable isn't defined (in # 'reproduce/software/config/urls.conf'). -downloadwrapper = ./reproduce/analysis/bash/download-multi-try +downloadwrapper = ./reproduce/analysis/bash/download-multi-try.sh maneage_backup_urls := $(shell awk '!/^#/{printf "%s ", $$1}' \ reproduce/software/config/servers-backup.conf) backupservers_all = $(user_backup_urls) $(maneage_backup_urls) @@ -211,6 +258,8 @@ backupservers = $(filter-out $(topbackupserver),$(backupservers_all)) # Import rules to build specialized software include reproduce/software/make/xorg.mk include reproduce/software/make/python.mk +include reproduce/software/make/r-cran.mk + @@ -226,61 +275,27 @@ include reproduce/software/make/python.mk # # We would prefer to build static libraries, but some compilers like LLVM # don't have static capabilities, so they'll only build dynamic/shared -# libraries. Therefore, we can't use the easy `.a' suffix for static +# libraries. 
Therefore, we can't use the easy '.a' suffix for static # libraries as targets and there are different conventions for shared # library names. - -# Until version 0.11.0 is released, we are using the version corresponding -# to commit 014954db (603 commits after version 0.10.0, most recent when -# first importing log4cxx into this project). -# -# Note that after cloning the project, the following changes are necessary -# in `configure.ac'. -# - Update the final name of the tarball and its version (from `git -# - describe') by modifying the `AC_INIT' line: -# AC_INIT([apachelog4cxx], [0.10.0-603-014954db]) -# - Because of the long file names in the project, some files will not be -# packaged by default, so pass the `tar-ustar' option to Automake (the -# `AM_INIT_AUTOMAKE' line of `configure.ac': -# AM_INIT_AUTOMAKE([foreign subdir-objects -Wall tar-ustar]) -# -# You can then simply bootstrap the project and make the distribution -# tarball like this: -# ./autogen.sh && ./configure && make -j8 && make dist-lzip -# -# Unfortunately we have to re-run the `autogen.sh' script on the tarball to -# build it because it will complain about the version of libtool, so until -# the version 0.11.0 of log4cxx, we'll have to run `autogen.sh' on the -# unpacked source also. $(ibidir)/apachelog4cxx-$(apachelog4cxx-version): \ + $(ibidir)/cmake-$(cmake-version) \ $(ibidir)/expat-$(expat-version) \ $(ibidir)/apr-util-$(apr-util-version) \ $(ibidir)/automake-$(automake-version) - tarball=apachelog4cxx-$(apachelog4cxx-version).tar.lz + tarball=apache-log4cxx-$(apachelog4cxx-version).tar.lz $(call import-source, $(apachelog4cxx-url), $(apachelog4cxx-checksum)) - pdir=apachelog4cxx-$(apachelog4cxx-version) - rm -rf $(ddir)/$$pdir - topdir=$(pwd) - cd $(ddir) - tar xf $(tdir)/$$tarball - cd $$pdir - ./autogen.sh - ./configure SHELL=$(ibdir)/bash --prefix=$(idir) - make -j$(numthreads) SHELL=$(ibdir)/bash - make install - cd .. 
- rm -rf $$pdir - cd $$topdir + $(call cbuild, apache-log4cxx-$(apachelog4cxx-version), static) echo "Apache log4cxx $(apachelog4cxx-version)" > $@ $(ibidir)/apr-$(apr-version): - tarball=apr-$(apr-version).tar.gz + tarball=apr-$(apr-version).tar.lz $(call import-source, $(apr-url), $(apr-checksum)) $(call gbuild, apr-$(apr-version), ,--disable-static) echo "Apache Portable Runtime $(apr-version)" > $@ $(ibidir)/apr-util-$(apr-util-version): $(ibidir)/apr-$(apr-version) - tarball=apr-util-$(apr-util-version).tar.gz + tarball=apr-util-$(apr-util-version).tar.lz $(call import-source, $(apr-util-url), $(apr-util-checksum)) $(call gbuild, apr-util-$(apr-util-version), , \ --disable-static \ @@ -291,20 +306,19 @@ $(ibidir)/apr-util-$(apr-util-version): $(ibidir)/apr-$(apr-version) $(ibidir)/atlas-$(atlas-version): - tarball=lapack-$(lapack-version).tar.gz + tarball=lapack-$(lapack-version).tar.lz $(call import-source, $(lapack-url), $(lapack-checksum)) - tarball=atlas-$(atlas-version).tar.bz2 + tarball=atlas-$(atlas-version).tar.lz $(call import-source, $(atlas-url), $(atlas-checksum)) - # Get the operating system specific features (how to get - # CPU frequency and the library suffixes). To make the steps - # more readable, the different library version suffixes are - # named with a single character: `s' for no version in the - # name, `m' for the major version suffix, and `f' for the - # full version suffix. - # GCC in Mac OS doesn't work. To work around this issue, on Mac - # systems we force ATLAS to use `clang' instead of `gcc'. +# Get the operating system specific features (how to get CPU +# frequency and the library suffixes). To make the steps more +# readable, the different library version suffixes are named with a +# single character: 's' for no version in the name, 'm' for the major +# version suffix, and 'f' for the full version suffix. GCC in Mac OS +# doesn't work. To work around this issue, on Mac systems we force +# ATLAS to use 'clang' instead of 'gcc'. 
if [ x$(on_mac_os) = xyes ]; then s=dylib m=3.dylib @@ -321,8 +335,8 @@ $(ibidir)/atlas-$(atlas-version): | sed "s/.*: \([0-9.]*\).*/\1/") fi - # See if the shared libraries should be build for a single CPU - # thread or multiple threads. +# See if the shared libraries should be build for a single CPU thread +# or multiple threads. N=$$(nproc) srcdir=$$(pwd)/reproduce/software/make if [ $$N = 1 ]; then @@ -331,25 +345,27 @@ $(ibidir)/atlas-$(atlas-version): sharedmk=$$srcdir/atlas-multiple.mk fi - # The linking step here doesn't recognize the `-Wl' in the - # `rpath_command'. +# The linking step here doesn't recognize the '-Wl' in the +# 'rpath_command'. export LDFLAGS=-L$(ildir) cd $(ddir) - tar xf $(tdir)/atlas-$(atlas-version).tar.bz2 + tar -xf $(tdir)/atlas-$(atlas-version).tar.lz \ + --no-same-owner --no-same-permissions cd ATLAS + $(shsrcdir)/prep-source.sh $(ibdir) rm -rf build mkdir build cd build ../configure -b 64 -D c -DPentiumCPS=$$core \ - --with-netlib-lapack-tarfile=$(tdir)/lapack-$(lapack-version).tar.gz \ + --with-netlib-lapack-tarfile=$(tdir)/lapack-$(lapack-version).tar.lz \ --cripple-atlas-performance \ -Fa alg -fPIC --shared $$clangflag \ --prefix=$(idir) - # Static build. +# Static build. make - # Currently the shared libraries have problems on macOS. +# Currently the shared libraries have problems on macOS. if [ "x$(on_mac_os)" != xyes ]; then cd lib make -f $$sharedmk @@ -362,21 +378,21 @@ $(ibidir)/atlas-$(atlas-version): ln -fs $(ildir)/liblapack.$$f $(ildir)/liblapack.$$m fi - # Install the libraries. +# Install the libraries. make install - # We need to check the existance of `libptlapack.a', but we can't - # do this in the `&&' steps above (it will conflict). So we'll do - # the check after seeing if `libtatlas.so' is installed, then we'll - # finalize the build (delete the untarred directory). +# We need to check the existance of 'libptlapack.a', but we can't do +# this in the '&&' steps above (it will conflict). 
So we'll do the +# check after seeing if 'libtatlas.so' is installed, then we'll +# finalize the build (delete the untarred directory). if [ "x$(on_mac_os)" != xyes ]; then \ [ -e lib/libptlapack.a ] && cp lib/libptlapack.a $(ildir); \ cd $(ddir); \ rm -rf ATLAS; \ fi - # We'll check the full installation with the static library (not - # currently building shared library on Mac. +# We'll check the full installation with the static library (not +# currently building shared library on Mac. if [ -f $(ildir)/libatlas.a ]; then \ echo "ATLAS $(atlas-version)" > $@; \ fi @@ -391,8 +407,9 @@ $(ibidir)/boost-$(boost-version): \ rm -rf $(ddir)/$$unpackdir topdir=$(pwd) cd $(ddir) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) ./bootstrap.sh --prefix=$(idir) --with-libraries=all \ --with-python=python3 echo "using mpi ;" > project-config.jam @@ -404,57 +421,63 @@ $(ibidir)/boost-$(boost-version): \ $(ibidir)/cfitsio-$(cfitsio-version): - # Download the tarball - tarball=cfitsio-$(cfitsio-version).tar.gz +# Download the tarball + tarball=cfitsio-$(cfitsio-version).tar.lz $(call import-source, $(cfitsio-url), $(cfitsio-checksum)) - # CFITSIO hard-codes '@rpath' inside the shared library on - # Mac systems. So we need to change it to our library - # installation path. It doesn't affect GNU/Linux, so we'll - # just do it in any case to keep things clean. - topdir=$(pwd); cd $(ddir); tar xf $(tdir)/$$tarball +# CFITSIO hard-codes '@rpath' inside the shared library on Mac +# systems. So we need to change it to our library installation +# path. It doesn't affect GNU/Linux, so we'll just do it in any case +# to keep things clean. 
+ topdir=$(pwd); cd $(ddir) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions customtar=cfitsio-$(cfitsio-version)-custom.tar.gz cd cfitsio-$(cfitsio-version) - sed configure -e's|@rpath|$(ildir)|g' > configure_tmp - mv configure_tmp configure - chmod +x configure + sed -i -e's|@rpath|$(ildir)|g' configure cd .. tar cf $$customtar cfitsio-$(cfitsio-version) cd $$topdir - # Continue the standard build on the customized tarball. Note that - # with the installation of CFITSIO, `fpack' and `funpack' are not - # installed by default. Because of that, they are added explicity. +# Continue the standard build on the customized tarball. Note that +# with the installation of CFITSIO, 'fpack' and 'funpack' are not +# installed by default. Because of that, they are added explicity. +# +# Note that older versions of CFITSIO (before 4.4.0) require a +# specific 'shared' target for the building of the shared libraries. export gbuild_tar=$(ddir)/$$customtar $(call gbuild, cfitsio-$(cfitsio-version), , \ --enable-sse2 --enable-reentrant \ --with-bzip2=$(idir), , \ - make shared fpack funpack) + make fpack funpack) rm $$customtar echo "CFITSIO $(cfitsio-version)" > $@ $(ibidir)/cairo-$(cairo-version): \ + $(ibidir)/libxt-$(libxt-version) \ $(ibidir)/pixman-$(pixman-version) \ $(ibidir)/libpng-$(libpng-version) \ $(ibidir)/freetype-$(freetype-version) - tarball=cairo-$(cairo-version).tar.xz + tarball=cairo-$(cairo-version).tar.lz $(call import-source, $(cairo-url), $(cairo-checksum)) $(call gbuild, cairo-$(cairo-version), static, \ --with-x=yes, -j$(numthreads) V=1) echo "Cairo $(cairo-version)" > $@ # Eigen is just headers! So it doesn't need to be compiled. Once unpacked -# it has a checksum after `eigen-eigen', so we'll just use a `*' to choose +# it has a checksum after 'eigen-eigen', so we'll just use a '*' to choose # the unpacked directory. 
$(ibidir)/eigen-$(eigen-version): - tarball=eigen-$(eigen-version).tar.gz + tarball=eigen-$(eigen-version).tar.lz $(call import-source, $(eigen-url), $(eigen-checksum)) rm -rf $(ddir)/eigen-eigen-* - topdir=$(pwd); cd $(ddir); tar xf $(tdir)/$$tarball - cd eigen-eigen-* - cp -r Eigen $(iidir)/eigen3 + topdir=$(pwd); cd $(ddir) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd eigen-$(eigen-version) + if ! [ -d $(iidir)/eigen3 ]; then mkdir $(iidir)/eigen3; fi + cp -r Eigen/* $(iidir)/eigen3/ # Some expect 'eigen3'. + ln -s $(iidir)/eigen3 $(iidir)/Eigen # Others expect 'Eigen'. cd $$topdir - rm -rf $(ddir)/eigen-eigen-* + rm -rf $(ddir)/eigen-$(eigen-version) echo "Eigen $(eigen-version)" > $@ # GNU Emacs is an advanced text editor (among many other things!), so it @@ -468,7 +491,7 @@ $(ibidir)/eigen-$(eigen-version): # except the core Emacs functionality (using '--without-all') and we are # also disabling all graphic user interface features (using '--without-x'). $(ibidir)/emacs-$(emacs-version): - tarball=emacs-$(emacs-version).tar.xz + tarball=emacs-$(emacs-version).tar.lz $(call import-source, $(emacs-url), $(emacs-checksum)) $(call gbuild, emacs-$(emacs-version), static, \ --without-all --without-x \ @@ -483,15 +506,28 @@ $(ibidir)/expat-$(expat-version): echo "Expat $(expat-version)" > $@ $(ibidir)/fftw-$(fftw-version): - # Prepare the source tarball. - tarball=fftw-$(fftw-version).tar.gz + +# Prepare the source tarball. + tarball=fftw-$(fftw-version).tar.lz $(call import-source, $(fftw-url), $(fftw-checksum)) - # FFTW's single and double precission libraries must be built - # independently: for the the single-precision library, we need to - # add the `--enable-float' option. We will build this first, then - # the default double-precision library. 
- confop="--enable-shared --enable-threads --enable-avx --enable-sse2" +# FFTW's single and double precision libraries must be built +# independently: for the the single-precision library, we need to add +# the '--enable-float' option. We will build this first, then the +# default double-precision library. +# +# There are Intel-specific optimizations that can be enabled by +# adding the following two options to 'confop' +# +# --enable-avx --enable-sse2 +# +# However, they cause crashs on non-Intel processors (has been +# confirmed in ARM's aarch64). So in the generic scenario they are +# removed. Checking how these optimizations affect the numeric +# accuracy of the result (and thus optionally adding them for +# Intel-based processors) should be studied before they are +# optionally added for Intel-based CPUs (and ignored for others). + confop="--enable-shared --enable-threads" $(call gbuild, fftw-$(fftw-version), static, \ $$confop --enable-float) $(call gbuild, fftw-$(fftw-version), static, \ @@ -500,19 +536,25 @@ $(ibidir)/fftw-$(fftw-version): echo "FFTW $(fftw-version) \citep{fftw}" > $@ $(ibidir)/freetype-$(freetype-version): $(ibidir)/libpng-$(libpng-version) - tarball=freetype-$(freetype-version).tar.gz +# As of version 2.13.2, FreeType doesn't account for the 'SHELL' +# environment variable. The issue has been reported to the +# developers. But until future versions, the work-around was +# discoverd to be setting the 'GNUMAKE' environment variable so it +# includes 'SHELL'. 
+ export GNUMAKE="$(makewshell)" + tarball=freetype-$(freetype-version).tar.lz $(call import-source, $(freetype-url), $(freetype-checksum)) $(call gbuild, freetype-$(freetype-version), static) echo "FreeType $(freetype-version)" > $@ $(ibidir)/gperf-$(gperf-version): - tarball=gperf-$(gperf-version).tar.gz + tarball=gperf-$(gperf-version).tar.lz $(call import-source, $(gperf-url), $(gperf-checksum)) $(call gbuild, gperf-$(gperf-version), static) echo "GNU gperf $(gperf-version)" > $@ $(ibidir)/gsl-$(gsl-version): - tarball=gsl-$(gsl-version).tar.gz + tarball=gsl-$(gsl-version).tar.lz $(call import-source, $(gsl-url), $(gsl-checksum)) $(call gbuild, gsl-$(gsl-version), static) echo "GNU Scientific Library $(gsl-version)" > $@ @@ -520,7 +562,7 @@ $(ibidir)/gsl-$(gsl-version): $(ibidir)/hdf5-$(hdf5-version): $(ibidir)/openmpi-$(openmpi-version) export CC=mpicc export FC=mpif90 - tarball=hdf5-$(hdf5-version).tar.gz + tarball=hdf5-$(hdf5-version).tar.lz $(call import-source, $(hdf5-url), $(hdf5-checksum)) $(call gbuild, hdf5-$(hdf5-version), static, \ --enable-parallel \ @@ -531,14 +573,14 @@ $(ibidir)/hdf5-$(hdf5-version): $(ibidir)/openmpi-$(openmpi-version) # HEALPix includes the source of its C, C++, Python (and several other # languages) libraries within one tarball. We will include the Python # installation only when any other Python module is requested (in -# `TARGETS.conf'). +# 'TARGETS.conf'). # -# Note that the default `./configure' script is an interactive script which -# is hard to automate. So we need to go into the `autotools' directory of -# the `C' and `cxx' directories and configure the GNU Build System (with -# `autoreconf', which uses `autoconf' and `automake') to easily build the +# Note that the default './configure' script is an interactive script which +# is hard to automate. 
So we need to go into the 'autotools' directory of +# the 'C' and 'cxx' directories and configure the GNU Build System (with +# 'autoreconf', which uses 'autoconf' and 'automake') to easily build the # HEALPix C/C++ libraries in batch mode. -ifeq ($(strip $(top-level-python)),) +ifeq ($(strip $(targets-python)),) healpix-python-dep = else healpix-python-dep = $(ipydir)/matplotlib-$(matplotlib-version) \ @@ -548,7 +590,7 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \ $(ibidir)/cfitsio-$(cfitsio-version) \ $(ibidir)/autoconf-$(autoconf-version) \ $(ibidir)/automake-$(automake-version) - tarball=healpix-$(healpix-version).tar.gz + tarball=healpix-$(healpix-version).tar.lz $(call import-source, $(healpix-url), $(healpix-checksum)) if [ x"$(healpix-python-dep)" = x ]; then pycommand1="echo no-healpy-because-no-other-python" @@ -559,8 +601,10 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \ fi rm -rf $(ddir)/Healpix_$(healpix-version) topdir=$(pwd); cd $(ddir); - tar xf $(tdir)/$$tarball - cd Healpix_$(healpix-version)/src/C/autotools/ + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd Healpix_$(healpix-version) + $(shsrcdir)/prep-source.sh $(ibdir) + cd src/C/autotools autoreconf --install ./configure --prefix=$(idir) make V=1 -j$(numthreads) SHELL=$(ibdir)/bash @@ -568,6 +612,13 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \ cd ../../cxx/autotools/ autoreconf --install ./configure --prefix=$(idir) + +# With CFITSIO 4.0, the 'CFITSIO_VERSION' macro has three +# components. But this version of Healpix doesn't yet account for +# this. + sed -i -e's/CFITSIO_VERSION/fitsversion/' cxxsupport/fitshandle.cc + +# Continue with the building. 
make V=1 -j$(numthreads) SHELL=$(ibdir)/bash make install cd ../../healpy @@ -578,19 +629,52 @@ $(ibidir)/healpix-$(healpix-version): $(healpix-python-dep) \ cp $(dtexdir)/healpix.tex $(ictdir)/ echo "HEALPix $(healpix-version) \citep{healpix}" > $@ +$(ibidir)/libbsd-$(libbsd-version): $(ibidir)/libmd-$(libmd-version) + tarball=libbsd-$(libbsd-version).tar.lz + $(call import-source, $(libbsd-url), $(libbsd-checksum)) + if [ x$(on_mac_os) = xyes ]; then + echo "" > $@ + else + export LDFLAGS="-L$(idirlibmd)/lib $$LDFLAGS" + export CPPFLAGS="-I$(idirlibmd)/include $$CPPFLAGS" + $(call gbuild, libbsd-$(libbsd-version), static,,V=1) + echo "Libbsd $(libbsd-version)" > $@ + fi + $(ibidir)/libidn-$(libidn-version): - tarball=libidn-$(libidn-version).tar.gz + tarball=libidn-$(libidn-version).tar.lz $(call import-source, $(libidn-url), $(libidn-checksum)) $(call gbuild, libidn-$(libidn-version), static, \ --disable-doc, -j$(numthreads) V=1) echo "Libidn $(libidn-version)" > $@ $(ibidir)/libjpeg-$(libjpeg-version): - tarball=jpegsrc.$(libjpeg-version).tar.gz + tarball=libjpeg-$(libjpeg-version).tar.lz $(call import-source, $(libjpeg-url), $(libjpeg-checksum)) - $(call gbuild, jpeg-9b, static,,V=1) + $(call gbuild, libjpeg-$(libjpeg-version), static,,V=1) echo "Libjpeg $(libjpeg-version)" > $@ +# libmd is a set of "message digest" functions that are available in in the +# C library of BSD-based systems, but not others (like GNU-based +# systems). It includes hash functions like MD5 and SHAs. +# +# Libmd is being installed in a non-standard location because its headers +# (like 'md5.h') will conflict with similarly named headers by the system +# during the building of Binutils later! So any program that needs libmd's +# headers or libraries (like 'libbsd'), should add this special location to +# its CPPFLAGS and LDFLAGS. 
+idirlibmd=$(idircustom)/libmd +$(ibidir)/libmd-$(libmd-version): | $(idircustom) + tarball=libmd-$(libmd-version).tar.lz + $(call import-source, $(libmd-url), $(libmd-checksum)) + if [ x$(on_mac_os) = xyes ]; then + echo "" > $@ + else + export gbuild_prefix=$(idirlibmd) + $(call gbuild, libmd-$(libmd-version), static,,V=1) + echo "Libmd $(libmd-version)" > $@ + fi + $(ibidir)/libnsl-$(libnsl-version): \ $(ibidir)/libtirpc-$(libtirpc-version) \ $(ibidir)/rpcsvc-proto-$(rpcsvc-proto-version) @@ -603,15 +687,16 @@ $(ibidir)/libnsl-$(libnsl-version): \ $(ibidir)/libpaper-$(libpaper-version): \ $(ibidir)/automake-$(automake-version) - # Download the tarball. - tarball=libpaper-$(libpaper-version).tar.gz +# Download the tarball. + tarball=libpaper-$(libpaper-version).tar.lz $(call import-source, $(libpaper-url), $(libpaper-checksum)) - # Unpack, build the configure system, build and install. +# Unpack, build the configure system, build and install. cd $(ddir) - tar -xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions unpackdir=libpaper-$(libpaper-version) cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) autoreconf -fi ./configure --prefix=$(idir) --sysconfdir=$(idir)/etc \ --disable-static @@ -620,10 +705,10 @@ $(ibidir)/libpaper-$(libpaper-version): \ cd .. 
rm -rf $$unpackdir - # Post-processing: according to Linux From Scratch, libpaper - # expects that packages will install files into this directory and - # 'paperconfig' is a script which will invoke 'run-parts' if - # '/etc/libpaper.d' exists +# Post-processing: according to Linux From Scratch, libpaper expects +# that packages will install files into this directory and +# 'paperconfig' is a script which will invoke 'run-parts' if +# '/etc/libpaper.d' exists mkdir -vp $(idir)/etc/libpaper.d sed -e's|MANEAGESHELL|$(SHELL)|' $(shsrcdir)/run-parts.in \ > $(ibdir)/run-parts @@ -631,15 +716,19 @@ $(ibidir)/libpaper-$(libpaper-version): \ echo "Libpaper $(libpaper-version)" > $@ $(ibidir)/libpng-$(libpng-version): - tarball=libpng-$(libpng-version).tar.xz + +# The option '-DPNG_ARM_NEON_OPT=0' prevents an arm64 'neon' library +# from being required at compile time. + tarball=libpng-$(libpng-version).tar.lz $(call import-source, $(libpng-url), $(libpng-checksum)) - $(call gbuild, libpng-$(libpng-version), static) + $(call gbuild, libpng-$(libpng-version), static, \ + CFLAGS="-DPNG_ARM_NEON_OPT=0") echo "Libpng $(libpng-version)" > $@ $(ibidir)/libtiff-$(libtiff-version): $(ibidir)/libjpeg-$(libjpeg-version) - tarball=tiff-$(libtiff-version).tar.gz + tarball=libtiff-$(libtiff-version).tar.lz $(call import-source, $(libtiff-url), $(libtiff-checksum)) - $(call gbuild, tiff-$(libtiff-version), static, \ + $(call gbuild, libtiff-$(libtiff-version), static, \ --disable-jbig \ --disable-webp \ --disable-zstd) @@ -652,21 +741,119 @@ $(ibidir)/libtirpc-$(libtirpc-version): --disable-gssapi, V=1) echo "libtirpc $(libtirpc-version)" > $@ +# Metastore is used (through a Git hook) to restore the source modification +# dates of files after a Git checkout. Another Git hook saves all file +# metadata just before a commit (to allow restoration after a +# checkout). 
Since this project is managed in Makefiles, file modification +# dates are critical to not having to redo the whole analysis after +# checking out between branches. +# +# Note that we aren't using the standard version of Metastore, but a fork +# of it that is maintained in this repository: +# https://gitlab.com/makhlaghi/metastore-fork +# +# Libbsd is not necessary on macOS systems, because macOS is already a +# BSD-based distribution. But on GNU/Linux systems, it is necessary. +$(ibidir)/metastore-$(metastore-version): \ + $(ibidir)/libbsd-$(libbsd-version) + +# Download the tarball. + tarball=metastore-$(metastore-version).tar.lz + $(call import-source, $(metastore-url), $(metastore-checksum)) + +# Metastore doesn't have any './configure' script. So we'll just call +# 'pwd' as a place-holder for the './configure' command. +# +# File attributes are also not available on some systems, since the +# main purpose here is modification dates (and not attributes), we'll +# also set the 'NO_XATTR' flag. +# +# After installing Metastore, write the relevant hooks into this +# system's Git hooks, while setting the system-specific +# directories/files. +# +# Note that the metastore -O and -G options used in this template are +# currently only available in a fork of 'metastore' hosted at: +# https://github.com/mohammad-akhlaghi/metastore +# +# Checking for presence of '.git'. When the project source is +# downloaded from a non-Git source (for example from arXiv), there is +# no '.git' directory to work with. So until we find a better +# solution, avoid the step to to add the Git hooks. + current_dir=$$(pwd); \ + $(call gbuild, metastore-$(metastore-version), static,, \ + NO_XATTR=1 V=1,,pwd,PREFIX=$(idir)) + +# Correct RPATH when necessary. + if [ -f $(ibdir)/patchelf ]; then + $(ibdir)/patchelf --set-rpath $(ildir) $(ibdir)/metastore + fi + +# If this project is being built in a directory version controlled +# by Git, copy the hooks into the Git configuation. 
+ if [ -f $(ibdir)/metastore ]; then + if [ -d .git ]; then + user=$$(whoami) + group=$$(groups | awk '{print $$1}') + cd $$current_dir + for f in pre-commit post-checkout; do + sed -e's|@USER[@]|'$$user'|g' \ + -e's|@GROUP[@]|'$$group'|g' \ + -e's|@BINDIR[@]|$(ibdir)|g' \ + -e's|@TOP_PROJECT_DIR[@]|'$$current_dir'|g' \ + reproduce/software/shell/git-$$f > .git/hooks/$$f + chmod +x .git/hooks/$$f + done + fi + echo "Metastore (forked) $(metastore-version)" > $@ + else + echo; echo; echo + echo "*****************" + echo "metastore couldn't be installed!" + echo + echo "Its used for preserving timestamps on Git commits." + echo "Its useful for development, not simple running of " + echo "the project. So we won't stop the configuration " + echo "because it wasn't built." + echo "*****************" + echo "" > $@ + fi + +# The Ninja build system (https://ninja-build.org) is also known as simply +# "Ninja". But other package managers (for example Debian) use +# "ninja-build" (the old "ninja" name has become obsolete there). Also, +# their own URL is called "ninja-build". So we use the same convention in +# Maneage. +$(ibidir)/ninjabuild-$(ninjabuild-version): $(ibidir)/cmake-$(cmake-version) + tarball=ninjabuild-$(ninjabuild-version).tar.lz + $(call import-source, $(ninjabuild-url), $(ninjabuild-checksum)) + cd $(ddir) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd ninjabuild-$(ninjabuild-version) + cmake -Bbuild-cmake + cmake --build build-cmake -j$(numthreads) + ./build-cmake/ninja_test + cp -pv build-cmake/ninja $(ibdir)/ + cd .. 
+ rm -rf ninjabuild-$(ninjabuild-version) + echo "Ninja build system $(ninjabuild-version)" > $@ + $(ibidir)/openblas-$(openblas-version): - tarball=OpenBLAS-$(openblas-version).tar.gz + tarball=openblas-$(openblas-version).tar.lz $(call import-source, $(openblas-url), $(openblas-checksum)) if [ x$(on_mac_os) = xyes ]; then export CC=clang; fi cd $(ddir) - tar xf $(tdir)/$$tarball - cd OpenBLAS-$(openblas-version) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd openblas-$(openblas-version) + $(shsrcdir)/prep-source.sh $(ibdir) make -j$(numthreads) make PREFIX=$(idir) install cd .. - rm -rf OpenBLAS-$(openblas-version) + rm -rf openblas-$(openblas-version) echo "OpenBLAS $(openblas-version)" > $@ $(ibidir)/openmpi-$(openmpi-version): - tarball=openmpi-$(openmpi-version).tar.gz + tarball=openmpi-$(openmpi-version).tar.lz $(call import-source, $(openmpi-url), $(openmpi-checksum)) $(call gbuild, openmpi-$(openmpi-version), static, \ --with-pmix=internal \ @@ -692,22 +879,22 @@ $(ibidir)/openssh-$(openssh-version): echo "OpenSSH $(openssh-version)" > $@ $(ibidir)/pixman-$(pixman-version): - tarball=pixman-$(pixman-version).tar.gz + tarball=pixman-$(pixman-version).tar.lz $(call import-source, $(pixman-url), $(pixman-checksum)) $(call gbuild, pixman-$(pixman-version), static, , \ -j$(numthreads) V=1) echo "Pixman $(pixman-version)" > $@ $(ibidir)/rpcsvc-proto-$(rpcsvc-proto-version): - # 'libintl' is installed as part of GNU Gettext in - # 'basic.mk'. rpcsvc-proto needs to link with it on macOS. +# 'libintl' is installed as part of GNU Gettext in +# 'basic.mk'. rpcsvc-proto needs to link with it on macOS. if [ x$(on_mac_os) = xyes ]; then export CC=clang export CXX=clang++ export LDFLAGS="-lintl $$LDFLAGS" fi - # Download the tarball and build rpcsvc-proto. +# Download the tarball and build rpcsvc-proto. 
tarball=rpcsvc-proto-$(rpcsvc-proto-version).tar.xz $(call import-source, $(rpcsvc-proto-url), $(rpcsvc-proto-checksum)) $(call gbuild, rpcsvc-proto-$(rpcsvc-proto-version), static) @@ -721,35 +908,10 @@ $(ibidir)/tides-$(tides-version): cp $(dtexdir)/tides.tex $(ictdir)/ echo "TIDES $(tides-version) \citep{tides}" > $@ -$(ibidir)/valgrind-$(valgrind-version): \ - $(ibidir)/patch-$(patch-version) \ - $(ibidir)/autoconf-$(autoconf-version) \ - $(ibidir)/automake-$(automake-version) - # Import the tarball - tarball=valgrind-$(valgrind-version).tar.bz2 +$(ibidir)/valgrind-$(valgrind-version): + tarball=valgrind-$(valgrind-version).tar.lz $(call import-source, $(valgrind-url), $(valgrind-checksum)) - - # For valgrind-3.15.0, see - # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=946329 for a - # report on an MPI-related compile bug and the two patches - # below. These two patches and `automake` should allow valgrind to - # compile with gcc-9.2.0. - cd $(ddir) - tar -xf $(tdir)/$$tarball - valgrinddir=valgrind-$(valgrind-version) - cd $${valgrinddir} - printf "valgrindir=$${valgrinddir} ; pwd = %s .\n" $$($(ibdir)/pwd) - if [ "x$(valgrind-version)" = "x3.15.0" ]; then - patch --verbose -p1 < $(patchdir)/valgrind-3.15.0-mpi-fix1.patch - patch --verbose -p1 < $(patchdir)/valgrind-3.15.0-mpi-fix2.patch - fi - autoreconf - ./configure --prefix=$(idir) - make -j$(numthreads) - if ! make check -j$(numthreads); then - echo; echo "Valgrind's 'make check' failed!"; echo - fi - make install + $(call gbuild, valgrind-$(valgrind-version), static) echo "Valgrind $(valgrind-version)" > $@ $(ibidir)/yaml-$(yaml-version): @@ -773,47 +935,55 @@ $(ibidir)/yaml-$(yaml-version): # manually. # # For example, Libgit2 page recommends doing a static build, especially for -# Mac systems (with `-DBUILD_SHARED_LIBS=OFF'): "It’s highly recommended +# Mac systems (with '-DBUILD_SHARED_LIBS=OFF'): "It’s highly recommended # that you build libgit2 as a static library for Xcode projects. 
This # simplifies distribution significantly, as the resolution of dynamic # libraries at runtime can be extremely problematic.". This is a major # problem we have been having so far with Mac systems: # https://libgit2.org/docs/guides/build-and-link -# On macOS system, `libgit2' complains about not finding `_iconv*' -# functions! But apparently `libgit2' has its own implementation of libiconv +# On macOS system, 'libgit2' complains about not finding '_iconv*' +# functions! But apparently 'libgit2' has its own implementation of libiconv # that it uses if it can't find libiconv on macOS. So, to fix this problem -# it is necessary to use the option `-DUSE_ICONV=OFF` in the configure step. +# it is necessary to use the option '-DUSE_ICONV=OFF' in the configure step. $(ibidir)/libgit2-$(libgit2-version): $(ibidir)/cmake-$(cmake-version) - tarball=libgit2-$(libgit2-version).tar.gz + tarball=libgit2-$(libgit2-version).tar.lz $(call import-source, $(libgit2-url), $(libgit2-checksum)) $(call cbuild, libgit2-$(libgit2-version), static, \ -DUSE_SSH=OFF -DBUILD_CLAR=OFF \ -DTHREADSAFE=ON -DUSE_ICONV=OFF ) if [ x$(on_mac_os) = xyes ]; then - install_name_tool -id $(ildir)/libgit2.1.0.dylib \ - $(ildir)/libgit2.1.0.dylib + install_name_tool -id $(ildir)/libgit2.1.9.dylib \ + $(ildir)/libgit2.1.9.dylib fi echo "Libgit2 $(libgit2-version)" > $@ $(ibidir)/wcslib-$(wcslib-version): $(ibidir)/cfitsio-$(cfitsio-version) - # Import the tarball. - tarball=wcslib-$(wcslib-version).tar.bz2 - $(call import-source, $(wcslib-url), $(wcslib-checksum)) - # If Fortran isn't present, don't build WCSLIB with it. - if type gfortran &> /dev/null; then fortranopt=""; - else fortranopt="--disable-fortran" - fi +# Import the tarball. + tarball=wcslib-$(wcslib-version).tar.lz + $(call import-source, $(wcslib-url), $(wcslib-checksum)) - # Build WCSLIB. 
+# Build WCSLIB while disabling some features: +# - Fortran is disabled because as of May 2023, on macOS systems +# where we do not install GCC (and thus a standard 'gfortran'), the +# LLVM Fortran compiler is not complete and will cause a crash. If +# you use the Fortran features of WCSLIB, feel free to remove +# '--disable-fortran', but be careful with portability on other +# systems. Hopefully some time in the future, GCC will also be +# install-able on macOS within Maneage or LLVM's 'gfortran' will +# also be complete and will not cause a crash. +# - PGPLOT is disabled because it has many manual steps in its +# installation. Therefore, we currently do not build PGPlots in +# Maneage. Once (if) it is added, we can remove '--without-pgplot'. $(call gbuild, wcslib-$(wcslib-version), , \ LIBS="-pthread -lcurl -lm" \ + --without-pgplot \ + --disable-fortran \ --with-cfitsiolib=$(ildir) \ - --with-cfitsioinc=$(idir)/include \ - --without-pgplot $$fortranopt) + --with-cfitsioinc=$(idir)/include) if [ x$(on_mac_os) = xyes ]; then - install_name_tool -id $(ildir)/libwcs.7.3.dylib \ - $(ildir)/libwcs.7.3.dylib + install_name_tool -id $(ildir)/libwcs.$(wcslib-version).dylib \ + $(ildir)/libwcs.$(wcslib-version).dylib fi echo "WCSLIB $(wcslib-version)" > $@ @@ -831,30 +1001,60 @@ $(ibidir)/wcslib-$(wcslib-version): $(ibidir)/cfitsio-$(cfitsio-version) # # Astrometry-net contains a lot of programs. We need to specify the # installation directory and the Python executable (by default it will look -# for /usr/bin/python) +# for /usr/bin/python). +# +# An optional dependency is 'netpbm' but it has many dependencies and a +# crazy build system. So, it is not in the default preprequisites. If you +# need it you can add it as a prerequisite and pray that it will work. 
+# +# A consequence of not having 'netpbm' is that for obtaining the +# astrometric solution of one image using 'solve-field', it is necessary to +# build a catalog of sources with image coordinates and flux (x,y,flux) in +# advance. The catalog has to be sorted by flux. Finally, when invoking +# 'solve-field', the width and heigh of the original image have to be +# provided. +# +# More explicitly, raw basic steps using Gnuastro: +# +# - Obtain a catalog with image coordinates and flux (x,y,brightness): +# $ astnoisechisel img.fits -o det.fits +# $ astsegment det.fits -o seg.fits +# $ astmkcatalog seg.fits --clumpscat --x --y --brightness -o cat-raw.fits +# +# - Sort by flux: +# $ asttable cat-raw.fits --hdu 2 --sort brightness --descending \ +# --output cat.fits +# +# - Get the x-size and y-size from the header: +# $ xsize=$(astfits img.fits --keyvalue NAXIS1 --quiet) +# $ ysize=$(astfits img.fits --keyvalue NAXIS2 --quiet) +# +# - Run 'solve-field' to obtain the astrometric solution: +# $ solve-field cat.fits --width $xsize --height $ysize [--other-parameters] $(ibidir)/astrometrynet-$(astrometrynet-version): \ $(ibidir)/gsl-$(gsl-version) \ $(ibidir)/swig-$(swig-version) \ $(ipydir)/numpy-$(numpy-version) \ $(ibidir)/cairo-$(cairo-version) \ $(ibidir)/libpng-$(libpng-version) \ - $(ibidir)/netpbm-$(netpbm-version) \ $(ibidir)/wcslib-$(wcslib-version) \ + $(ipydir)/astropy-$(astropy-version) \ $(ibidir)/cfitsio-$(cfitsio-version) \ $(ibidir)/libjpeg-$(libjpeg-version) - # Import the tarball - tarball=astrometry.net-$(astrometrynet-version).tar.gz +# Import the tarball + tarball=astrometry.net-$(astrometrynet-version).tar.lz $(call import-source, $(astrometrynet-url), $(astrometrynet-checksum)) - # We are modifying the Makefile in two steps because on Mac OS - # system we do not have `/proc/cpuinfo' nor `free'. 
Since this is - # only for the `report.txt', this changes do not causes problems in - # running `astrometrynet' +# We are modifying the Makefile in two steps because on Mac OS system +# we do not have '/proc/cpuinfo' nor 'free'. Since this is only for +# the 'report.txt', this changes do not causes problems in running +# 'astrometrynet' cd $(ddir) rm -rf astrometry.net-$(astrometrynet-version) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd astrometry.net-$(astrometrynet-version) + $(shsrcdir)/prep-source.sh $(ibdir) sed -e 's|cat /proc/cpuinfo|echo "Ignoring CPU info"|' \ -e 's|-free|echo "Ignoring RAM info"|' Makefile > Makefile.tmp mv Makefile.tmp Makefile @@ -874,30 +1074,26 @@ $(ibidir)/autoconf-$(autoconf-version): echo "GNU Autoconf $(autoconf-version)" > $@ $(ibidir)/automake-$(automake-version): $(ibidir)/autoconf-$(autoconf-version) - tarball=automake-$(automake-version).tar.gz + tarball=automake-$(automake-version).tar.lz $(call import-source, $(automake-url), $(automake-checksum)) $(call gbuild, automake-$(automake-version), static, ,V=1) echo "GNU Automake $(automake-version)" > $@ -$(ibidir)/bison-$(bison-version): $(ibidir)/help2man-$(help2man-version) - tarball=bison-$(bison-version).tar.lz - $(call import-source, $(bison-url), $(bison-checksum)) - $(call gbuild, bison-$(bison-version), static, ,V=1 -j$(numthreads)) - echo "GNU Bison $(bison-version)" > $@ - # cdsclient is a set of software written in c to interact with astronomical -# database servers. It is a dependency of `scamp' to be able to download +# database servers. It is a dependency of 'scamp' to be able to download # reference catalogues. -# NOTE: we do not use a convencional `gbuild' installation because the +# +# NOTE: we do not use a convencional 'gbuild' installation because the # programs are scripts and we need to touch them before installing. # Otherwise this software will be re-built each time the configure step is # invoked. 
$(ibidir)/cdsclient-$(cdsclient-version): - tarball=cdsclient-$(cdsclient-version).tar.gz + tarball=cdsclient-$(cdsclient-version).tar.lz $(call import-source, $(cdsclient-url), $(cdsclient-checksum)) cd $(ddir) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd cdsclient-$(cdsclient-version) + $(shsrcdir)/prep-source.sh $(ibdir) touch * ./configure --prefix=$(idir) make @@ -906,31 +1102,47 @@ $(ibidir)/cdsclient-$(cdsclient-version): rm -rf cdsclient-$(cdsclient-version) echo "cdsclient $(cdsclient-version)" > $@ -# CMake can be built with its custom `./bootstrap' script. -$(ibidir)/cmake-$(cmake-version): $(ibidir)/curl-$(curl-version) - # Import the tarball - tarball=cmake-$(cmake-version).tar.gz +# CMake can be built with its custom './bootstrap' script and has no +# dependencies beyond the basic Maneage software. +$(ibidir)/cmake-$(cmake-version): + +# Import the tarball + tarball=cmake-$(cmake-version).tar.lz $(call import-source, $(cmake-url), $(cmake-checksum)) - # After searching in `bootstrap', I couldn't find `LIBS', only - # `LDFLAGS'. So the extra libraries are being added to `LDFLAGS', - # not `LIBS'. - # - # On Mac systems, the build complains about `clang' specific - # features, so we can't use our own GCC build here. +# On Mac systems, the build complains about 'clang' specific +# features, so we can't use our own GCC build here. if [ x$(on_mac_os) = xyes ]; then export CC=clang export CXX=clang++ fi + +# CMake wants a single executable for 'MAKE', so we can't use 'make +# SHELL=$(SHELL) and we have defined this script. + export MAKE="$(makewshell)" + +# Go into the unpacked directory and prepare CMake. 
cd $(ddir) rm -rf cmake-$(cmake-version) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd cmake-$(cmake-version) - ./bootstrap --prefix=$(idir) --system-curl --system-zlib \ - --system-bzip2 --system-liblzma --no-qt-gui \ + $(shsrcdir)/prep-source.sh $(ibdir) + +# Bootstrap, build and install CMake: +# - With the '--no-system-libs' option, CMake builds and statically +# links all the libraries it needs. Even though some of those (like +# liblzma, libcurl, zlib or bzip2) are within Maneage, we +# discovered that CMake can get confused and use out-of-Maneage +# libraries (https://savannah.nongnu.org/bugs/?63043). + ./bootstrap --no-qt-gui \ + --prefix=$(idir) \ + --no-system-libs \ --parallel=$(numthreads) - make -j$(numthreads) LIBS="$$LIBS -lssl -lcrypto -lz" VERBOSE=1 - make install + $(makewshell) VERBOSE=1 LIBS="$$LIBS -lssl -lcrypto -lz" \ + -j$(numthreads) + $(makewshell) install + +# Clean up. cd .. rm -rf cmake-$(cmake-version) echo "CMake $(cmake-version)" > $@ @@ -938,11 +1150,15 @@ $(ibidir)/cmake-$(cmake-version): $(ibidir)/curl-$(curl-version) $(ibidir)/flex-$(flex-version): $(ibidir)/bison-$(bison-version) tarball=flex-$(flex-version).tar.lz $(call import-source, $(flex-url), $(flex-checksum)) - $(call gbuild, flex-$(flex-version), static, ,V=1 -j$(numthreads)) + $(call gbuild, flex-$(flex-version), static, \ + --with-libiconv-prefix=$(idir) \ + --with-libintl-prefix=$(idir) \ + --disable-dependency-tracking, \ + V=1 -j$(numthreads)) echo "Flex $(flex-version)" > $@ $(ibidir)/gdb-$(gdb-version): $(ibidir)/python-$(python-version) - tarball=gdb-$(gdb-version).tar.gz + tarball=gdb-$(gdb-version).tar.lz export configure_in_different_directory=1; $(call import-source, $(gdb-url), $(gdb-checksum)) $(call gbuild, gdb-$(gdb-version),,,V=1 -j$(numthreads)) @@ -956,49 +1172,54 @@ $(ibidir)/ghostscript-$(ghostscript-version): \ $(ibidir)/libtiff-$(libtiff-version) \ $(ibidir)/libpaper-$(libpaper-version) - # 
Download the standard collection of Ghostscript fonts. - tarball=ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.gz +# Download the standard collection of Ghostscript fonts. + tarball=ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.lz $(call import-source, $(ghostscript-fonts-std-url), \ $(ghostscript-fonts-std-checksum)) - # Download the extra GNU fonts for Ghostscript. - tarball=ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.gz +# Download the extra GNU fonts for Ghostscript. + tarball=ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.lz $(call import-source, $(ghostscript-fonts-gnu-url), \ $(ghostscript-fonts-gnu-checksum)) - # Download the tarball - tarball=ghostscript-$(ghostscript-version).tar.gz +# Download the tarball + tarball=ghostscript-$(ghostscript-version).tar.lz $(call import-source, $(ghostscript-url), $(ghostscript-checksum)) - # Unpack it and configure Ghostscript. +# Unpack it and configure Ghostscript. The option +# '-DPNG_ARM_NEON_OPT=0' prevents an arm64 'neon' library from being +# required at compile time. cd $(ddir) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd ghostscript-$(ghostscript-version) + $(shsrcdir)/prep-source.sh $(ibdir) ./configure --prefix=$(idir) \ --disable-cups \ --enable-dynamic \ - --with-system-libtiff \ - --disable-compile-inits + --disable-compile-inits \ + --disable-hidden-visibility \ + CFLAGS="-DPNG_ARM_NEON_OPT=0" \ + LDFLAGS=-Wl,--copy-dt-needed-entries - # Build and install the program and the shared libraries. +# Build and install the program and the shared libraries. make V=1 -j$(numthreads) make so V=1 -j$(numthreads) make install make soinstall - # Install headers and set PostScript (PS) headers to point there. +# Install headers and set PostScript (PS) headers to point there. install -v -m644 base/*.h $(iidir)/ghostscript ln -sfvn $(iidir)/ghostscript $(iidir)/ps - # Install the fonts. 
- tar -xvf $(tdir)/ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.gz \ - -C $(idir)/share/ghostscript - tar -xvf $(tdir)/ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.gz \ - -C $(idir)/share/ghostscript +# Install the fonts. + tar -xvf $(tdir)/ghostscript-fonts-std-$(ghostscript-fonts-std-version).tar.lz \ + -C $(idir)/share/ghostscript --no-same-owner --no-same-permissions + tar -xvf $(tdir)/ghostscript-fonts-gnu-$(ghostscript-fonts-gnu-version).tar.lz \ + -C $(idir)/share/ghostscript --no-same-owner --no-same-permissions fc-cache -v $(idir)/share/ghostscript/fonts/ echo; echo "Ghostscript fonts added to Fontconfig."; echo; - # Clean up and write the output target. +# Clean up and write the output target. cd .. rm -rf ghostscript-$(ghostscript-version) echo "GPL Ghostscript $(ghostscript-version)" > $@ @@ -1017,64 +1238,106 @@ $(ibidir)/gnuastro-$(gnuastro-version): \ cp $(dtexdir)/gnuastro.tex $(ictdir)/ echo "GNU Astronomy Utilities $(gnuastro-version) \citep{gnuastro}" > $@ -$(ibidir)/help2man-$(help2man-version): - tarball=help2man-$(help2man-version).tar.xz - $(call import-source, $(help2man-url), $(help2man-checksum)) - $(call gbuild, help2man-$(help2man-version), static, ,V=1) - echo "Help2man $(Help2man-version)" > $@ +$(ibidir)/icu-$(icu-version): $(ibidir)/python-$(python-version) + +# First, we need to remove any possibly existing ICU installation +# because it can cause conflicts during a new configuration +# (especially if a new version is to replace the old one). + for i in data i18n io test tu uc; do + rm -fv $(ildir)/libicu$$i.*; + done + +# Prepare the tarball, unpack, build and install ICU (some +# customizations are necessary, so we're not using 'gbuild'). 
+ tarball=icu-$(icu-version).tar.lz + $(call import-source, $(icu-url), $(icu-checksum)) + cd $(ddir) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + unpackdir=icu-$(icu-version) + cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) + cd icu4c/source + ./configure --enable-static --prefix=$(idir) + make -j$(numthreads) V=1 + make install + cd $(ddir) + rm -rf $$unpackdir + echo "ICU $(icu-version)" > $@ $(ibidir)/imagemagick-$(imagemagick-version): \ $(ibidir)/zlib-$(zlib-version) \ $(ibidir)/libjpeg-$(libjpeg-version) \ - $(ibidir)/libtiff-$(libtiff-version) - tarball=imagemagick-$(imagemagick-version).tar.xz + $(ibidir)/libtiff-$(libtiff-version) \ + $(ibidir)/ghostscript-$(ghostscript-version) + tarball=ImageMagick-$(imagemagick-version).tar.lz $(call import-source, $(imagemagick-url), $(imagemagick-checksum)) $(call gbuild, ImageMagick-$(imagemagick-version), static, \ - --without-x --disable-openmp, V=1 -j$(numthreads)) + --without-x \ + --with-gslib \ + --disable-openmp \ + --with-libstdc=$(ildir), \ + V=1 -j$(numthreads)) + +# On macOS, an executable and several libraries are not properly +# linked with the Ghostscript library (libgs), so we need to fix it +# manually. + if [ x$(on_mac_os) = xyes ]; then + gsversion=$$(echo $(ghostscript-version) \ + | awk -F'.' '{print $$1"."$$2}') + libMagicks=$$(ls -l $(ildir)/libMagick*.dylib \ + | awk '/^-/{print $$NF}') + libMagicks_all="$(ibdir)/magick $$libMagicks" + for f in $$libMagicks_all; do \ + install_name_tool -change libgs.dylib.$$gsversion \ + $(ildir)/libgs.dylib.$$gsversion $$f; done + fi echo "ImageMagick $(imagemagick-version)" > $@ -# `imfit' doesn't use the traditional `configure' and `make' to install -# itself. Instead of that, it uses `scons'. As a consequence, the +# 'imfit' doesn't use the traditional 'configure' and 'make' to install +# itself. Instead of that, it uses 'scons'. 
As a consequence, the # installation is manually done by decompressing the tarball, and running -# `scons' with the necessary flags. Despite of that, it is necessary to +# 'scons' with the necessary flags. Despite of that, it is necessary to # replace the default searching paths in this script by our installation -# paths. This is done with `sed', replacing each ocurrence of `/usr/local' -# by `$(idir)'. After that, each compiled program (`imfit', `imfit-mcmc' -# and `makeimage') is copied into the installation directory and an `rpath' +# paths. This is done with 'sed', replacing each ocurrence of '/usr/local' +# by '$(idir)'. After that, each compiled program ('imfit', 'imfit-mcmc' +# and 'makeimage') is copied into the installation directory and an 'rpath' # is added. $(ibidir)/imfit-$(imfit-version): \ $(ibidir)/gsl-$(gsl-version) \ $(ibidir)/fftw-$(fftw-version) \ $(ibidir)/scons-$(scons-version) \ $(ibidir)/cfitsio-$(cfitsio-version) + +# Prepare the source. tarball=imfit-$(imfit-version).tar.gz $(call import-source, $(imfit-url), $(imfit-checksum)) - # If the C library is in a non-standard location. +# If the C library is in a non-standard location. if ! [ x$(SYS_CPATH) = x ]; then headerpath="--header-path=$(SYS_CPATH)" fi - # Unpack and build imfit and its accompanying programs. +# Unpack and build imfit and its accompanying programs. 
cd $(ddir) unpackdir=imfit-$(imfit-version) rm -rf $$unpackdir - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) sed -i 's|/usr/local|$(idir)|g' SConstruct sed -i 's|/usr/include|$(idir)/include|g' SConstruct sed -i 's|.append(|.insert(0,|g' SConstruct - scons --no-openmp --no-nlopt \ + scons --no-openmp --no-nlopt \ --cc=$(ibdir)/gcc --cpp=$(ibdir)/g++ \ --header-path=$(idir)/include $$headerpath \ --lib-path=$(idir)/lib imfit cp imfit $(ibdir) - scons --no-openmp --no-nlopt \ + scons --no-openmp --no-nlopt \ --cc=$(ibdir)/gcc --cpp=$(ibdir)/g++ \ --header-path=$(idir)/include $$headerpath \ --lib-path=$(idir)/lib imfit-mcmc cp imfit-mcmc $(ibdir) - scons --no-openmp --no-nlopt \ + scons --no-openmp --no-nlopt \ --cc=$(ibdir)/gcc --cpp=$(ibdir)/g++ \ --header-path=$(idir)/include $$headerpath \ --lib-path=$(idir)/lib makeimage @@ -1086,6 +1349,8 @@ $(ibidir)/imfit-$(imfit-version): \ done fi cp $(dtexdir)/imfit.tex $(ictdir)/ + cd .. + rm -rf $$unpackdir echo "Imfit $(imfit-version) \citep{imfit2015}" > $@ # Minizip 1.x is actually distributed within zlib. 
It doesn't have its own @@ -1098,14 +1363,16 @@ $(ibidir)/imfit-$(imfit-version): \ # About deleting the final crypt.h file after installation, see # https://bugzilla.redhat.com/show_bug.cgi?id=1424609 $(ibidir)/minizip-$(minizip-version): $(ibidir)/automake-$(automake-version) - tarball=zlib-$(zlib-version).tar.gz - $(call import-source, $(minizip-url), $(minizip-checksum)) + tarball=zlib-$(zlib-version).tar.lz + $(call import-source, $(zlib-url), $(zlib-checksum)) cd $(ddir) unpackdir=minizip-$(minizip-version) rm -rf $$unpackdir mkdir $$unpackdir - tar xf $(tdir)/$$tarball -C$$unpackdir --strip-components=1 + tar -xf $(tdir)/$$tarball -C$$unpackdir --strip-components=1 \ + --no-same-owner --no-same-permissions cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) ./configure --prefix=$(idir) make cd contrib/minizip @@ -1138,29 +1405,50 @@ $(ibidir)/missfits-$(missfits-version): cp $(dtexdir)/missfits.tex $(ictdir)/ echo "MissFITS $(missfits-version) \citep{missfits}" > $@ -# Netpbm is a prerequisite of Astrometry-net, it contains a lot of programs. -# This program has a crazy dialogue installation which is override using the -# printf statment. Each `\n' is a new question that the installation process -# ask to the user. We give all answers with a pipe to the scripts (configure -# and install). The questions are different depending on the system (tested -# on GNU/Linux and Mac OS). +# Netpbm is an optional prerequisite of Astrometry-net, it contains a lot +# of programs. This program has a crazy dialogue installation which is +# override using the printf statment. Each '\n' is a new question that the +# installation process ask to the user. We give all answers with a pipe to +# the scripts (configure and install). The questions are different +# depending on the system (tested on GNU/Linux and Mac OS). 
$(ibidir)/netpbm-$(netpbm-version): \ + $(ibidir)/flex-$(flex-version) \ $(ibidir)/libpng-$(libpng-version) \ + $(ibidir)/libx11-$(libx11-version) \ $(ibidir)/libjpeg-$(libjpeg-version) \ $(ibidir)/libtiff-$(libtiff-version) \ $(ibidir)/libxml2-$(libxml2-version) - tarball=netpbm-$(netpbm-version).tar.gz + tarball=netpbm-$(netpbm-version).tar.lz $(call import-source, $(netpbm-url), $(netpbm-checksum)) + +# Answers to the configuration questions. if [ x$(on_mac_os) = xyes ]; then answers='\n\n$(ildir)\n\n\n\n\n\n$(ildir)/include\n\n$(ildir)/include\n\n$(ildir)/include\nnone\n\n' else answers='\n\n\n\n\n\n\n\n\n\n\n\n\nnone\n\n\n' fi + +# Go into the temporary directory and unpack the tarball. cd $(ddir) unpackdir=netpbm-$(netpbm-version) rm -rf $$unpackdir - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) + +# As of NetPBM 10.73.39 and Flex 2.6.4-410-74a89fd (commit 74a89fd in +# Flex's Git that is 410 commits after version 2.6.4), there is the +# following line: 'if (0) yyunput(0, NULL);'. It will cause a crash +# and is just to avoid compiler warnings! So we are setting the +# 'yyunput(0, NULL);' to the redundant 'yyunput(0, NULL);' to let the +# compilation finish! + awk '$$1=="if" && $$2=="(0)"{inif=1} \ + {if(inif==1 && $$1=="yyunput(0,") \ + print "{int a=1;}"; else print $$0}' \ + converter/pbm/thinkjettopbm.l > thinkjettopbm.tmp + mv thinkjettopbm.tmp converter/pbm/thinkjettopbm.l + +# Pass the answers to the configure script then build and install it. 
printf "$$answers" | ./configure make rm -rf $(ddir)/$$unpackdir/install @@ -1189,60 +1477,43 @@ $(ibidir)/pcre-$(pcre-version): , V=1 -j$(numthreads)) echo "Perl Compatible Regular Expressions $(pcre-version)" > $@ -# Comment on building R without GUI support ('--without-tcltlk') +# On macOS 12.3 Monterey with AppleClang 13.1.6.13160021, Plplot 5.15.0 +# needs the 'finite' function of 'math.h' which has been deprecated in +# macOS. By manually adding "#define finite isfinite" in 'math.h' like +# below, we fixed this problem but still it can't find 'exit' during the +# configuration phase so we stopped trying to port it to macOS. It means +# that on macOS Plplot is not available. For other OSs it should be fine. +# On macOS, the file 'tmath.h' can be found with 'xcrun --show-sdk-path'. # -# Tcl/Tk are a set of tools to provide Graphic User Interface (GUI) support -# in some software. But they are not yet natively built within Maneage, -# primarily because we have higher-priority work right now (if anyone is -# interested, they can ofcourse contribute!). GUI tools in general aren't -# high on our priority list right now because they are generally good for -# human interaction (which is contrary to the reproducible philosophy: -# there will always be human-error and frustration, for example in GUI -# tools the best level of reproducibility is statements like this: "move -# your mouse to button XXX, then click on menu YYY and etc"). A robust -# reproducible solution must be done automatically. -# -# If someone wants to use R's GUI functionalities while investigating for -# their analysis, they can do the GUI part on their host OS -# implementation. Later, they can bring the finalized source into Maneage -# to be automatically run in Maneage. This will also be the recommended way -# to deal with GUI tools later when we do install them within Maneage. 
-$(ibidir)/R-$(R-version): \ - $(ibidir)/pcre-$(pcre-version) \ - $(ibidir)/cairo-$(cairo-version) \ - $(ibidir)/libpng-$(libpng-version) \ - $(ibidir)/libjpeg-$(libjpeg-version) \ - $(ibidir)/libtiff-$(libtiff-version) \ - $(ibidir)/libpaper-$(libpaper-version) - tarball=R-$(R-version).tar.gz - $(call import-source, $(R-url), $(R-checksum)) - cd $(ddir) - tar xf $(tdir)/$$tarball - cd R-$(R-version) - - # We need to manually remove the lines with '~autodetect~', they - # cause the configure script to crash in version 4.0.2. They are - # used in relation to Java, and we don't use Java anyway. - sed -i -e '/\~autodetect\~/ s/^/#/g' configure - export R_SHELL=$(SHELL) - ./configure --prefix=$(idir) \ - --without-x \ - --with-pcre1 \ - --disable-java \ - --with-readline \ - --without-tcltk \ - --disable-openmp - make -j$(numthreads) - make install - cd .. - rm -rf R-$(R-version) - echo "R $(R-version)" > $@ +# cp /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/math.h \ +# $(iidir)/math.h +# awk '{if($$0 ~ /#define isinf\(x\)/) {print "#define finite isfinite"; print $$0} else print $$0 }' \ +# $(iidir)/math.h > $(iidir)/math-tmp.h +# mv $(iidir)/math-tmp.h $(iidir)/math.h +$(ibidir)/plplot-$(plplot-version): \ + $(ibidir)/cairo-$(cairo-version) \ + $(ibidir)/freetype-$(freetype-version) + tarball=plplot-$(plplot-version).tar.lz + $(call import-source, $(plplot-url), $(plplot-checksum)) + $(call cbuild, plplot-$(plplot-version), static, \ + -DDEFAULT_NO_BINDINGS:BOOL=ON \ + -DENABLE_cxx:BOOL=ON \ + -DENABLE_fortran:BOOL=ON \ + -DDEFAULT_NO_QT_DEVICES:BOOL=ON) + echo "PLplot $(pcre-version)" > $@ # SCAMP documentation says ATLAS is a mandatory prerequisite for using # SCAMP. We have ATLAS into the project but there are some problems with the # libraries that are not yet solved. However, we tried to install it with # the option --enable-openblas and it worked (same issue happened with -# `sextractor'. 
+# 'sextractor'. +# +# Plplot in SCAMP cannot be built for macOS. See comments on top of Plplot. +# But if you are not using macOS and want to enable plots, follow the +# steps below: +# +# 1. Add '$(ibidir)/plplot-$(plplot-version)' to the prerequisites +# 2. Remove the option '--enable-plplot=no' from the call of 'gbuild' $(ibidir)/scamp-$(scamp-version): \ $(ibidir)/fftw-$(fftw-version) \ $(ibidir)/openblas-$(openblas-version) \ @@ -1250,7 +1521,7 @@ $(ibidir)/scamp-$(scamp-version): \ tarball=scamp-$(scamp-version).tar.lz $(call import-source, $(scamp-url), $(scamp-checksum)) - # See comment above 'missfits' for '-fcommon'. +# See comment above 'missfits' for '-fcommon'. $(call gbuild, scamp-$(scamp-version), static, \ CFLAGS="-fcommon" \ --enable-threads \ @@ -1263,48 +1534,109 @@ $(ibidir)/scamp-$(scamp-version): \ cp $(dtexdir)/scamp.tex $(ictdir)/ echo "SCAMP $(scamp-version) \citep{scamp}" > $@ -# Since `scons' doesn't use the traditional GNU installation with -# `configure' and `make' it is installed manually using `python'. +# Since 'scons' doesn't use the traditional GNU installation with 'configure' +# and 'make' it is installed manually using 'python'. After 'scons' is +# installed, there is a file, '$(ildir)/scons/SCons/Platform/posix.py', that +# contains several hard coded paths like '/usr/local'. This is bad because it +# causes later problems in the installation of other programs (e.g., 'imfit'). +# As a consequence, at the end of the installation we replace the +# '/usr/local' by the one Maneage uses: '$(idir)'. Only one of these paths +# is replaced, the first one: '/usr/local'. 
$(ibidir)/scons-$(scons-version): $(ibidir)/python-$(python-version) + +# Prepare the tarball tarball=scons-$(scons-version).tar.gz $(call import-source, $(scons-url), $(scons-checksum)) + +# Unpack and enter the source directory cd $(ddir) unpackdir=scons-$(scons-version) rm -rf $$unpackdir - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) + +# Unfortuantely SCons hard-codes its search PATH in its source (to +# use POSIX operating system defaults)! So the only way to modify it +# is to edit the source manually! Instead of using SED to replace a +# fixed string, we are using AWK to replace the value. This is done +# because in future versions, they may suddenly change the order, and +# the fixed string won't match with SED. But with AWK, we can be +# fully ignorant to their default value, and just change the value of +# the known variable. Some technical notes: +# - In the shell, the single quote is used to separate AWK's +# environment from the shell, we are using '\47' instead of the +# single quote. +# - In Python (the source we are editing) indentation is +# meaningful, but SPACE is a delimiter in AWK and AWK will +# remove leading/trailing SPACE from its values. So we'll +# manually inseart the necessary number of spaces before the +# modified line. + awk '{ if($$1=="env[\47ENV\47][\47PATH\47]") \ + {$$3="\47'$(ibdir)'\47"; print " "$$0} \ + else print}' engine/SCons/Platform/posix.py > posix-edited.py + mv posix-edited.py engine/SCons/Platform/posix.py + +# Install SCons python setup.py install + cd .. + rm -rf $$unpackdir echo "SCons $(scons-version)" > $@ # Sextractor crashes complaining about not linking with some ATLAS -# libraries. But we can override this issue since we have Openblas +# libraries. But we can override this issue since we have OpenBLAS # installed, it is just necessary to explicity tell sextractor to use it in # the configuration step. 
-# -# The '-fcommon' is a necessary C compilation flag for GCC 10 and above. It -# is necessary for astromatic libraries, otherwise their build will crash. $(ibidir)/sextractor-$(sextractor-version): \ $(ibidir)/fftw-$(fftw-version) \ $(ibidir)/openblas-$(openblas-version) +# Import the source. tarball=sextractor-$(sextractor-version).tar.lz $(call import-source, $(sextractor-url), $(sextractor-checksum)) - # See comment above 'missfits' for '-fcommon'. - $(call gbuild, sextractor-$(sextractor-version), static, \ - CFLAGS="-fcommon" \ - --enable-threads \ - --enable-openblas \ - --with-openblas-libdir=$(ildir) \ - --with-openblas-incdir=$(idir)/include) +# Unpack the tarball and enter the directory. + unpackdir=sextractor-$(sextractor-version) + cd $(ddir) + rm -rf $$unpackdir + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd $$unpackdir + +# See comment above 'missfits' for '-fcommon'. + ./configure --prefix="$(idir)" \ + CFLAGS="-fcommon" \ + --enable-threads \ + --enable-openblas \ + --libdir=$(ildir) \ + --includedir=$(iidir) \ + --with-openblas-libdir=$(ildir) \ + --with-openblas-incdir=$(iidir) + +# On macOS we need to manually change 'finite' to 'isfinite' in the +# header file 'src/levmar/compiler.h'. Until this problem is +# hopefully fixed in next releases, we are doing it manually using +# 'sed'. Consequently we are not installing it using 'gbuild'. Once +# this is fixed upstream, we can use the standard 'gbuild'. + sed -i -e's|define LM_FINITE finite |define LM_FINITE isfinite |' \ + src/levmar/compiler.h + +# Build, install and delete the temporary files. + make V=1 + make install + cd .. + rm -rf $$unpackdir + +# Make links for other possibly used names, copy citation and build +# the final target. 
ln -fs $(ibdir)/sex $(ibdir)/sextractor + ln -fs $(ibdir)/sex $(ibdir)/source-extractor cp $(dtexdir)/sextractor.tex $(ictdir)/ echo "SExtractor $(sextractor-version) \citep{sextractor}" > $@ $(ibidir)/swarp-$(swarp-version): $(ibidir)/fftw-$(fftw-version) - tarball=swarp-$(swarp-version).tar.gz + tarball=swarp-$(swarp-version).tar.lz $(call import-source, $(swarp-url), $(swarp-checksum)) - # See comment above 'missfits' for '-fcommon'. +# See comment above 'missfits' for '-fcommon'. $(call gbuild, swarp-$(swarp-version), static, \ CFLAGS="-fcommon" \ --enable-threads) @@ -1312,10 +1644,11 @@ $(ibidir)/swarp-$(swarp-version): $(ibidir)/fftw-$(fftw-version) echo "SWarp $(swarp-version) \citep{swarp}" > $@ $(ibidir)/swig-$(swig-version): - # Option --without-pcre was a suggestion once the configure step - # was tried and it failed. It was not recommended but it works! - # pcr is a dependency of swig - tarball=swig-$(swig-version).tar.gz + +# Option --without-pcre was a suggestion once the configure step was +# tried and it failed. It was not recommended but it works! pcr is a +# dependency of swig + tarball=swig-$(swig-version).tar.lz $(call import-source, $(swig-url), $(swig-checksum)) $(call gbuild, swig-$(swig-version), static, \ --without-pcre --without-tcl) @@ -1331,6 +1664,14 @@ $(ibidir)/swig-$(swig-version): # --disable-mount # --disable-wall # --disable-su +# Because they fail on older kernels (tested on Linux 2.6.32) +# and they need root (to actually use; so are not relevant to +# Maneage): +# --disable-swapon +# --disable-unshare +# --disable-libmount +# --disable-mountpoint +# --enable-libmount-support-mtab # # NOTE ON INSTALLATION DIRECTORY: Util-linux libraries are relatively # low-level and may cause conflicts with system libraries (especilly when @@ -1345,44 +1686,79 @@ $(ibidir)/swig-$(swig-version): # '$(ibdir)'. 
If any program does need 'util-linux' libraries, they can # simply add the proper directories to the environment variables, see # 'fontconfig' for example. -$(ibidir)/util-linux-$(util-linux-version): +$(ibidir)/util-linux-$(util-linux-version): \ + $(ibidir)/autoconf-$(autoconf-version) \ + $(ibidir)/automake-$(automake-version) \ + | $(idircustom) - # Import the source. - tarball=util-linux-$(util-linux-version).tar.xz +# Import the source. + tarball=util-linux-$(util-linux-version).tar.lz $(call import-source, $(util-linux-url), $(util-linux-checksum)) - # Unpack the source and set it to install in a special directory - # (as explained above). As shown below, later, we'll put a symbolic - # link of all the necessary binaries in the main '$(idir)/bin'. +# Unpack the source and set it to install in a special directory (as +# explained above). As shown below, later, we'll put a symbolic link +# of all the necessary binaries in the main '$(idir)/bin'. cd $(ddir) - tar xf $(tdir)/$$tarball + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions cd util-linux-$(util-linux-version) - ./configure --prefix=$(idir)/util-linux \ + $(shsrcdir)/prep-source.sh $(ibdir) + +# If a patch is necessary, apply it. + if [ -f $(patchdir)/util-linux-$(util-linux-version)-macos.patch ]; then + cp $(patchdir)/util-linux-$(util-linux-version)-macos.patch \ + util-linux-$(util-linux-version)-macos.patch + git apply util-linux-$(util-linux-version)-macos.patch + fi + +# The 'mkswap' feature needs low-level file system and kernel headers +# that are not always available (in particular on older Linux +# kernels). Also, creating SWAP space will need root permissions, so +# its not something a Maneager may need! Unfortunately there is no +# configuration option to disable this so we'll have to disable it +# manually by commenting the relevant files in the +# 'configure.ac'. 
+ sed -e's|UL_BUILD_INIT(\[mkswap\], \[yes\])|UL_BUILD_INIT(\[mkswap\], \[no\])|' \ + -i configure.ac + +# Having updated 'configure.ac', we need to re-generate the +# './configure' script with 'autoreconf' (which is part of Autoconf +# and needs Automake; hence why they are dependencies. + autoreconf -f + +# Configure Util-linux + export CONFIG_SHELL=$(ibdir)/bash + ./configure --prefix=$(idircustom)/util-linux \ --disable-dependency-tracking \ + --enable-libmount-support-mtab \ --disable-silent-rules \ + --disable-liblastlog2 \ + --disable-mountpoint \ + --disable-libmount \ + --disable-unshare \ --without-systemd \ --enable-libuuid \ + --disable-swapon \ --disable-mount \ --disable-ipcrm \ --disable-ipcs \ --disable-wall \ --disable-su - # Build and install it. +# Build and install it. make V=1 -j$(numthreads) make install - # Put a symbolic link to installed programs in main installation - # directory. If 'sbin' exists in the main installation directory, - # put util-linux's 'sbin/' directory there too. - ln -sf $(idir)/util-linux/bin/* $(ibdir)/ +# Put a symbolic link to installed programs in main installation +# directory. If 'sbin' exists in the main installation directory, put +# util-linux's 'sbin/' directory there too. + ln -sf $(idircustom)/util-linux/bin/* $(ibdir)/ if [ -d $(idir)/sbin ]; then - ln -sf $(idir)/util-linux/sbin/* $(idir)/sbin + ln -sf $(idircustom)/util-linux/sbin/* $(idir)/sbin else - ln -sf $(idir)/util-linux/sbin/* $(idir)/bin + ln -sf $(idircustom)/util-linux/sbin/* $(idir)/bin fi - # Clean up and write the main target. +# Clean up and write the main target. cd ../ rm -rf util-linux-$(util-linux-version) echo "util-Linux $(util-linux-version)" > $@ @@ -1425,12 +1801,13 @@ $(ibidir)/xlsxio-$(xlsxio-version): \ # useful in projects during its development, for more see the comment above # GNU Emacs. 
$(ibidir)/vim-$(vim-version): - tarball=vim-$(vim-version).tar.bz2 + tarball=vim-$(vim-version).tar.lz $(call import-source, $(vim-url), $(vim-checksum)) cd $(ddir) - tar xf $(tdir)/$$tarball - n=$$(echo $(vim-version) | sed -e's|\.||') - cd $(ddir)/vim$$n + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + unpackdir=vim-$(vim-version) + cd $(ddir)/$$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) ./configure --prefix=$(idir) \ --disable-canberra \ --enable-multibyte \ @@ -1443,9 +1820,29 @@ $(ibidir)/vim-$(vim-version): make -j$(numthreads) make install cd .. - rm -rf vim$$n + rm -rf $$unpackdir echo "VIM $(vim-version)" > $@ +$(ibidir)/unzip-$(unzip-version): $(ibidir)/gzip-$(gzip-version) + tarball=unzip-$(unzip-version).tar.lz + $(call import-source, $(unzip-url), $(unzip-checksum)) + $(call gbuild, unzip-$(unzip-version), static,, \ + -f unix/Makefile generic \ + CFLAGS="-DBIG_MEM -DMMAP",,pwd, \ + -f unix/Makefile generic \ + BINDIR=$(ibdir) MANDIR=$(idir)/man/man1 ) + echo "Unzip $(unzip-version)" > $@ + +$(ibidir)/zip-$(zip-version): $(ibidir)/gzip-$(gzip-version) + tarball=zip-$(zip-version).tar.lz + $(call import-source, $(zip-url), $(zip-checksum)) + $(call gbuild, zip-$(zip-version), static,, \ + -f unix/Makefile generic \ + CFLAGS="-DBIG_MEM -DMMAP",,pwd, \ + -f unix/Makefile generic \ + BINDIR=$(ibdir) MANDIR=$(idir)/man/man1 ) + echo "Zip $(zip-version)" > $@ + @@ -1455,7 +1852,7 @@ $(ibidir)/vim-$(vim-version): # hard to track for Make (as a target). Also, TeX in general is optional # for the project (the processing is the main target, not the generation of # the final PDF). So we'll make a simple ASCII file called -# `texlive-ready-tlmgr' and use its contents to mark if we can use it or +# 'texlive-ready-tlmgr' and use its contents to mark if we can use it or # not. # # TeX Live mirror @@ -1463,8 +1860,8 @@ $(ibidir)/vim-$(vim-version): # # The automatic mirror finding fails sometimes. 
So we'll manually set it to # use a fixed mirror. I first tried the LaTeX root webpage -# (`ftp.dante.de'), however, it is far too slow (when I tested it). The -# `rit.edu' server seems to be a good alternative (given the importance of +# ('ftp.dante.de'), however, it is far too slow (when I tested it). The +# 'rit.edu' server seems to be a good alternative (given the importance of # NY on the internet infrastructure). texlive-url=http://mirrors.rit.edu/CTAN/systems/texlive/tlnet $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf @@ -1472,92 +1869,103 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf tarball=install-tl-unx.tar.gz $(call import-source, $(texlive-url), NO-CHECK-SUM) - # Unpack, enter the directory, and install based on the given - # configuration (prerequisite of this rule). +# Unpack, enter the directory, and install based on the given +# configuration (prerequisite of this rule). @topdir=$$(pwd) cd $(ddir) rm -rf install-tl-* - tar xf $(tdir)/install-tl-unx.tar.gz + tar -xf $(tdir)/install-tl-unx.tar.gz \ + --no-same-owner --no-same-permissions cd install-tl-* + $(shsrcdir)/prep-source.sh $(ibdir) sed -e's|@installdir[@]|$(idir)|g' \ "$$topdir"/reproduce/software/config/texlive.conf \ > texlive.conf - # TeX Live's installation may fail due to any reason. But TeX Live - # is optional (only necessary for building the final PDF). So we - # don't want the configure script to fail if it can't run. - # Possible error messages will be saved into `log.txt' and if it - # fails, 'log.txt' will be checked to see if the error is due to - # the different version of the current tarball and the TeXLive - # server or something else. - # - # The problem with versions is this: each installer tarball (that - # is downloaded and a user may backup) is for a specific version of - # TeXLive (specified by year, usually around April). 
So if a user - # has an old tarball, but the CTAN server has been updated, the - # script will fail with a message like this: - # - # ============================================================= - # ./install-tl: The TeX Live versions of the local installation - # and the repository being accessed are not compatible: - # local: 2019 - # repository: 2020 - # Perhaps you need to use a different CTAN mirror? - # (For more, see the output of install-tl --help, especially the - # -repository option. Online via https://tug.org/texlive/doc.) - # ============================================================= - # - # To address this problem, when this happens, we simply download a - # the most recent tarball, and if it succeeds, we will build - # TeXLive using that. The old tarball will be preserved, but will - # have an '-OLD' suffix after it. +# We do not build TeXLive from source and for its installation it +# downloads components from the web internally; and those components +# can use '/bin/sh' (which will need '$(sys_library_sh_path)'). + export LD_LIBRARY_PATH="$(sys_library_sh_path):$$LD_LIBRARY_PATH" + +# TeX Live's installation may fail due to any reason. But TeX Live is +# optional (only necessary for building the final PDF). So we don't +# want the configure script to fail if it can't run. Possible error +# messages will be saved into 'log.txt' and if it fails, 'log.txt' +# will be checked to see if the error is due to the different version +# of the current tarball and the TeXLive server or something else. +# +# The problem with versions is this: each installer tarball (that is +# downloaded and a user may backup) is for a specific version of +# TeXLive (specified by year, usually around April). 
So if a user has +# an old tarball, but the CTAN server has been updated, the script +# will fail with a message like this: +# +# ============================================================= +# ./install-tl: The TeX Live versions of the local installation +# and the repository being accessed are not compatible: +# local: 2019 +# repository: 2020 +# Perhaps you need to use a different CTAN mirror? +# (For more, see the output of install-tl --help, especially the +# -repository option. Online via https://tug.org/texlive/doc.) +# ============================================================= +# +# To address this problem, when this happens, we simply download a +# the most recent tarball, and if it succeeds, we will build TeXLive +# using that. The old tarball will be preserved, but will have an +# '-OLD' suffix after it. if ./install-tl --profile=texlive.conf -repository \ $(texlive-url) 2> log.txt; then - # Put a symbolic link of the TeX Live executables in `ibdir' to - # avoid all the complexities of its sub-directories and additions - # to PATH. +# Put a symbolic link of the TeX Live executables in 'ibdir' to +# avoid all the complexities of its sub-directories and additions +# to PATH. ln -fs $(idir)/texlive/maneage/bin/*/* $(ibdir)/ - # Register that the build was successful. +# Register that the build was successful. echo "TeX Live is ready." > $@ - # The build failed! +# The build failed! else - # Print on the command line the error messages during the - # installation. +# Print on the command line the error messages during the +# installation. cat log.txt - # Look for words `repository:' and `local:' in `log.txt' and make - # sure that two lines are returned. Note that we need to check - # for two lines because one of them may exist, but another may - # not (in this case, its not a version conflict scenario). +# Look for words 'repository:' and 'local:' in 'log.txt' and make +# sure that two lines are returned. 
Note that we need to check for +# two lines because one of them may exist, but another may not (in +# this case, its not a version conflict scenario). version_check=$$(grep -w 'repository:\|local:' log.txt | wc -l) - # If these words exists and two lines are found, there is a - # conflict with the main TeXLive version in the tarball and on - # the server. So it is necessary to move the old tarball and - # download the new one to install it. +# If these words exists and two lines are found, there is a +# conflict with the main TeXLive version in the tarball and on the +# server. So it is necessary to move the old tarball and download +# the new one to install it. if [ x"$$version_check" = x2 ]; then - # Go back to the top project directory, don't remove the - # tarball, just rename it. + +# Go back to the top project directory, don't remove the tarball, +# just rename it. cd $$topdir mv $(tdir)/install-tl-unx.tar.gz $(tdir)/install-tl-unx-OLD.tar.gz - # Download using the script specially defined for this job. If - # the download of new tarball success, install it (same lines - # than above). If not, record the fail into the target. +# Download using the script specially defined for this job. If +# the download of new tarball success, install it (same lines +# than above). If not, record the fail into the target. 
url=http://mirror.ctan.org/systems/texlive/tlnet tarballurl=$$url/install-tl-unx.tar.gz touch $(lockdir)/download downloader="wget --no-use-server-timestamps -O" - if $(downloadwrapper) "$$downloader" $(lockdir)/download \ - $$tarballurl "$(tdir)/install-tl-unx.tar.gz" \ - "$(backupservers)"; then + if $(downloadwrapper) "$$downloader" \ + $(lockdir)/download \ + $$tarballurl \ + "$(tdir)/install-tl-unx.tar.gz" \ + "$(backupservers)"; then cd $(ddir) rm -rf install-tl-* - tar xf $(tdir)/install-tl-unx.tar.gz + tar -xf $(tdir)/install-tl-unx.tar.gz \ + --no-same-owner --no-same-permissions cd install-tl-* + $(shsrcdir)/prep-source.sh $(ibdir) sed -e's|@installdir[@]|$(idir)|g' \ $$topdir/reproduce/software/config/texlive.conf \ > texlive.conf @@ -1576,7 +1984,7 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf fi fi - # Clean up +# Clean up cd .. rm -rf install-tl-* @@ -1588,51 +1996,72 @@ $(itidir)/texlive-ready-tlmgr: reproduce/software/config/texlive.conf # Live itself (only very basic TeX and LaTeX) and the installation of its # necessary packages into two packages. # -# Note that Biber needs to link with libraries like libnsl. However, we -# don't currently build biber from source. So we can't choose the library -# version. But we have the source and build instructions for the `nsl' -# library. When we later build biber from source, we can easily use them. +# Note that we do not build the TeXLive executables (like Biber) from +# source. So in case they need special libraries, we can't choose the +# library version here (for example see [1] and [2]). In such cases there +# is no solution but to manually add the location necessary library to +# LD_LIBRARY_PATH when calling the respective LaTeX command in +# 'reproduce/analysis/make/paper.mk'. Fortunately as of Biber 2.20, it does +# not depend on anything except the C library (all dependencies are now +# statically linked), so problems [1] and [2] will not happen. 
But this can +# generally happen for any other tool/OS, so it is important to build +# TeXLive from source as soon as possible [3]. +# [1] https://github.com/plk/biber/issues/445 +# [2] https://savannah.nongnu.org/bugs/index.php?63175 +# [3] https://savannah.nongnu.org/task/?15267 $(itidir)/texlive: reproduce/software/config/texlive-packages.conf \ $(itidir)/texlive-ready-tlmgr - # To work with TeX live installation, we'll need the internet. +# To work with TeX live installation, we'll need the internet. @res=$$(cat $(itidir)/texlive-ready-tlmgr) if [ x"$$res" = x"NOT!" ]; then echo "" > $@ else - # To update itself, tlmgr needs a backup directory. + +# We do not build TeXLive from source and for its installation it +# downloads components from the web internally; and those +# components can use '/bin/sh' (which needs 'sys_library_sh_path'). + export LD_LIBRARY_PATH="$(sys_library_sh_path)" + +# To update itself, tlmgr needs a backup directory. backupdir=$(idir)/texlive/backups mkdir -p $$backupdir - # Before checking LaTeX packages, update tlmgr itself. +# Before checking LaTeX packages, update tlmgr itself. tlmgr option backupdir $$backupdir tlmgr -repository $(texlive-url) update --self - # Install all the extra necessary packages. If LaTeX complains - # about not finding a command/file/what-ever/XXXXXX, simply run - # the following command to find which package its in, then add it - # to the `texlive-packages' variable of the first prerequisite. - # - # ./.local/bin/tlmgr info XXXXXX - # - # We are putting a notice, because if there is no internet, - # `tlmgr' just hangs waiting. +# Install all the extra necessary packages. If LaTeX complains +# about not finding a command/file/what-ever/XXXXXX, simply run the +# following command to find which package it's in, then add it to +# the 'texlive-packages' variable of the first prerequisite. 
+# +# ./.local/bin/tlmgr info XXXXXX +# +# We are putting a notice, because if there is no internet, 'tlmgr' +# just hangs waiting. tlmgr install $(texlive-packages) - # Make a symbolic link of all the TeX Live executables in the bin - # directory so we don't have to modify `PATH'. +# Make a symbolic link of all the TeX Live executables in the bin +# directory so we don't have to modify 'PATH'. ln -fs $(idir)/texlive/maneage/bin/*/* $(ibdir)/ - # Get all the necessary versions. +# Correct any reference to '/bin/sh' within the installed LaTeX +# files (this is because we do not yet install LaTeX from source): + cdir=$$(pwd) + cd $(idir)/texlive + cd $$cdir + +# Get all the necessary versions. texlive=$$(pdflatex --version \ | awk 'NR==1' \ | sed 's/.*(\(.*\))/\1/' \ | awk '{print $$NF}'); - # Package names and versions. Note that all TeXLive packages - # don't have a version unfortunately! So we need to also read the - # `revision' and `cat-date' elements and print them incase - # version isn't available. +# Package names and versions. Note that all TeXLive packages +# don't have a version unfortunately! So we need to also read the +# 'revision' and 'cat-date' elements and print them in case +# version isn't available. tlmgr info $(texlive-packages) --only-installed | awk \ '$$1=="package:" { \ if(name!=0) \ diff --git a/reproduce/software/make/python.mk b/reproduce/software/make/python.mk index 18c68de..c994e3f 100644 --- a/reproduce/software/make/python.mk +++ b/reproduce/software/make/python.mk @@ -4,7 +4,7 @@ # !!!!! IMPORTANT NOTES !!!!! # # This Makefile will be loaded into 'high-level.mk', which is called by the -# `./project configure' script. It is not included into the project +# './project configure' script. It is not included into the project # afterwards. 
# # This Makefile contains instructions to build all the Python-related @@ -12,8 +12,8 @@ # # ------------------------------------------------------------------------ # -# Copyright (C) 2019-2021 Raul Infante-Sainz <infantesainz@gmail.com> -# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2019-2025 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,7 +35,7 @@ # Python enviroment # ----------------- # -# The main Python environment variable is `PYTHONPATH'. However, so far we +# The main Python environment variable is 'PYTHONPATH'. However, so far we # have found several other Python-related environment variables on some # systems which might interfere. To be safe, we are removing all their # values. @@ -51,6 +51,8 @@ export MPI_PYTHON3_SITEARCH := # Python-specific installation directories. python-major-version = $(shell echo $(python-version) | awk 'BEGIN{FS="."} \ {printf "%d.%d\n", $$1, $$2}') +# This is for 'installer' (the python installer) itself: +ilibpymajorversion = $(idir)/lib/python$(python-major-version) @@ -62,19 +64,20 @@ python-major-version = $(shell echo $(python-version) | awk 'BEGIN{FS="."} \ # While this Makefile is for Python programs, in some cases, we need # certain programs (like Python itself), or libraries for the modules. $(ibidir)/libffi-$(libffi-version): - # Prepare the source. - tarball=libffi-$(libffi-version).tar.gz + +# Prepare the source. + tarball=libffi-$(libffi-version).tar.lz $(call import-source, $(libffi-url), $(libffi-checksum)) - # Build libffi. +# Build libffi. $(call gbuild, libffi-$(libffi-version), , \ CFLAGS="-DNO_JAVA_RAW_API=1") - # On some Fedora systems, libffi installs in `lib64', not - # `lib'. This will cause problems when building setuptools - # later. 
To fix this problem, we'll first check if this has indeed - # happened (it exists under `lib64', but not under `lib'). If so, - # we'll put a copy of the installed libffi libraries in `lib'. +# On some Fedora systems, libffi installs in 'lib64', not 'lib'. This +# will cause problems when building setuptools later. To fix this +# problem, we'll first check if this has indeed happened (it exists +# under 'lib64', but not under 'lib'). If so, we'll put a copy of the +# installed libffi libraries in 'lib'. if [ -f $(idir)/lib64/libffi.a ] && ! [ -f $(idir)/lib/libffi.a ]; then cp $(idir)/lib64/libffi* $(ildir)/ fi @@ -82,23 +85,58 @@ $(ibidir)/libffi-$(libffi-version): $(ibidir)/python-$(python-version): $(ibidir)/libffi-$(libffi-version) - # Download the source. - tarball=python-$(python-version).tar.gz +# Download the source. + tarball=python-$(python-version).tar.lz $(call import-source, $(python-url), $(python-checksum)) - # On Mac systems, the build complains about `clang' specific - # features, so we can't use our own GCC build here. +# On Mac systems, the build complains about 'clang' specific +# features, so we can't use our own GCC build here. if [ x$(on_mac_os) = xyes ]; then export CC=clang export CXX=clang++ fi - $(call gbuild, Python-$(python-version),, \ - --without-ensurepip \ - --with-system-ffi \ - --enable-shared, -j$(numthreads)) + +# Unpack the tarball (see below for the necessary modification). + cd $(ddir) + unpackdir=python-$(python-version) + tar -xf $(tdir)/$$tarball --no-same-owner --no-same-permissions + cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) + +# Python's 'setup.py' uses 'os.system' to run shell scripts. On the +# other hand 'os.system' only runs '/bin/sh' (which has its own +# libraries to link to and those are blocked at this level). So we +# need to add an extra line on top of the 'os.system' funciton and +# put '/usr/lib' in 'LD_LIBRARY_PATH' within Python's environment for +# system calls (with 'os.putenv'). 
As of Python 3.13.2 the tarball no +# longer has a 'setup.py'. But when it did, the change below was +# necessary. + if [ -f setup.py ]; then + awk '{if(/os.system\(/) \ + { print " os.putenv(\"LD_LIBRARY_PATH\", \"$$LD_LIBRARY_PATH:$(sys_library_sh_path)\");"; \ + print $$0;} \ + else print $$0}' \ + setup.py > setup-tmp.py + mv setup-tmp.py setup.py + fi + +# Do the basic installation and delete the temporary directory. + ./configure SHELL=$(ibdir)/bash \ + --enable-optimizations \ + --without-ensurepip \ + --prefix="$(idir)" \ + --with-system-ffi \ + --enable-shared + $(makewshell) -j$(numthreads) + $(makewshell) install -j$(numthreads) + cd .. + rm -rf $$unpackdir + +# Set the necessary environment variables and finish the build. ln -sf $(ildir)/python$(python-major-version) $(ildir)/python ln -sf $(ibdir)/python$(python-major-version) $(ibdir)/python - ln -sf $(iidir)/python$(python-major-version)m $(iidir)/python$(python-major-version) + ln -sf $(iidir)/python$(python-major-version)m \ + $(iidir)/python$(python-major-version) rm -rf $(ipydir) mkdir $(ipydir) echo "Python $(python-version)" > $@ @@ -107,39 +145,138 @@ $(ibidir)/python-$(python-version): $(ibidir)/libffi-$(libffi-version) -# Non-PiP Python module installation +# Non-pip Python module installation # ---------------------------------- # -# To build Python packages with direct access to a `setup.py' (if no direct -# access to `setup.py' is needed, pip can be used). Note that the -# software's packaged source code is the first prerequisite that is in the -# `tdir' directory. +# Build strategy for python modules as of February 2025, for python 3.13.2. + +# This strategy is mostly based on recommendations by E Schwartz +# (ztrawhcse) on #python (Libera Chat), in October 2022 and February +# 2025. Some discussions are documented in Savannah tasks [1][2]. The +# build strategy for 'python-installer' is inspired by the gentoo script +# 'python_domodule' [3]. 
+ +# Bootstrap-step: 'gpep517' [4], motivated by PEP 517 [5], together with +# 'python-installer' (module called 'installer') are built without +# dependences on other python packages apart from python itself. The build +# rules for these two packages do python byte compilation and copy the .py +# and .pyc files into the python install directory. These two packages are +# considered to be 'frontends'. + +# Once these two frontends are available, other packages that do building +# tasks, including both backends and alternative frontends or a mix of +# these (in particular: setuptools, meson [6]/ninja-build [7] , flit-core, +# and meson-python), can be built with the 'python-installer' and +# 'gpep517'. The aims of the various build tools are diverse, include +# ecosystem resilience, reproducibility, build speed and convenience in +# building bigger packages such as numpy, scipy and astropy. + +# The python.mk script now includes only three methods: the boot +# build methods of 'python-installer' and 'gpep517'; and the gpep517 +# frontend. No method is provided for using 'python-installer' directly; +# it is invoked indirectly by source files of many packages, which +# also give metadata describing information for build methods. + +# Why not pip? We do not build any python packages with 'pip' because we +# want to have a fully documented pipeline of (i) the original upstream +# locations of tarballs, (ii) the tarballs' checksums, and (iii) the exact +# sequence of build commands. + +# For an alternative viewpoint on a python build strategy, see [8]. + +# Prerequisite for the pybuild script here: the package's source code +# (tarball) must already be located in the directory 'tdir'. # -# Arguments of this function are the numbers +# Arguments: # 1) Unpack command # 2) Unpacked directory name after unpacking the tarball # 3) site.cfg file (optional). -# 4) Official software name (for paper). +# 4) Official software name (for paper.tex). 
+# 5) Obligatory parameter: build method (see below): +# BOOT_INSTALLER - only for 'python-installer' +# BOOT_GPEP517 - only for 'gpep517' +# GPEP517 - for any other python package # # Hooks: -# pyhook_before: optional steps before running `python setup.py build' -# pyhook_after: optional steps after running `python setup.py install' -pybuild = cd $(ddir); rm -rf $(2); \ - if ! $(1) $(tdir)/$$tarball; then \ +# pyhook_before: optional steps before running 'python setup.py build' +# pyhook_after: optional steps after running 'python setup.py install' + +# [1] https://savannah.nongnu.org/task/?16268 +# [2] https://savannah.nongnu.org/task/?16625 +# [3] https://gitweb.gentoo.org/repo/gentoo.git/tree/eclass/python-utils-r1.eclass#n646 +# [4] https://pypi.org/project/gpep517 +# [5] https://peps.python.org/pep-0517 +# [6] https://mesonbuild.com +# [7] https://ninja-build.org +# [8] https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html + + +pybuild = cd $(ddir); \ + packagedir=$(strip $(2)); \ + if (printf "$$packagedir" | grep "[a-z][a-z]"); then rm -rf $$packagedir; fi; \ + printf "\nStarting to install python package with maneage pybuild rule: $(4)\n ..."; \ + if ! 
$(1) $(tdir)/$$tarball --no-same-owner --no-same-permissions; then \ echo; echo "Tar error"; exit 1; \ fi; \ - cd $(2); \ + cd $$packagedir; \ if [ "x$(strip $(3))" != x ]; then \ sed -e 's|@LIBDIR[@]|'"$(ildir)"'|' \ -e 's|@INCDIR[@]|'"$(idir)/include"'|' \ $(3) > site.cfg; \ fi; \ if type pyhook_before &>/dev/null; then pyhook_before; fi; \ - python setup.py build; \ - python setup.py install; \ + printf "pybuild option5 = __ %s __\n" "$(strip $(5))"; \ + if [ "x$(strip $(5))" = xBOOT_INSTALLER ]; then \ + chmod 0644 src/installer/*.py; \ + mkdir -p $(ilibpymajorversion)/installer; \ + mkdir -p $(ilibpymajorversion)/installer/__pycache__ ; \ + mkdir -p $(ilibpymajorversion)/installer/_scripts; \ + mkdir -p $(ilibpymajorversion)/installer/_scripts/__pycache__ ; \ + cp -pv src/installer/*.py $(ilibpymajorversion)/installer/; \ + cp -pv src/installer/_scripts/__init__.py $(ilibpymajorversion)/installer/_scripts/; \ + cd src/installer/; \ + python -m compileall -o 1 -o 2 -l -f \ + -d $(ilibpymajorversion)/installer/ .; \ + chmod 0644 __pycache__/*.pyc; \ + cp -pv __pycache__/*.pyc \ + $(ilibpymajorversion)/installer/__pycache__; \ + cd -; \ + cd src/installer/_scripts/; \ + python -m compileall -o 1 -o 2 -l -f \ + -d $(ilibpymajorversion)/installer/_scripts/ __init__.py; \ + chmod 0644 __pycache__/*.pyc; \ + cp -pv __pycache__/*.pyc \ + $(ilibpymajorversion)/installer/_scripts/__pycache__; \ + cd -; \ + elif [ "x$(strip $(5))" = xBOOT_GPEP517 ]; then \ + chmod 0644 gpep517/*.py; \ + mkdir -p $(ilibpymajorversion)/gpep517; \ + mkdir -p $(ilibpymajorversion)/gpep517/__pycache__ ; \ + cp -pv gpep517/*.py $(ilibpymajorversion)/gpep517/; \ + cd gpep517/; \ + python -m compileall -o 1 -o 2 -l -f \ + -d $(ilibpymajorversion)/gpep517/ .; \ + chmod 0644 __pycache__/*.pyc; \ + cp -pv __pycache__/*.pyc \ + $(ilibpymajorversion)/gpep517/__pycache__; \ + cd -; \ + elif [ "x$(strip $(5))" = xGPEP517 ]; then \ + printf "\n\n\n=== python build method: gpep517 ====== "; pwd; \ + 
printf "...............\n\n\n"; \ + python -m gpep517 install-from-source \ + --prefix "" \ + --destdir $(idir) \ + --optimize all; \ + else \ + printf "Error: Unknown pybuild method for $$packagedir: __ $(strip $(5)) __\n"; \ + printf "The pybuild 5th parameter should very likely be set "; \ + printf "to GPEP517 after checking the build rule and "; \ + printf "upgrading if needed.\n"; \ + exit 1; \ + fi; \ if type pyhook_after &>/dev/null; then pyhook_after; fi; \ cd ..; \ - rm -rf $(2); \ + if (printf "$$packagedir" | grep "[a-z][a-z]"); then rm -fr $$packagedir; fi; \ echo "$(4)" > $@ @@ -148,18 +285,20 @@ pybuild = cd $(ddir); rm -rf $(2); \ # Python modules # --------------- -# + + # All the necessary Python modules go here. -$(ipydir)/asn1crypto-$(asn1crypto-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/asn1crypto-$(asn1crypto-version): \ + $(ipydir)/setuptools-$(setuptools-version) tarball=asn1crypto-$(asn1crypto-version).tar.gz $(call import-source, $(asn1crypto-url), $(asn1crypto-checksum)) - $(call pybuild, tar xf, asn1crypto-$(asn1crypto-version), , \ + $(call pybuild, tar -xf, asn1crypto-$(asn1crypto-version), , \ Asn1crypto $(asn1crypto-version)) $(ipydir)/asteval-$(asteval-version): $(ipydir)/numpy-$(numpy-version) tarball=asteval-$(asteval-version).tar.gz $(call import-source, $(asteval-url), $(asteval-checksum)) - $(call pybuild, tar xf, asteval-$(asteval-version), , \ + $(call pybuild, tar -xf, asteval-$(asteval-version), , \ ASTEVAL $(asteval-version)) $(ipydir)/astroquery-$(astroquery-version): \ @@ -168,96 +307,154 @@ $(ipydir)/astroquery-$(astroquery-version): \ $(ipydir)/requests-$(requests-version) tarball=astroquery-$(astroquery-version).tar.gz $(call import-source, $(astroquery-url), $(astroquery-checksum)) - $(call pybuild, tar xf, astroquery-$(astroquery-version), , \ + $(call pybuild, tar -xf, astroquery-$(astroquery-version), , \ Astroquery $(astroquery-version)) +# Astropy: points to consider about dependencies: +# +# 
The optional dependency 'h5py' that is necessary for writting tables in +# HDF5 format has been removed from Astropy because on macOS it cannot be +# installed. +# +# 2022-or-older dependencies: +# $(ibidir)/expat-$(expat-version) \ +# $(ipydir)/jinja2-$(jinja2-version) \ +# $(ipydir)/html5lib-$(html5lib-version) \ +# $(ipydir)/beautifulsoup4-$(beautifulsoup4-version) \ +# +# While the astropy pyproject.toml file says that the astropy build depends +# on numpy, not scipy, and does not depend on matplotlib; the +# runtime is recommended to depend on both scipy and matplotlib. +# In practice, users of astropy will generally expect scipy and matplotlib +# to be available at runtime, so we set these as prerequisites. $(ipydir)/astropy-$(astropy-version): \ - $(ipydir)/h5py-$(h5py-version) \ - $(ibidir)/expat-$(expat-version) \ $(ipydir)/scipy-$(scipy-version) \ - $(ipydir)/numpy-$(numpy-version) \ + $(ipydir)/pyerfa-$(pyerfa-version) \ $(ipydir)/pyyaml-$(pyyaml-version) \ - $(ipydir)/html5lib-$(html5lib-version) \ - $(ipydir)/beautifulsoup4-$(beautifulsoup4-version) + $(ipydir)/matplotlib-$(matplotlib-version) \ + $(ipydir)/astropy-iers-data-$(astropy-iers-data-version) \ + $(ipydir)/extension-helpers-$(extension-helpers-version) - # Download the source. - tarball=astropy-$(astropy-version).tar.gz +# Tarball and its preparation. + tarball=astropy-$(astropy-version).tar.lz $(call import-source, $(astropy-url), $(astropy-checksum)) - # Currently, when the Expat library is already built in a project - # (for example as a dependency of another program), Astropy's - # internal building of Expat will conflict with the project's. So - # we have added Expat as a dependency of Astropy (so it is always - # built before it, and we tell Astropy to use the project's - # libexpat. 
- pyhook_before () { - echo "" >> setup.cfg - echo "[build]" >> setup.cfg - echo "use_system_expat=1" >> setup.cfg - } - $(call pybuild, tar xf, astropy-$(astropy-version)) - cp $(dtexdir)/astropy.tex $(ictdir)/ +# Conservatively purge old version (but not astropy_iers, +# astropy-iers): + rm -fvr $(idir)/lib/python*/site-packages/astropy/ + rm -fvr $(idir)/lib/python*/site-packages/astropy-[0-9]*-info/ + rm -fv $(idir)/bin/fits{diff,check,header,info,2bitmap} + rm -fv $(idir)/bin/{samp_hub,showtable,volint,wcslint} + +# Do the basic build. + $(call pybuild, tar -xf, astropy-$(astropy-version),,, \ + GPEP517) + cp -pv $(dtexdir)/astropy.tex $(ictdir)/ echo "Astropy $(astropy-version) \citep{astropy2013,astropy2018}" > $@ +$(ipydir)/astropy-iers-data-$(astropy-iers-data-version): \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=astropy-iers-data-$(astropy-iers-data-version).tar.lz + $(call import-source, $(astropy-iers-data-url), \ + $(astropy-iers-data-checksum)) + $(call pybuild, tar -xf, \ + astropy-iers-data-$(astropy-iers-data-version),,, \ + GPEP517) + echo "Astropy-Iers-Data $(astropy-iers-data-version)" > $@ + $(ipydir)/beautifulsoup4-$(beautifulsoup4-version): \ $(ipydir)/soupsieve-$(soupsieve-version) - tarball=beautifulsoup4-$(beautifulsoup4-version).tar.gz + tarball=beautifulsoup4-$(beautifulsoup4-version).tar.lz $(call import-source, $(beautifulsoup4-url), $(beautifulsoup4-checksum)) - $(call pybuild, tar xf, beautifulsoup4-$(beautifulsoup4-version), , \ + $(call pybuild, tar -xf, beautifulsoup4-$(beautifulsoup4-version), , \ BeautifulSoup $(beautifulsoup4-version)) -$(ipydir)/certifi-$(certifi-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/beniget-$(beniget-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=beniget-$(beniget-version).tar.lz + $(call import-source, $(beniget-url), $(beniget-checksum)) + $(call pybuild, tar -xf, beniget-$(beniget-version), , \ + 
Beniget $(beniget-version)) + +$(ipydir)/certifi-$(certifi-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=certifi-$(certifi-version).tar.gz $(call import-source, $(certifi-url), $(certifi-checksum)) - $(call pybuild, tar xf, certifi-$(certifi-version), , \ + $(call pybuild, tar -xf, certifi-$(certifi-version), , \ Certifi $(certifi-version)) $(ipydir)/cffi-$(cffi-version): \ $(ibidir)/libffi-$(libffi-version) \ $(ipydir)/pycparser-$(pycparser-version) - tarball=cffi-$(cffi-version).tar.gz + tarball=cffi-$(cffi-version).tar.lz $(call import-source, $(cffi-url), $(cffi-checksum)) - $(call pybuild, tar xf, cffi-$(cffi-version), ,cffi $(cffi-version)) + $(call pybuild, tar -xf, cffi-$(cffi-version), ,cffi $(cffi-version)) -$(ipydir)/chardet-$(chardet-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/chardet-$(chardet-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=chardet-$(chardet-version).tar.gz $(call import-source, $(chardet-url), $(chardet-checksum)) - $(call pybuild, tar xf, chardet-$(chardet-version), , \ + $(call pybuild, tar -xf, chardet-$(chardet-version), , \ Chardet $(chardet-version)) +$(ipydir)/contourpy-$(contourpy-version): \ + $(ipydir)/pybind11-$(pybind11-version) \ + $(ipydir)/meson-python-$(meson-python-version) + tarball=contourpy-$(contourpy-version).tar.lz + $(call import-source, $(contourpy-url), $(contourpy-checksum)) + $(call pybuild, tar -xf, contourpy-$(contourpy-version), , \ + Contourpy $(contourpy-version), GPEP517) + echo "Contourpy $(contourpy-version)" > $@ + $(ipydir)/corner-$(corner-version): $(ipydir)/matplotlib-$(matplotlib-version) tarball=corner-$(corner-version).tar.gz $(call import-source, $(corner-url), $(corner-checksum)) - $(call pybuild, tar xf, corner-$(corner-version), , \ + $(call pybuild, tar -xf, corner-$(corner-version), , \ Corner $(corner-version)) cp 
$(dtexdir)/corner.tex $(ictdir)/ echo "Corner $(corner-version) \citep{corner}" > $@ +$(ipydir)/cppy-$(cppy-version): \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) + tarball=cppy-$(cppy-version).tar.lz + $(call import-source, $(cppy-url), $(cppy-checksum)) + $(call pybuild, tar -xf, cppy-$(cppy-version), , \ + Cppy $(cppy-version), GPEP517) + $(ipydir)/cryptography-$(cryptography-version): \ $(ipydir)/cffi-$(cffi-version) \ - $(ipydir)/asn1crypto-$(asn1crypto-version) - tarball=cryptography-$(cryptography-version).tar.gz + $(ipydir)/asn1crypto-$(asn1crypto-version) \ + $(ipydir)/setuptools-rust-$(setuptools-rust-version) + tarball=cryptography-$(cryptography-version).tar.lz $(call import-source, $(cryptography-url), $(cryptography-checksum)) - $(call pybuild, tar xf, cryptography-$(cryptography-version), , \ + $(call pybuild, tar -xf, cryptography-$(cryptography-version), , \ Cryptography $(cryptography-version)) $(ipydir)/cycler-$(cycler-version): $(ipydir)/six-$(six-version) - tarball=cycler-$(cycler-version).tar.gz + tarball=cycler-$(cycler-version).tar.lz $(call import-source, $(cycler-url), $(cycler-checksum)) - $(call pybuild, tar xf, cycler-$(cycler-version), , \ - Cycler $(cycler-version)) + $(call pybuild, tar -xf, cycler-$(cycler-version), , \ + Cycler $(cycler-version), GPEP517) + echo "Cycler $(cycler-version)" > $@ -$(ipydir)/cython-$(cython-version): $(ipydir)/setuptools-$(setuptools-version) - tarball=cython-$(cython-version).tar.gz +$(ipydir)/cython-$(cython-version): \ + $(ipydir)/python-installer-$(python-installer-version) \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=cython-$(cython-version).tar.lz $(call import-source, $(cython-url), $(cython-checksum)) - $(call pybuild, tar xf, Cython-$(cython-version)) - cp $(dtexdir)/cython.tex $(ictdir)/ + $(call pybuild, tar -xf, cython-$(cython-version),,, GPEP517) + cp -pv $(dtexdir)/cython.tex $(ictdir)/ echo "Cython $(cython-version) 
\citep{cython2011}" > $@ $(ipydir)/esutil-$(esutil-version): $(ipydir)/numpy-$(numpy-version) - tarball=esutil-$(esutil-version).tar.gz + export CFLAGS="-std=c++14 $$CFLAGS" + tarball=esutil-$(esutil-version).tar.lz $(call import-source, $(esutil-url), $(esutil-checksum)) - $(call pybuild, tar xf, esutil-$(esutil-version), , \ + $(call pybuild, tar -xf, esutil-$(esutil-version), , \ esutil $(esutil-version)) $(ipydir)/eigency-$(eigency-version): \ @@ -266,36 +463,68 @@ $(ipydir)/eigency-$(eigency-version): \ $(ipydir)/cython-$(cython-version) tarball=eigency-$(eigency-version).tar.gz $(call import-source, $(eigency-url), $(eigency-checksum)) - $(call pybuild, tar xf, eigency-$(eigency-version), , \ + $(call pybuild, tar -xf, eigency-$(eigency-version), , \ eigency $(eigency-version)) $(ipydir)/emcee-$(emcee-version): \ $(ipydir)/numpy-$(numpy-version) \ - $(ipydir)/setuptools_scm-$(setuptools_scm-version) + $(ipydir)/setuptools-scm-$(setuptools-scm-version) tarball=emcee-$(emcee-version).tar.gz $(call import-source, $(emcee-url), $(emcee-checksum)) - $(call pybuild, tar xf, emcee-$(emcee-version), , \ + $(call pybuild, tar -xf, emcee-$(emcee-version), , \ emcee $(emcee-version)) $(ipydir)/entrypoints-$(entrypoints-version): \ - $(ipydir)/setuptools-$(setuptools-version) + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=entrypoints-$(entrypoints-version).tar.gz $(call import-source, $(entrypoints-url), $(entrypoints-checksum)) - $(call pybuild, tar xf, entrypoints-$(entrypoints-version), , \ + $(call pybuild, tar -xf, entrypoints-$(entrypoints-version), , \ EntryPoints $(entrypoints-version)) +$(ipydir)/extension-helpers-$(extension-helpers-version): \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) + tarball=extension-helpers-$(extension-helpers-version).tar.lz + $(call import-source, $(extension-helpers-url), \ + $(extension-helpers-checksum)) + $(call pybuild, tar -xf, \ + 
extension-helpers-$(extension-helpers-version),, \ + Extension-Helpers $(extension-helpers-version), GPEP517) + $(ipydir)/flake8-$(flake8-version): \ $(ipydir)/pyflakes-$(pyflakes-version) \ $(ipydir)/pycodestyle-$(pycodestyle-version) tarball=flake8-$(flake8-version).tar.gz $(call import-source, $(flake8-url), $(flake8-checksum)) - $(call pybuild, tar xf, flake8-$(flake8-version), , \ + $(call pybuild, tar -xf, flake8-$(flake8-version), , \ Flake8 $(flake8-version)) -$(ipydir)/future-$(future-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/flit-core-$(flit-core-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=flit-core-$(flit-core-version).tar.lz + $(call import-source, $(flit-core-url), $(flit-core-checksum)) + $(call pybuild, tar -xf, flit-core-$(flit-core-version), , \ + Flit-core $(flit-core-version), GPEP517) + +# Although cython is not an obligatory prerequisite of fonttools, we force +# it as a prerequisite for reproducibility; otherwise build parallelism may +# lead to some builds with and some builds without cython, depending on how +# many cpus the host machine has. 
+$(ipydir)/fonttools-$(fonttools-version): \ + $(ipydir)/cython-$(cython-version) \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=fonttools-$(fonttools-version).tar.lz + $(call import-source, $(fonttools-url), $(fonttools-checksum)) + $(call pybuild, tar -xf, fonttools-$(fonttools-version), , \ + fonttools $(fonttools-version), GPEP517) + +$(ipydir)/future-$(future-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=future-$(future-version).tar.gz $(call import-source, $(future-url), $(future-checksum)) - $(call pybuild, tar xf, future-$(future-version), , \ + $(call pybuild, tar -xf, future-$(future-version), , \ Future $(future-version)) $(ipydir)/galsim-$(galsim-version): \ @@ -304,12 +533,31 @@ $(ipydir)/galsim-$(galsim-version): \ $(ipydir)/eigency-$(eigency-version) \ $(ipydir)/pybind11-$(pybind11-version) \ $(ipydir)/lsstdesccoord-$(lsstdesccoord-version) - tarball=galsim-$(galsim-version).tar.gz + tarball=galsim-$(galsim-version).tar.lz $(call import-source, $(galsim-url), $(galsim-checksum)) - $(call pybuild, tar xf, GalSim-$(galsim-version)) + $(call pybuild, tar -xf, galsim-$(galsim-version)) cp $(dtexdir)/galsim.tex $(ictdir)/ echo "Galsim $(galsim-version) \citep{galsim}" > $@ +$(ipydir)/gast-$(gast-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=gast-$(gast-version).tar.lz + $(call import-source, $(gast-url), $(gast-checksum)) + $(call pybuild, tar -xf, gast-$(gast-version), , \ + Gast $(gast-version)) + +$(ipydir)/gpep517-$(gpep517-version): \ + $(ibidir)/python-$(python-version) + tarball=gpep517-$(gpep517-version).tar.lz + $(call import-source, $(gpep517-url), $(gpep517-checksum)) + $(call pybuild, tar -xf, \ + gpep517-$(gpep517-version), \ + , \ + gpep517 $(gpep517-version), \ + BOOT_GPEP517) + echo "gpep517 $(gpep517-version)" > $@ + $(ipydir)/h5py-$(h5py-version): \ $(ipydir)/six-$(six-version) \ 
$(ibidir)/hdf5-$(hdf5-version) \ @@ -321,15 +569,15 @@ $(ipydir)/h5py-$(h5py-version): \ export HDF5_DIR=$(ildir) tarball=h5py-$(h5py-version).tar.gz $(call import-source, $(h5py-url), $(h5py-checksum)) - $(call pybuild, tar xf, h5py-$(h5py-version), , \ + $(call pybuild, tar -xf, h5py-$(h5py-version), , \ h5py $(h5py-version)) -# `healpy' is actually installed as part of the HEALPix package. It will be +# 'healpy' is actually installed as part of the HEALPix package. It will be # installed with its C/C++ libraries if any other Python library is -# requested with HEALPix. So actually calling for `healpix' (when `healpix' +# requested with HEALPix. So actually calling for 'healpix' (when 'healpix' # is requested) is not necessary. But some users might not know about this -# and just ask for `healpy'. To avoid confusion in such cases, we'll just -# set `healpy' to be dependent on `healpix' and not download any tarball +# and just ask for 'healpy'. To avoid confusion in such cases, we'll just +# set 'healpy' to be dependent on 'healpix' and not download any tarball # for it, or write anything in the final target. 
$(ipydir)/healpy-$(healpy-version): $(ibidir)/healpix-$(healpix-version) touch $@ @@ -339,35 +587,49 @@ $(ipydir)/html5lib-$(html5lib-version): \ $(ipydir)/webencodings-$(webencodings-version) tarball=html5lib-$(html5lib-version).tar.gz $(call import-source, $(html5lib-url), $(html5lib-checksum)) - $(call pybuild, tar xf, html5lib-$(html5lib-version), , \ + $(call pybuild, tar -xf, html5lib-$(html5lib-version), , \ HTML5lib $(html5lib-version)) -$(ipydir)/idna-$(idna-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/idna-$(idna-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=idna-$(idna-version).tar.gz $(call import-source, $(idna-url), $(idna-checksum)) - $(call pybuild, tar xf, idna-$(idna-version), , \ + $(call pybuild, tar -xf, idna-$(idna-version), , \ idna $(idna-version)) -$(ipydir)/jeepney-$(jeepney-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/jeepney-$(jeepney-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=jeepney-$(jeepney-version).tar.gz $(call import-source, $(jeepney-url), $(jeepney-checksum)) - $(call pybuild, tar xf, jeepney-$(jeepney-version), , \ + $(call pybuild, tar -xf, jeepney-$(jeepney-version), , \ Jeepney $(jeepney-version)) +$(ipydir)/jinja2-$(jinja2-version): $(ipydir)/markupsafe-$(markupsafe-version) + tarball=jinja2-$(jinja2-version).tar.lz + $(call import-source, $(jinja2-url), $(jinja2-checksum)) + $(call pybuild, tar -xf, jinja2-$(jinja2-version), , \ + Jinja2 $(jinja2-version)) + $(ipydir)/keyring-$(keyring-version): \ $(ipydir)/entrypoints-$(entrypoints-version) \ $(ipydir)/secretstorage-$(secretstorage-version) \ - $(ipydir)/setuptools_scm-$(setuptools_scm-version) + $(ipydir)/setuptools-scm-$(setuptools-scm-version) tarball=keyring-$(keyring-version).tar.gz $(call import-source, $(keyring-url), $(keyring-checksum)) - $(call pybuild, tar xf, 
keyring-$(keyring-version), , \ + $(call pybuild, tar -xf, keyring-$(keyring-version), , \ Keyring $(keyring-version)) -$(ipydir)/kiwisolver-$(kiwisolver-version): $(ipydir)/setuptools-$(setuptools-version) - tarball=kiwisolver-$(kiwisolver-version).tar.gz +$(ipydir)/kiwisolver-$(kiwisolver-version): \ + $(ipydir)/cppy-$(cppy-version) \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) + tarball=kiwisolver-$(kiwisolver-version).tar.lz $(call import-source, $(kiwisolver-url), $(kiwisolver-checksum)) - $(call pybuild, tar xf, kiwisolver-$(kiwisolver-version), , \ - Kiwisolver $(kiwisolver-version)) + $(call pybuild, tar -xf, kiwisolver-$(kiwisolver-version), , \ + Kiwisolver $(kiwisolver-version), GPEP517) + cp -pv $(dtexdir)/kiwisolver.tex $(ictdir)/ + echo "Kiwisolver $(kiwisolver-version) \citep{cassowary2001}" > $@ $(ipydir)/lmfit-$(lmfit-version): \ $(ipydir)/six-$(six-version) \ @@ -379,238 +641,421 @@ $(ipydir)/lmfit-$(lmfit-version): \ $(ipydir)/uncertainties-$(uncertainties-version) tarball=lmfit-$(lmfit-version).tar.gz $(call import-source, $(lmfit-url), $(lmfit-checksum)) - $(call pybuild, tar xf, lmfit-$(lmfit-version), , \ + $(call pybuild, tar -xf, lmfit-$(lmfit-version), , \ LMFIT $(lmfit-version)) $(ipydir)/lsstdesccoord-$(lsstdesccoord-version): \ - $(ipydir)/setuptools-$(setuptools-version) + $(ipydir)/cffi-$(cffi-version) \ + $(ipydir)/numpy-$(numpy-version) \ + $(ipydir)/future-$(future-version) tarball=lsstdesccoord-$(lsstdesccoord-version).tar.gz $(call import-source, $(lsstdesccoord-url), $(lsstdesccoord-checksum)) - $(call pybuild, tar xf, LSSTDESC.Coord-$(lsstdesccoord-version), , \ + $(call pybuild, tar -xf, LSSTDESC.Coord-$(lsstdesccoord-version), , \ LSSTDESC.Coord $(lsstdesccoord-version)) +$(ipydir)/markupsafe-$(markupsafe-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=markupsafe-$(markupsafe-version).tar.lz + $(call import-source, $(markupsafe-url), 
$(markupsafe-checksum)) + $(call pybuild, tar -xf, markupsafe-$(markupsafe-version), , \ + MarkupSafe $(markupsafe-version)) + $(ipydir)/matplotlib-$(matplotlib-version): \ $(itidir)/texlive \ $(ipydir)/numpy-$(numpy-version) \ $(ipydir)/cycler-$(cycler-version) \ - $(ibidir)/freetype-$(freetype-version) \ - $(ipydir)/pyparsing-$(pyparsing-version) \ + $(ipydir)/pillow-$(pillow-version) \ + $(ipydir)/fonttools-$(fonttools-version) \ + $(ipydir)/contourpy-$(contourpy-version) \ $(ipydir)/kiwisolver-$(kiwisolver-version) \ - $(ibidir)/ghostscript-$(ghostscript-version) \ - $(ibidir)/imagemagick-$(imagemagick-version) \ $(ipydir)/python-dateutil-$(python-dateutil-version) - # Download the source. - tarball=matplotlib-$(matplotlib-version).tar.gz +# Prepare the source. + tarball=matplotlib-$(matplotlib-version).tar.lz $(call import-source, $(matplotlib-url), $(matplotlib-checksum)) - # On Mac systems, the build complains about `clang' specific - # features, so we can't use our own GCC build here. +# On Mac systems, the build complains about 'clang' specific +# features, so we can't use our own GCC build here. if [ x$(on_mac_os) = xyes ]; then export CC=clang export CXX=clang++ fi - $(call pybuild, tar xf, matplotlib-$(matplotlib-version)) + $(call pybuild, tar -xf, matplotlib-$(matplotlib-version),,, GPEP517) cp $(dtexdir)/matplotlib.tex $(ictdir)/ echo "Matplotlib $(matplotlib-version) \citep{matplotlib2007}" > $@ +$(ipydir)/meson-$(meson-version): \ + $(ibidir)/ninjabuild-$(ninjabuild-version) \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=meson-$(meson-version).tar.lz + $(call import-source, $(meson-url), $(meson-checksum)) + $(call pybuild, tar -xf, meson-$(meson-version), , \ + Meson $(meson-version), GPEP517) + echo "Meson $(meson-version)" > $@ + +# The 'meson-python' package may be helpful or required for some packages. 
+$(ipydir)/meson-python-$(meson-python-version): \ + $(ipydir)/meson-$(meson-version) \ + $(ipydir)/packaging-$(packaging-version) \ + $(ipydir)/pyproject-metadata-$(pyproject-metadata-version) + tarball=meson-python-$(meson-python-version).tar.lz + $(call import-source, $(meson-python-url), $(meson-python-checksum)) + $(call pybuild, tar -xf, meson-python-$(meson-python-version), , \ + Meson-python $(meson-python-version), GPEP517) + echo "Meson-Python $(meson-python-version)" > $@ + $(ipydir)/mpi4py-$(mpi4py-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ $(ibidir)/openmpi-$(openmpi-version) \ - $(ipydir)/setuptools-$(setuptools-version) - tarball=mpi4py-$(mpi4py-version).tar.gz + $(ipydir)/python-installer-$(python-installer-version) + tarball=mpi4py-$(mpi4py-version).tar.lz $(call import-source, $(mpi4py-url), $(mpi4py-checksum)) - $(call pybuild, tar xf, mpi4py-$(mpi4py-version)) + $(call pybuild, tar -xf, mpi4py-$(mpi4py-version)) cp $(dtexdir)/mpi4py.tex $(ictdir)/ echo "mpi4py $(mpi4py-version) \citep{mpi4py2011}" > $@ -$(ipydir)/mpmath-$(mpmath-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/mpmath-$(mpmath-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=mpmath-$(mpmath-version).tar.gz $(call import-source, $(mpmath-url), $(mpmath-checksum)) - $(call pybuild, tar xf, mpmath-$(mpmath-version), , \ + $(call pybuild, tar -xf, mpmath-$(mpmath-version), , \ mpmath $(mpmath-version)) +# Until 2025-02-22: we had 'export CFLAGS="--std=c99 $$CFLAGS"' before +# calling pybuild; but that doesn't seem to be necessary. 
$(ipydir)/numpy-$(numpy-version): \ - $(ibidir)/unzip-$(unzip-version) \ $(ipydir)/cython-$(cython-version) \ $(ibidir)/openblas-$(openblas-version) \ - $(ipydir)/setuptools-$(setuptools-version) - tarball=numpy-$(numpy-version).zip + $(ipydir)/pybind11-$(pybind11-version) \ + $(ipydir)/meson-python-$(meson-python-version) + tarball=numpy-$(numpy-version).tar.lz $(call import-source, $(numpy-url), $(numpy-checksum)) if [ x$(on_mac_os) = xyes ]; then export LDFLAGS="$(LDFLAGS) -undefined dynamic_lookup -bundle" - else - export LDFLAGS="$(LDFLAGS) -shared" fi - export CFLAGS="--std=c99 $$CFLAGS" conf="$$(pwd)/reproduce/software/config/numpy-scipy.cfg" - $(call pybuild, unzip, numpy-$(numpy-version),$$conf, \ - Numpy $(numpy-version)) + $(call pybuild, tar -xf, numpy-$(numpy-version),$$conf, \ + Numpy $(numpy-version), GPEP517) cp $(dtexdir)/numpy.tex $(ictdir)/ - echo "Numpy $(numpy-version) \citep{numpy2011}" > $@ - -$(ipydir)/pexpect-$(pexpect-version): $(ipydir)/setuptools-$(setuptools-version) + echo "Numpy $(numpy-version) \citep{numpy2020}" > $@ + +$(ipydir)/packaging-$(packaging-version): \ + $(ipydir)/pyparsing-$(pyparsing-version) + tarball=packaging-$(packaging-version).tar.lz + $(call import-source, $(packaging-url), $(packaging-checksum)) + $(call pybuild, tar -xf, packaging-$(packaging-version), , \ + Packaging $(packaging-version), GPEP517) + +$(ipydir)/pexpect-$(pexpect-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=pexpect-$(pexpect-version).tar.gz $(call import-source, $(pexpect-url), $(pexpect-checksum)) - $(call pybuild, tar xf, pexpect-$(pexpect-version), , \ + $(call pybuild, tar -xf, pexpect-$(pexpect-version), , \ Pexpect $(pexpect-version)) -$(ipydir)/pip-$(pip-version): $(ipydir)/setuptools-$(setuptools-version) - tarball=pip-$(pip-version).tar.gz - $(call import-source, $(pip-url), $(pip-checksum)) - $(call pybuild, tar xf, pip-$(pip-version), , \ - PiP $(pip-version)) 
+$(ipydir)/pillow-$(pillow-version): $(ibidir)/libjpeg-$(libjpeg-version) \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=pillow-$(pillow-version).tar.lz + $(call import-source, $(pillow-url), $(pillow-checksum)) + $(call pybuild, tar -xf, pillow-$(pillow-version), , \ + Pillow $(pillow-version), GPEP517) + +# This should normally not be used, because it's a front-end that obstructs +# reproducibility - source URL; checksum of the tarball; build rule. +# $(ipydir)/pip-$(pip-version): \ +# $(ipydir)/python-installer-$(python-installer-version) \ +# $(ipydir)/wheel-$(wheel-version) +# tarball=pip-$(pip-version).tar.gz +# $(call import-source, $(pip-url), $(pip-checksum)) +# $(call pybuild, tar -xf, pip-$(pip-version), , \ +# PiP $(pip-version)) + +$(ipydir)/ply-$(ply-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=ply-$(ply-version).tar.lz + $(call import-source, $(ply-url), $(ply-checksum)) + $(call pybuild, tar -xf, ply-$(ply-version), , \ + ply $(ply-version)) $(ipydir)/pycodestyle-$(pycodestyle-version): \ - $(ipydir)/setuptools-$(setuptools-version) + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=pycodestyle-$(pycodestyle-version).tar.gz $(call import-source, $(pycodestyle-url), $(pycodestyle-checksum)) - $(call pybuild, tar xf, pycodestyle-$(pycodestyle-version), , \ + $(call pybuild, tar -xf, pycodestyle-$(pycodestyle-version), , \ pycodestyle $(pycodestyle-version)) $(ipydir)/pybind11-$(pybind11-version): \ $(ibidir)/eigen-$(eigen-version) \ $(ibidir)/boost-$(boost-version) \ - $(ipydir)/setuptools-$(setuptools-version) - tarball=pybind11-$(pybind11-version).tar.gz + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=pybind11-$(pybind11-version).tar.lz $(call import-source, $(pybind11-url), $(pybind11-checksum)) pyhook_after() { - cp -r include/pybind11 
$(iidir)/python$(python-major-version)m/ + cp -pvr pybind11/include/pybind11 \ + $(iidir)/python$(python-major-version)m/ } - $(call pybuild, tar xf, pybind11-$(pybind11-version), , \ - pybind11 $(pybind11-version)) + $(call pybuild, tar -xf, pybind11-$(pybind11-version), , \ + pybind11 $(pybind11-version), GPEP517) + echo "Pybind11 $(pybind11-version)" > $@ -$(ipydir)/pycparser-$(pycparser-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/pycparser-$(pycparser-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=pycparser-$(pycparser-version).tar.gz $(call import-source, $(pycparser-url), $(pycparser-checksum)) - $(call pybuild, tar xf, pycparser-$(pycparser-version), , \ + $(call pybuild, tar -xf, pycparser-$(pycparser-version), , \ pycparser $(pycparser-version)) -$(ipydir)/pyflakes-$(pyflakes-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/pyerfa-$(pyerfa-version): \ + $(ipydir)/numpy-$(numpy-version) \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) + tarball=pyerfa-$(pyerfa-version).tar.lz + $(call import-source, $(pyerfa-url), $(pyerfa-checksum)) + $(call pybuild, tar -xf, pyerfa-$(pyerfa-version), , \ + PyERFA $(pyerfa-version), GPEP517) + +$(ipydir)/pyflakes-$(pyflakes-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=pyflakes-$(pyflakes-version).tar.gz $(call import-source, $(pyflakes-url), $(pyflakes-checksum)) - $(call pybuild, tar xf, pyflakes-$(pyflakes-version), , \ + $(call pybuild, tar -xf, pyflakes-$(pyflakes-version), , \ pyflakes $(pyflakes-version)) -$(ipydir)/pyparsing-$(pyparsing-version): $(ipydir)/setuptools-$(setuptools-version) - tarball=pyparsing-$(pyparsing-version).tar.gz +$(ipydir)/pyparsing-$(pyparsing-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/flit-core-$(flit-core-version) \ + $(ipydir)/python-installer-$(python-installer-version) + 
tarball=pyparsing-$(pyparsing-version).tar.lz $(call import-source, $(pyparsing-url), $(pyparsing-checksum)) - $(call pybuild, tar xf, pyparsing-$(pyparsing-version), , \ - PyParsing $(pyparsing-version)) + $(call pybuild, tar -xf, pyparsing-$(pyparsing-version), , \ + PyParsing $(pyparsing-version), GPEP517) -$(ipydir)/pypkgconfig-$(pypkgconfig-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/pypkgconfig-$(pypkgconfig-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=pkgconfig-$(pypkgconfig-version).tar.gz $(call import-source, $(pypkgconfig-url), $(pypkgconfig-checksum)) - $(call pybuild, tar xf, pkgconfig-$(pypkgconfig-version), , + $(call pybuild, tar -xf, pkgconfig-$(pypkgconfig-version), , pkgconfig $(pypkgconfig-version)) +$(ipydir)/pyproject-metadata-$(pyproject-metadata-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/flit-core-$(flit-core-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=pyproject-metadata-$(pyproject-metadata-version).tar.lz + $(call import-source, $(pyproject-metadata-url), $(pyproject-metadata-checksum)) + $(call pybuild, tar -xf, \ + pyproject-metadata-$(pyproject-metadata-version),,, GPEP517) + $(ipydir)/python-dateutil-$(python-dateutil-version): \ - $(ipydir)/six-$(six-version) \ - $(ipydir)/setuptools_scm-$(setuptools_scm-version) - tarball=python-dateutil-$(python-dateutil-version).tar.gz + $(ipydir)/setuptools-scm-$(setuptools-scm-version) + tarball=python-dateutil-$(python-dateutil-version).tar.lz $(call import-source, $(python-dateutil-url), $(python-dateutil-checksum)) - $(call pybuild, tar xf, python-dateutil-$(python-dateutil-version), , \ - python-dateutil $(python-dateutil-version)) + $(call pybuild, tar -xf, python-dateutil-$(python-dateutil-version), , \ + python-dateutil $(python-dateutil-version), GPEP517) + +$(ipydir)/python-installer-$(python-installer-version): \ + 
$(ibidir)/python-$(python-version) + +# Prepare the tarball. + tarball=python-installer-$(python-installer-version).tar.lz + $(call import-source, $(python-installer-url), $(python-installer-checksum)) + +# Modify the line in the source that will cause a crash when a +# to-be-installed file already exists in the installation path. This +# is very important for Python packages in Maneage (when a dependency +# is updated, the package needs to be re-built, but that would cause +# a crash due to this line). + pyhook_before(){ + mv -v src/installer/destinations.py src/installer/destinations.py.orig; \ + sed -e 's/\(raise FileExistsError.message.\)/## \1/' \ + src/installer/destinations.py.orig > src/installer/destinations.py + } + +# Build the Python installer. + $(call pybuild, tar -xf, \ + python-installer-$(python-installer-version),,, \ + BOOT_INSTALLER) + echo "Python-installer $(python-installer-version)" > $@ + +$(ipydir)/pythran-$(pythran-version): \ + $(ipydir)/ply-$(ply-version) \ + $(ipydir)/gast-$(gast-version) \ + $(ibidir)/boost-$(boost-version) \ + $(ipydir)/beniget-$(beniget-version) \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) + tarball=pythran-$(pythran-version).tar.lz + $(call import-source, $(pythran-url), $(pythran-checksum)) + $(call pybuild, tar -xf, pythran-$(pythran-version), , \ + pythran $(pythran-version)) $(ipydir)/pyyaml-$(pyyaml-version): \ $(ibidir)/yaml-$(yaml-version) \ $(ipydir)/cython-$(cython-version) - tarball=pyyaml-$(pyyaml-version).tar.gz + tarball=pyyaml-$(pyyaml-version).tar.lz $(call import-source, $(pyyaml-url), $(pyyaml-checksum)) - $(call pybuild, tar xf, PyYAML-$(pyyaml-version), , \ - PyYAML $(pyyaml-version)) + $(call pybuild, tar -xf, pyyaml-$(pyyaml-version), , \ + PyYAML $(pyyaml-version), GPEP517) $(ipydir)/requests-$(requests-version): $(ipydir)/idna-$(idna-version) \ - $(ipydir)/numpy-$(numpy-version) \ - $(ipydir)/certifi-$(certifi-version) \ - $(ipydir)/chardet-$(chardet-version) \ - 
$(ipydir)/urllib3-$(urllib3-version) + $(ipydir)/numpy-$(numpy-version) \ + $(ipydir)/certifi-$(certifi-version) \ + $(ipydir)/chardet-$(chardet-version) \ + $(ipydir)/urllib3-$(urllib3-version) tarball=requests-$(requests-version).tar.gz $(call import-source, $(requests-url), $(requests-checksum)) - $(call pybuild, tar xf, requests-$(requests-version), , \ + $(call pybuild, tar -xf, requests-$(requests-version), , \ Requests $(requests-version)) +# 'pythran' is disabled in the build of Scipy because of complications it +# caused on some systems. We explicitly disable it using a preprocessor +# directive. 'Pythran' can in principle speed up compilation of scientific +# software [1][2]. +# [1] https://pythran.readthedocs.io/en/latest +# [2] https://docs.scipy.org/doc/scipy-1.15.2/dev/roadmap-detailed.html $(ipydir)/scipy-$(scipy-version): \ $(ipydir)/numpy-$(numpy-version) \ $(ipydir)/pybind11-$(pybind11-version) - tarball=scipy-$(scipy-version).tar.gz + tarball=scipy-$(scipy-version).tar.lz $(call import-source, $(scipy-url), $(scipy-checksum)) if [ x$(on_mac_os) = xyes ]; then export LDFLAGS="$(LDFLAGS) -undefined dynamic_lookup -bundle" else - export LDFLAGS="$(LDFLAGS) -shared" +# Same question as for 'numpy': why '-shared'? This obstructs +# the meson build. +# export LDFLAGS="$(LDFLAGS) -shared" + : fi conf="$$(pwd)/reproduce/software/config/numpy-scipy.cfg" - $(call pybuild, tar xf, scipy-$(scipy-version),$$conf) + +# Disable pythran: see +# https://docs.scipy.org/doc/scipy-1.15.2/dev/roadmap-detailed.html#use-of-pythran +# export SCIPY_USE_PYTHRAN=0 # deprecated(?) +# Option 1: Hack the source: + pyhook_before() { + mv -iv meson.options meson.options.orig; \ + sed -e 's/\(use-pythran.*value: *\)true/\1false/' \ + meson.options.orig > meson.options + } + +# Option 2: pass the string +# --config-json='{"setup-args": "-Duse-pythran=false"}' +# to gpep517 with correct escaping of single and double quotes. +# Not tried as of 2025-02-25. 
+ $(call pybuild, tar -xf, scipy-$(scipy-version),$$conf,, GPEP517) cp $(dtexdir)/scipy.tex $(ictdir)/ - echo "Scipy $(scipy-version) \citep{scipy2007,scipy2011}" > $@ + echo "Scipy $(scipy-version) \citep{scipy2020}" > $@ $(ipydir)/secretstorage-$(secretstorage-version): \ $(ipydir)/jeepney-$(jeepney-version) \ $(ipydir)/cryptography-$(cryptography-version) tarball=secretstorage-$(secretstorage-version).tar.gz $(call import-source, $(secretstorage-url), $(secretstorage-checksum)) - $(call pybuild, tar xf, SecretStorage-$(secretstorage-version), , \ + $(call pybuild, tar -xf, SecretStorage-$(secretstorage-version), , \ SecretStorage $(secretstorage-version)) +$(ipydir)/semantic-version-$(semantic-version-version): \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=semantic-version-$(semantic-version-version).tar.lz + $(call import-source, $(semantic-version-url), \ + $(semantic-version-checksum)) + $(call pybuild, tar -xf, \ + semantic-version-$(semantic-version-version), , \ + Semantic-version $(semantic-version-version), GPEP517) + $(ipydir)/setuptools-$(setuptools-version): \ - $(ibidir)/unzip-$(unzip-version) \ - $(ibidir)/python-$(python-version) - tarball=setuptools-$(setuptools-version).zip + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) + tarball=setuptools-$(setuptools-version).tar.lz $(call import-source, $(setuptools-url), $(setuptools-checksum)) - $(call pybuild, unzip, setuptools-$(setuptools-version), , \ - Setuptools $(setuptools-version)) + $(call pybuild, tar -xf, setuptools-$(setuptools-version), , \ + Setuptools $(setuptools-version), GPEP517) -$(ipydir)/setuptools_scm-$(setuptools_scm-version): \ +$(ipydir)/setuptools-scm-$(setuptools-scm-version): \ $(ipydir)/setuptools-$(setuptools-version) - tarball=setuptools_scm-$(setuptools_scm-version).tar.gz - $(call import-source, $(setuptools_scm-url), $(setuptools_scm-checksum)) - $(call pybuild, tar xf, setuptools_scm-$(setuptools_scm-version), 
, \ - Setuptools-scm $(setuptools_scm-version)) + tarball=setuptools-scm-$(setuptools-scm-version).tar.lz + $(call import-source, $(setuptools-scm-url), $(setuptools-scm-checksum)) + $(call pybuild, tar -xf, setuptools-scm-$(setuptools-scm-version), , \ + Setuptools-scm $(setuptools-scm-version), GPEP517) + +$(ipydir)/setuptools-rust-$(setuptools-rust-version): \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) \ + $(ipydir)/semantic-version-$(semantic-version-version) + tarball=setuptools-rust-$(setuptools-rust-version).tar.lz + $(call import-source, $(setuptools-rust-url), \ + $(setuptools-rust-checksum)) + $(call pybuild, tar -xf, setuptools-rust-$(setuptools-rust-version), , \ + Setuptools-rust $(setuptools-rust-version), GPEP517) $(ipydir)/sip_tpv-$(sip_tpv-version): \ $(ipydir)/sympy-$(sympy-version) \ $(ipydir)/astropy-$(astropy-version) tarball=sip_tpv-$(sip_tpv-version).tar.gz $(call import-source, $(sip_tpv-url), $(sip_tpv-checksum)) - $(call pybuild, tar xf, sip_tpv-$(sip_tpv-version), ,) + $(call pybuild, tar -xf, sip_tpv-$(sip_tpv-version), ,) cp $(dtexdir)/sip_tpv.tex $(ictdir)/ echo "sip_tpv $(sip_tpv-version) \citep{sip-tpv}" > $@ - -$(ipydir)/six-$(six-version): $(ipydir)/setuptools-$(setuptools-version) - tarball=six-$(six-version).tar.gz +$(ipydir)/six-$(six-version): \ + $(ipydir)/setuptools-$(setuptools-version) + tarball=six-$(six-version).tar.lz $(call import-source, $(six-url), $(six-checksum)) - $(call pybuild, tar xf, six-$(six-version), , \ - Six $(six-version)) + $(call pybuild, tar -xf, six-$(six-version), , \ + Six $(six-version), GPEP517) + echo "Six $(six-version)" > $@ -$(ipydir)/soupsieve-$(soupsieve-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/soupsieve-$(soupsieve-version): \ + $(ipydir)/setuptools-$(setuptools-version) \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) tarball=soupsieve-$(soupsieve-version).tar.gz $(call import-source, $(soupsieve-url), $(soupsieve-checksum)) - $(call pybuild, tar xf, 
soupsieve-$(soupsieve-version), , \ + $(call pybuild, tar -xf, soupsieve-$(soupsieve-version), , \ SoupSieve $(soupsieve-version)) $(ipydir)/sympy-$(sympy-version): $(ipydir)/mpmath-$(mpmath-version) tarball=sympy-$(sympy-version).tar.gz $(call import-source, $(sympy-url), $(sympy-checksum)) - $(call pybuild, tar xf, sympy-$(sympy-version), ,) + $(call pybuild, tar -xf, sympy-$(sympy-version), ,) cp $(dtexdir)/sympy.tex $(ictdir)/ echo "SymPy $(sympy-version) \citep{sympy}" > $@ $(ipydir)/uncertainties-$(uncertainties-version): $(ipydir)/numpy-$(numpy-version) - tarball=uncertainties-$(uncertainties-version).tar.gz + tarball=uncertainties-$(uncertainties-version).tar.lz $(call import-source, $(uncertainties-url), $(uncertainties-checksum)) - $(call pybuild, tar xf, uncertainties-$(uncertainties-version), , \ + $(call pybuild, tar -xf, uncertainties-$(uncertainties-version), , \ uncertainties $(uncertainties-version)) -$(ipydir)/urllib3-$(urllib3-version): $(ipydir)/setuptools-$(setuptools-version) +$(ipydir)/urllib3-$(urllib3-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/python-installer-$(python-installer-version) tarball=urllib3-$(urllib3-version).tar.gz $(call import-source, $(urllib3-url), $(urllib3-checksum)) - $(call pybuild, tar xf, urllib3-$(urllib3-version), , \ + $(call pybuild, tar -xf, urllib3-$(urllib3-version), , \ Urllib3 $(urllib3-version)) $(ipydir)/webencodings-$(webencodings-version): \ - $(ipydir)/setuptools-$(setuptools-version) + $(ipydir)/setuptools-$(setuptools-version) \ + $(ipydir)/setuptools-scm-$(setuptools-scm-version) tarball=webencodings-$(webencodings-version).tar.gz $(call import-source, $(webencodings-url), $(webencodings-checksum)) - $(call pybuild, tar xf, webencodings-$(webencodings-version), , \ + $(call pybuild, tar -xf, webencodings-$(webencodings-version), , \ Webencodings $(webencodings-version)) + +# As of 2025-02, this is only needed if you want 'wheel' on the command +# line; 'setuptools' provides its 
own version of wheels. +$(ipydir)/wheel-$(wheel-version): \ + $(ipydir)/gpep517-$(gpep517-version) \ + $(ipydir)/flit-core-$(flit-core-version) \ + $(ipydir)/python-installer-$(python-installer-version) +# tarball=wheel-$(wheel-version).tar.lz + tarball=wheel-$(wheel-version).tar.gz + $(call import-source, $(wheel-url), $(wheel-checksum)) + $(call pybuild, tar -xf, wheel-$(wheel-version), , \ + Wheel $(wheel-version), GPEP517) diff --git a/reproduce/software/make/r-cran.mk b/reproduce/software/make/r-cran.mk new file mode 100644 index 0000000..7c86c23 --- /dev/null +++ b/reproduce/software/make/r-cran.mk @@ -0,0 +1,488 @@ +# Build the project's R (here called R-CRAN) dependencies. +# +# ------------------------------------------------------------------------ +# !!!!! IMPORTANT NOTES !!!!! +# +# This Makefile will be loaded into 'high-level.mk', which is called by the +# './project configure' script. It is not included into the project +# afterwards. +# +# This Makefile contains instructions to build all the R-CRAN-related +# software within the project. +# +# ------------------------------------------------------------------------ +# +# Copyright (C) 2022-2025 Boud Roukema <boud@cosmo.torun.pl> +# Copyright (C) 2022-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# +# This Makefile is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This Makefile is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this Makefile. If not, see <http://www.gnu.org/licenses/>. 
+ + + + +# BUGS/IMPROVEMENTS +# ----------------- +# +# As of 2021-06-20, the R system is still very new and has not yet +# been tested on non-Debian-derived systems. Please provide bug +# reports ( https://savannah.nongnu.org/task/?15772 ) or propose fixes +# as git pull requests on a public git server (e.g. on a fork of +# https://codeberg.org/boud/maneage_dev ). + + + + +# R-CRAN environment +# ----------------- +# +# It may be necessary to override host-level R-related environment +# variables on host systems where they might interfere with the +# Maneage-installed R system. + +# Ideas for which environment variables might create problems +# and might need to be set to be empty here: +# +# https://stat.ethz.ch/R-manual/R-devel/library/base/html/EnvVar.html + +# These first variables should be set automatically when R starts: +#export R_HOME := $(idir)/lib/R +#export R_INCLUDE_DIR := $(idir)/lib/R/include + + + + + +# R-CRAN-specific installation directories. +r-cran-major-version = $(shell echo $(r-cran-version) \ + | awk 'BEGIN{FS="."} \ + {printf "%d.%d\n", $$1, $$2}') + + + + + +# R-CRAN-specific build rules for 'make' +# ====================================== + +# Double-check an already downloaded R source +# ------------------------------------------- +# +# Check that the tarball with the version in +# 'reproduce/software/conf/versions.conf' has the sha512sum (checksum) +# stated in 'reproduce/software/conf/checksums.conf'. This does not do any +# security checks; it only checks that the source file package is the one +# that is expected the last time that someone updated these two files for +# the R package of interest. +# +# Calculate the checksum and exit with a non-zero error code if there's a +# mismatch, after informing the user. +# +# Arguments: +# 1: The expected checksum of the tarball. +# +# Necessary shell variables +# 'tarball': This is the name of the actual tarball file without a +# directory. 
+double-check-R-source = final=$(tdir)/$$tarball; \ + exp_checksum="$(strip $(1))"; \ + if [ x"$$exp_checksum" = x"NO-CHECK-SUM" ]; then \ + result=0; \ + else \ + if type sha512sum > /dev/null 2>/dev/null; then \ + checksum=$$(sha512sum "$$final" | awk '{print $$1}'); \ + if [ x"$$checksum" = x"$$exp_checksum" ]; then \ + result=0; \ + else \ + echo "ERROR: Non-matching checksum: $$final"; \ + echo "Checksum should be: $$exp_checksum"; \ + echo "Checksum is: $$checksum"; \ + result=1; \ + exit 1; \ + fi; \ + else \ + echo "ERROR: sha512sum is unavailable."; \ + exit 1; \ + fi; \ + fi + +# Default 'make' build rules for an CRAN package +# ----------------------------------------------- +# +# The default 'install.packages' function of R only recognizes 'tar.gz' +# tarballs. But Maneage uses '.tar.lz' format for its archival. So to be +# agnostic to the compression algorithm, we will be using 'tar' externally +# (before entering R), then give the un-compressed directory to +# 'install.packages'. +# +# Parameters: +# 1. package name (without 'r-cran', without the version string) +# 2. version string +# 3. 
checksum of the package +r_cran_build = \ + pkg=$(strip $(1)); \ + version=$(strip $(2)); \ + checksum=$(strip $(3)); \ + $(call import-source, \ + https://cran.r-project.org/src/contrib, \ + $$checksum, \ + $$tarball, \ + https://cran.r-project.org/src/contrib/00Archive/$$pkg); \ + cd "$(ddir)"; \ + tar -xf $(tdir)/$$tarball; \ + unpackdir=$$pkg-$$version; \ + (printf "install.packages(c(\"$(ddir)/$$unpackdir\"),"; \ + printf 'lib="$(ilibrcrandir)",'; \ + printf 'repos=NULL,'; \ + printf 'type="source")\n'; \ + printf 'quit()\n'; \ + printf 'n\n') | R --no-save; \ + rm -rf $$unpackdir; \ + if [ $$pkg = r-pkgconfig ]; then iname=pkgconfig; \ + else iname=$$pkg; fi; \ + if [ -e "$(ilibrcrandir)"/$$iname/Meta/nsInfo.rds ]; then \ + $(call double-check-R-source, $$checksum) \ + && echo "$$pkg $$version" > $@; \ + else \ + printf "r-cran-$$pkg failed: Meta/nsInfo.rds missing.\n"; \ + exit 1; \ + fi + + + + + +# Necessary programs and libraries +# -------------------------------- +# +# While this Makefile is for R programs, in some cases, we need certain +# programs (like R itself), or libraries for the modules. Comment on +# building R without GUI support ('--without-tcltk') +# +# Tcl/Tk are a set of tools to provide Graphic User Interface (GUI) support +# in some software. But they are not yet natively built within Maneage, +# primarily because we have higher-priority work right now (if anyone is +# interested, they can of course contribute!). GUI tools in general aren't +# high on our priority list right now because they are generally good for +# human interaction (which is contrary to the reproducible philosophy: +# there will always be human-error and frustration, for example in GUI +# tools the best level of reproducibility is statements like this: "move +# your mouse to button XXX, then click on menu YYY and etc"). A robust +# reproducible solution must be done automatically. 
+# +# If someone wants to use R's GUI functionalities while investigating for +# their analysis, they can do the GUI part on their host OS +# implementation. Later, they can bring the finalized source into Maneage +# to be automatically run in Maneage. This will also be the recommended way +# to deal with GUI tools later when we do install them within Maneage. +$(ibidir)/r-cran-$(r-cran-version): \ + $(itidir)/texlive \ + $(ibidir)/icu-$(icu-version) \ + $(ibidir)/pcre-$(pcre-version) \ + $(ibidir)/cairo-$(cairo-version) \ + $(ibidir)/libpng-$(libpng-version) \ + $(ibidir)/libjpeg-$(libjpeg-version) \ + $(ibidir)/libtiff-$(libtiff-version) \ + $(ibidir)/libpaper-$(libpaper-version) + +# Prepare the tarball, unpack it and enter the directory. + tarball=R-$(r-cran-version).tar.lz + $(call import-source, $(r-cran-url), $(r-cran-checksum)) + cd $(ddir) + tar -xf $(tdir)/$$tarball + unpackdir=R-$(r-cran-version) + cd $$unpackdir + $(shsrcdir)/prep-source.sh $(ibdir) + +# We need to manually remove the lines with '~autodetect~', they +# cause the configure script to crash in version 4.0.2. They are used +# in relation to Java, and we don't use Java anyway. + sed -i -e '/\~autodetect\~/ s/^/#/g' configure + export R_SHELL=$(SHELL) + ./configure --prefix=$(idir) \ + --without-x \ + --with-pcre1 \ + --disable-java \ + --with-readline \ + --without-tcltk \ + --disable-openmp + make -j$(numthreads) + make install + cd .. + rm -rf R-$(r-cran-version) + cp -p $(dtexdir)/r-cran.tex $(ictdir)/ + echo "R $(r-cran-version) \citep{RIhakaGentleman1996}" > $@ + + + + + +# Non-Maneage'd tarballs +# ---------------------- +# +# CRAN tarballs differ in two aspects from Maneage'd tarballs: +# - CRAN uses '.tar.gz', while Maneage uses 'tar.lz'. +# - CRAN uses 'name_version', while Maneage uses 'name-version'. 
+# +# So if you add a new R package, or update the version of an existing one +# (that is not yet in Maneage's archive), you need to use the CRAN naming +# format for the 'tarball' variable. + + + + + +# R-CRAN modules +# --------------- +# +# The rules for downloading, compiling and installing any R-CRAN modules +# that are needed should be provided here. Each target (before the colon) +# is first shown with its dependence on prerequisites (which are listed +# after the colon. The default macro 'r_cran_build' will install the +# package without checking on dependencies. + +$(ircrandir)/r-cran-cli-$(r-cran-cli-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-glue-$(r-cran-glue-version) + tarball=cli-$(r-cran-cli-version).tar.lz + $(call r_cran_build, cli, $(r-cran-cli-version), \ + $(r-cran-cli-checksum)) + +$(ircrandir)/r-cran-colorspace-$(r-cran-colorspace-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=colorspace-$(r-cran-colorspace-version).tar.lz + $(call r_cran_build, colorspace, $(r-cran-colorspace-version), \ + $(r-cran-colorspace-checksum)) + +$(ircrandir)/r-cran-cowplot-$(r-cran-cowplot-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \ + $(ircrandir)/r-cran-gtable-$(r-cran-gtable-version) \ + $(ircrandir)/r-cran-scales-$(r-cran-scales-version) \ + $(ircrandir)/r-cran-ggplot2-$(r-cran-ggplot2-version) + tarball=cowplot-$(r-cran-cowplot-version).tar.lz + $(call r_cran_build, cowplot, $(r-cran-cowplot-version), \ + $(r-cran-cowplot-checksum)) + +$(ircrandir)/r-cran-crayon-$(r-cran-crayon-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=crayon-$(r-cran-crayon-version).tar.lz + $(call r_cran_build, crayon, $(r-cran-crayon-version), \ + $(r-cran-crayon-checksum)) + +$(ircrandir)/r-cran-digest-$(r-cran-digest-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=digest-$(r-cran-digest-version).tar.lz + $(call r_cran_build, digest, $(r-cran-digest-version), \ 
+ $(r-cran-digest-checksum)) + +$(ircrandir)/r-cran-farver-$(r-cran-farver-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=farver-$(r-cran-farver-version).tar.lz + $(call r_cran_build, farver, $(r-cran-farver-version), \ + $(r-cran-farver-checksum)) + +$(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) + tarball=ellipsis-$(r-cran-ellipsis-version).tar.lz + $(call r_cran_build, ellipsis, $(r-cran-ellipsis-version), \ + $(r-cran-ellipsis-checksum)) + +$(ircrandir)/r-cran-fansi-$(r-cran-fansi-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=fansi-$(r-cran-fansi-version).tar.lz + $(call r_cran_build, fansi, $(r-cran-fansi-version), \ + $(r-cran-fansi-checksum)) + +$(ircrandir)/r-cran-ggplot2-$(r-cran-ggplot2-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-glue-$(r-cran-glue-version) \ + $(ircrandir)/r-cran-mgcv-$(r-cran-mgcv-version) \ + $(ircrandir)/r-cran-MASS-$(r-cran-MASS-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \ + $(ircrandir)/r-cran-withr-$(r-cran-withr-version) \ + $(ircrandir)/r-cran-digest-$(r-cran-digest-version) \ + $(ircrandir)/r-cran-gtable-$(r-cran-gtable-version) \ + $(ircrandir)/r-cran-scales-$(r-cran-scales-version) \ + $(ircrandir)/r-cran-tibble-$(r-cran-tibble-version) \ + $(ircrandir)/r-cran-isoband-$(r-cran-isoband-version) + tarball=ggplot2-$(r-cran-ggplot2-version).tar.lz + $(call r_cran_build, ggplot2, $(r-cran-ggplot2-version), \ + $(r-cran-ggplot2-checksum)) + +$(ircrandir)/r-cran-glue-$(r-cran-glue-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=glue-$(r-cran-glue-version).tar.lz + $(call r_cran_build, glue, $(r-cran-glue-version), \ + $(r-cran-glue-checksum)) + +$(ircrandir)/r-cran-gridExtra-$(r-cran-gridExtra-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-gtable-$(r-cran-gtable-version) + 
tarball=gridExtra-$(r-cran-gridExtra-version).tar.lz + $(call r_cran_build, gridExtra, $(r-cran-gridExtra-version), \ + $(r-cran-gridExtra-checksum)) + +$(ircrandir)/r-cran-gtable-$(r-cran-gtable-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=gtable-$(r-cran-gtable-version).tar.lz + $(call r_cran_build, gtable, $(r-cran-gtable-version), \ + $(r-cran-gtable-checksum)) + +$(ircrandir)/r-cran-isoband-$(r-cran-isoband-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=isoband-$(r-cran-isoband-version).tar.lz + $(call r_cran_build, isoband, $(r-cran-isoband-version), \ + $(r-cran-isoband-checksum)) + +$(ircrandir)/r-cran-labeling-$(r-cran-labeling-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=labeling-$(r-cran-labeling-version).tar.lz + $(call r_cran_build, labeling, $(r-cran-labeling-version), \ + $(r-cran-labeling-checksum)) + +$(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-glue-$(r-cran-glue-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) + tarball=lifecycle-$(r-cran-lifecycle-version).tar.lz + $(call r_cran_build, lifecycle, $(r-cran-lifecycle-version), \ + $(r-cran-lifecycle-checksum)) + +$(ircrandir)/r-cran-magrittr-$(r-cran-magrittr-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=magrittr-$(r-cran-magrittr-version).tar.lz + $(call r_cran_build, magrittr, $(r-cran-magrittr-version), \ + $(r-cran-magrittr-checksum)) + +$(ircrandir)/r-cran-MASS-$(r-cran-MASS-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=MASS-$(r-cran-MASS-version).tar.lz + $(call r_cran_build, MASS, $(r-cran-MASS-version), \ + $(r-cran-MASS-checksum)) + +# The base R-2.0.4 install includes nlme and Matrix. 
+# https://cran.r-project.org/web/packages/mgcv/index.html +$(ircrandir)/r-cran-mgcv-$(r-cran-mgcv-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=mgcv-$(r-cran-mgcv-version).tar.lz + $(call r_cran_build, mgcv, $(r-cran-mgcv-version), \ + $(r-cran-mgcv-checksum)) + +$(ircrandir)/r-cran-munsell-$(r-cran-munsell-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-colorspace-$(r-cran-colorspace-version) + tarball=munsell-$(r-cran-munsell-version).tar.lz + $(call r_cran_build, munsell, $(r-cran-munsell-version), \ + $(r-cran-munsell-checksum)) + +#TODO: https://cran.r-project.org/web/packages/pillar/index.html +$(ircrandir)/r-cran-pillar-$(r-cran-pillar-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-cli-$(r-cran-cli-version) \ + $(ircrandir)/r-cran-utf8-$(r-cran-utf8-version) \ + $(ircrandir)/r-cran-fansi-$(r-cran-fansi-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \ + $(ircrandir)/r-cran-vctrs-$(r-cran-vctrs-version) \ + $(ircrandir)/r-cran-crayon-$(r-cran-crayon-version) \ + $(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version) \ + $(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version) + tarball=pillar-$(r-cran-pillar-version).tar.lz + $(call r_cran_build, pillar, $(r-cran-pillar-version), \ + $(r-cran-pillar-checksum)) + +# Since we have other software packages with the name 'pkgconfig', to avoid +# confusion with those tarballs, we have put a 'r-' prefix in the tarball +# name. If you want to use the CRAN tarball, please correct the name +# accordingly (as described in the comment above this group of rules). 
+$(ircrandir)/r-cran-pkgconfig-$(r-cran-pkgconfig-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=r-pkgconfig-$(r-cran-pkgconfig-version).tar.lz + $(call r_cran_build, r-pkgconfig, $(r-cran-pkgconfig-version), \ + $(r-cran-pkgconfig-checksum)) + +$(ircrandir)/r-cran-RColorBrewer-$(r-cran-RColorBrewer-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=RColorBrewer-$(r-cran-RColorBrewer-version).tar.lz + $(call r_cran_build, RColorBrewer, $(r-cran-RColorBrewer-version), \ + $(r-cran-RColorBrewer-checksum)) + +$(ircrandir)/r-cran-R6-$(r-cran-R6-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=R6-$(r-cran-R6-version).tar.lz + $(call r_cran_build, R6, $(r-cran-R6-version), $(r-cran-R6-checksum)) + +$(ircrandir)/r-cran-rlang-$(r-cran-rlang-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=rlang-$(r-cran-rlang-version).tar.lz + $(call r_cran_build, rlang, $(r-cran-rlang-version), \ + $(r-cran-rlang-checksum)) + +# https://cran.r-project.org/web/packages/scales/index.html +$(ircrandir)/r-cran-scales-$(r-cran-scales-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-R6-$(r-cran-R6-version) \ + $(ircrandir)/r-cran-farver-$(r-cran-farver-version) \ + $(ircrandir)/r-cran-munsell-$(r-cran-munsell-version) \ + $(ircrandir)/r-cran-labeling-$(r-cran-labeling-version) \ + $(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version) \ + $(ircrandir)/r-cran-viridisLite-$(r-cran-viridisLite-version) \ + $(ircrandir)/r-cran-RColorBrewer-$(r-cran-RColorBrewer-version) + tarball=scales-$(r-cran-scales-version).tar.lz + $(call r_cran_build, scales, $(r-cran-scales-version), \ + $(r-cran-scales-checksum)) + +#https://cran.r-project.org/web/packages/tibble/index.html +$(ircrandir)/r-cran-tibble-$(r-cran-tibble-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-fansi-$(r-cran-fansi-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \ + $(ircrandir)/r-cran-vctrs-$(r-cran-vctrs-version) \ + 
$(ircrandir)/r-cran-pillar-$(r-cran-pillar-version) \ + $(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version) \ + $(ircrandir)/r-cran-magrittr-$(r-cran-magrittr-version) \ + $(ircrandir)/r-cran-lifecycle-$(r-cran-lifecycle-version) \ + $(ircrandir)/r-cran-pkgconfig-$(r-cran-pkgconfig-version) + tarball=tibble-$(r-cran-tibble-version).tar.lz + $(call r_cran_build, tibble, $(r-cran-tibble-version), \ + $(r-cran-tibble-checksum)) + +$(ircrandir)/r-cran-utf8-$(r-cran-utf8-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=utf8-$(r-cran-utf8-version).tar.lz + $(call r_cran_build, utf8, $(r-cran-utf8-version), \ + $(r-cran-utf8-checksum)) + +$(ircrandir)/r-cran-vctrs-$(r-cran-vctrs-version): \ + $(ibidir)/r-cran-$(r-cran-version) \ + $(ircrandir)/r-cran-glue-$(r-cran-glue-version) \ + $(ircrandir)/r-cran-rlang-$(r-cran-rlang-version) \ + $(ircrandir)/r-cran-ellipsis-$(r-cran-ellipsis-version) + tarball=vctrs-$(r-cran-vctrs-version).tar.lz + $(call r_cran_build, vctrs, $(r-cran-vctrs-version), \ + $(r-cran-vctrs-checksum)) + +$(ircrandir)/r-cran-viridisLite-$(r-cran-viridisLite-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=viridisLite-$(r-cran-viridisLite-version).tar.lz + $(call r_cran_build, viridisLite, $(r-cran-viridisLite-version), \ + $(r-cran-viridisLite-checksum)) + +$(ircrandir)/r-cran-withr-$(r-cran-withr-version): \ + $(ibidir)/r-cran-$(r-cran-version) + tarball=withr-$(r-cran-withr-version).tar.lz + $(call r_cran_build, withr, $(r-cran-withr-version), \ + $(r-cran-withr-checksum)) diff --git a/reproduce/software/make/xorg.mk b/reproduce/software/make/xorg.mk index 3178cb4..864c32a 100644 --- a/reproduce/software/make/xorg.mk +++ b/reproduce/software/make/xorg.mk @@ -4,7 +4,7 @@ # !!!!! IMPORTANT NOTES !!!!! # # This Makefile will be loaded into 'high-level.mk', which is called by the -# `./project configure' script. It is not included into the project +# './project configure' script. It is not included into the project # afterwards. 
# # This Makefile contains instructions to build all the Xorg-related @@ -14,8 +14,8 @@ # # ------------------------------------------------------------------------ # -# Copyright (C) 2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2021-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2021-2025 Raul Infante-Sainz <infantesainz@gmail.com> # # This Makefile is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -57,7 +57,7 @@ $(idir)/etc/profile.d/xorg.sh: | $(idir)/etc/profile.d $(ibidir)/util-macros-$(util-macros-version): \ $(idir)/etc/profile.d/xorg.sh \ $(ibidir)/automake-$(automake-version) - tarball=util-macros-$(util-macros-version).tar.bz2 + tarball=util-macros-$(util-macros-version).tar.lz $(call import-source, $(util-macros-url), $(util-macros-checksum)) $(call gbuild, util-macros-$(util-macros-version),,$(XORG_CONFIG),V=1) echo "util-macros (Xorg) $(util-macros-version)" > $@ @@ -65,21 +65,21 @@ $(ibidir)/util-macros-$(util-macros-version): \ # Necessaary headers to define the Xorg protocols. $(ibidir)/xorgproto-$(xorgproto-version): \ $(ibidir)/util-macros-$(util-macros-version) - tarball=xorgproto-$(xorgproto-version).tar.bz2 + tarball=xorgproto-$(xorgproto-version).tar.lz $(call import-source, $(xorg-proto-url), $(xorgproto-checksum)) $(call gbuild, xorgproto-$(xorgproto-version),,$(XORG_CONFIG),V=1) echo "xorgproto $(xorgproto-version)" > $@ # Necessaary headers to define the Xorg protocols. $(ibidir)/libxau-$(libxau-version): $(ibidir)/xorgproto-$(xorgproto-version) - tarball=libXau-$(libxau-version).tar.bz2 + tarball=libXau-$(libxau-version).tar.lz $(call import-source, $(libaxu-url), $(libxau-checksum)) $(call gbuild, libXau-$(libxau-version),,$(XORG_CONFIG), V=1) echo "libXau (Xorg) $(libxau-version)" > $@ # Library implementing the X Display Manager Control Protocol. 
$(ibidir)/libxdmcp-$(libxdmcp-version): $(ibidir)/libxau-$(libxau-version) - tarball=libXdmcp-$(libxdmcp-version).tar.bz2 + tarball=libXdmcp-$(libxdmcp-version).tar.lz $(call import-source, $(libxdmcp-url), $(libxdmcp-checksum)) $(call gbuild, libXdmcp-$(libxdmcp-version),,$(XORG_CONFIG), V=1) echo "libXdmcp (Xorg) $(libxdmcp-version)" > $@ @@ -88,7 +88,7 @@ $(ibidir)/libxdmcp-$(libxdmcp-version): $(ibidir)/libxau-$(libxau-version) $(ibidir)/xcb-proto-$(xcb-proto-version): \ $(ibidir)/python-$(python-version) \ $(ibidir)/libxml2-$(libxml2-version) - tarball=xcb-proto-$(xcb-proto-version).tar.xz + tarball=xcb-proto-$(xcb-proto-version).tar.lz $(call import-source, $(xcb-proto-url), $(xcb-proto-checksum)) $(call gbuild, xcb-proto-$(xcb-proto-version),,$(XORG_CONFIG), V=1) echo "XCB-proto (Xorg) $(xcb-proto-version)" > $@ @@ -98,7 +98,7 @@ $(ibidir)/libxcb-$(libxcb-version): \ $(ibidir)/libxdmcp-$(libxdmcp-version) \ $(ibidir)/xcb-proto-$(xcb-proto-version) \ $(ibidir)/libpthread-stubs-$(libpthread-stubs-version) - tarball=libxcb-$(libxcb-version).tar.xz + tarball=libxcb-$(libxcb-version).tar.lz $(call import-source, $(libxcb-url), $(libxcb-checksum)) $(call gbuild, libxcb-$(libxcb-version),, \ $(XORG_CONFIG) --without-doxygen, \ @@ -107,7 +107,7 @@ $(ibidir)/libxcb-$(libxcb-version): \ $(ibidir)/libpthread-stubs-$(libpthread-stubs-version): \ $(ibidir)/automake-$(automake-version) - tarball=libpthread-stubs-$(libpthread-stubs-version).tar.gz + tarball=libpthread-stubs-$(libpthread-stubs-version).tar.lz $(call import-source, $(libpthread-stubs-url), $(libpthread-stubs-checksum)) $(call gbuild, libpthread-stubs-$(libpthread-stubs-version),, V=1) echo "libpthread-stubs (Xorg) $(libpthread-stubs-version)" > $@ @@ -116,21 +116,22 @@ $(ibidir)/libpthread-stubs-$(libpthread-stubs-version): \ $(ibidir)/fontconfig-$(fontconfig-version): \ $(ibidir)/gperf-$(gperf-version) \ $(ibidir)/expat-$(expat-version) \ + $(ibidir)/python-$(python-version) \ 
$(ibidir)/libxml2-$(libxml2-version) \ $(ibidir)/freetype-$(freetype-version) \ $(ibidir)/util-linux-$(util-linux-version) - # Import the source. - tarball=fontconfig-$(fontconfig-version).tar.bz2 +# Import the source. + tarball=fontconfig-$(fontconfig-version).tar.lz $(call import-source, $(fontconfig-url), $(fontconfig-checksum)) - # Add the extra environment variables for using 'libuuid' of - # 'util-linux'. +# Add the extra environment variables for using 'libuuid' of +# 'util-linux'. ulidir=$(idir)/util-linux export LDFLAGS="-L$$ulidir/lib $(LDFLAGS)" export CPPFLAGS="-I$$ulidir/include $(CPPFLAGS)" export PKG_CONFIG_PATH=$(PKG_CONFIG_PATH):$$ulidir/lib/pkgconfig - # Build it. +# Build it. $(call gbuild, fontconfig-$(fontconfig-version),, \ $(XORG_CONFIG) --sysconfdir=$(idir)/etc \ --disable-docs, V=1 -j$(numthreads)) @@ -139,27 +140,27 @@ $(ibidir)/fontconfig-$(fontconfig-version): \ $(ibidir)/xtrans-$(xtrans-version): \ $(ibidir)/libxcb-$(libxcb-version) \ $(ibidir)/fontconfig-$(fontconfig-version) - tarball=xtrans-$(xtrans-version).tar.bz2 + tarball=xtrans-$(xtrans-version).tar.lz $(call import-source, $(xtrans-url), $(xtrans-checksum)) $(call gbuild, xtrans-$(xtrans-version),,$(XORG_CONFIG), V=1) echo "xtrans (Xorg) $(xtrans-version)" > $@ $(ibidir)/libx11-$(libx11-version): $(ibidir)/xtrans-$(xtrans-version) - tarball=libX11-$(libx11-version).tar.bz2 + tarball=libX11-$(libx11-version).tar.lz $(call import-source, $(libx11-url), $(libx11-checksum)) $(call gbuild, libX11-$(libx11-version),,$(XORG_CONFIG), \ -j$(numthreads) V=1) echo "X11 library $(libx11-version)" > $@ $(ibidir)/libxext-$(libxext-version): $(ibidir)/libx11-$(libx11-version) - tarball=libXext-$(libxext-version).tar.bz2 + tarball=libXext-$(libxext-version).tar.lz $(call import-source, $(libxext-url), $(libxext-checksum)) $(call gbuild, libXext-$(libxext-version),,$(XORG_CONFIG), \ -j$(numthreads) V=1) echo "libXext $(libxext-version)" > $@ $(ibidir)/libice-$(libice-version): 
$(ibidir)/libxext-$(libxext-version) - tarball=libICE-$(libice-version).tar.bz2 + tarball=libICE-$(libice-version).tar.lz $(call import-source, $(libice-url), $(libice-checksum)) $(call gbuild, libICE-$(libice-version),, \ $(XORG_CONFIG) ICE_LIBS=-lpthread, \ @@ -167,14 +168,14 @@ $(ibidir)/libice-$(libice-version): $(ibidir)/libxext-$(libxext-version) echo "libICE $(libice-version)" > $@ $(ibidir)/libsm-$(libsm-version): $(ibidir)/libice-$(libice-version) - tarball=libSM-$(libsm-version).tar.bz2 + tarball=libSM-$(libsm-version).tar.lz $(call import-source, $(libsm-url), $(libsm-checksum)) $(call gbuild, libSM-$(libsm-version),, \ $(XORG_CONFIG), -j$(numthreads) V=1) echo "libSM $(libsm-version)" > $@ $(ibidir)/libxt-$(libxt-version): $(ibidir)/libsm-$(libsm-version) - tarball=libXt-$(libxt-version).tar.bz2 + tarball=libXt-$(libxt-version).tar.lz $(call import-source, $(libxt-url), $(libxt-checksum)) $(call gbuild, libXt-$(libxt-version),, \ $(XORG_CONFIG), -j$(numthreads) V=1) diff --git a/reproduce/software/patches/README.md b/reproduce/software/patches/README.md new file mode 100644 index 0000000..804d7ec --- /dev/null +++ b/reproduce/software/patches/README.md @@ -0,0 +1,6 @@ +Patches to apply to software source +=================================== + +This directory is for keeping patches that may be necessary for some +versions of some software. So it may be empty in some instances (when no +software in that commit needs a patch). 
diff --git a/reproduce/software/patches/README.xz-5.2.5_src_liblzma_liblzma.map b/reproduce/software/patches/README.xz-5.2.5_src_liblzma_liblzma.map new file mode 100644 index 0000000..15e2787 --- /dev/null +++ b/reproduce/software/patches/README.xz-5.2.5_src_liblzma_liblzma.map @@ -0,0 +1,8 @@ +2022-07-14 B Roukema +xz-5.2.5_src_liblzma_liblzma.map is a patched version of xz-5.2.5/src/liblzma/liblzma.map +based on discussion at https://savannah.nongnu.org/bugs/index.php?62700 +and https://github.com/easybuilders/easybuild-easyconfigs/issues/14991 and +https://raw.githubusercontent.com/easybuilders/easybuild-easyconfigs/bcebb3320ffb63f9804ca8d4d64d1822ec7c9792/easybuild/easyconfigs/x/XZ/XZ-5.2.5_compat-libs.patch + +Since we don't yet have 'patch' in 'basic.mk', this file has to be copied +into place rather than patched. diff --git a/reproduce/software/patches/util-linux-2.38.1-macos.patch b/reproduce/software/patches/util-linux-2.38.1-macos.patch new file mode 100644 index 0000000..d89422d --- /dev/null +++ b/reproduce/software/patches/util-linux-2.38.1-macos.patch @@ -0,0 +1,114 @@ +From 3671d4a878fb58aa953810ecf9af41809317294f Mon Sep 17 00:00:00 2001 +From: Karel Zak <kzak@redhat.com> +Date: Mon, 4 Apr 2022 13:17:44 +0200 +Subject: [PATCH] build-sys: improve dependences for lib/procfs.c + +* add #ifdefs when use statfs() and include statfs.h or vfs.h + +Addresses: https://github.com/util-linux/util-linux/issues/1634 +Signed-off-by: Karel Zak <kzak@redhat.com> +--- + configure.ac | 2 ++ + include/statfs_magic.h | 4 +++- + lib/procfs.c | 15 +++++++++++++-- + misc-utils/hardlink.c | 2 +- + 4 files changed, 19 insertions(+), 4 deletions(-) + +diff --git a/configure.ac b/configure.ac +index 3fba95336a5b60b277d90615089093901fec723e..11c86b9a70809cbade98539b17fb2e1186984ebb 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -346,6 +346,7 @@ AC_CHECK_HEADERS([ \ + sys/socket.h \ + sys/sockio.h \ + sys/stat.h \ ++ sys/statfs.h \ + sys/swap.h \ + sys/syscall.h \ + 
sys/sysmacros.h \ +@@ -355,6 +356,7 @@ AC_CHECK_HEADERS([ \ + sys/types.h \ + sys/ucred.h \ + sys/un.h \ ++ sys/vfs.h \ + sys/xattr.h \ + unistd.h \ + utmp.h \ +diff --git a/include/statfs_magic.h b/include/statfs_magic.h +index b6b0225e865aeb3c04610ecab177f3e95f1cc61a..67ad0af2595862795299f0bcdb7baa5778742185 100644 +--- a/include/statfs_magic.h ++++ b/include/statfs_magic.h +@@ -1,7 +1,9 @@ + #ifndef UTIL_LINUX_STATFS_MAGIC_H + #define UTIL_LINUX_STATFS_MAGIC_H + +-#include <sys/statfs.h> ++#ifdef HAVE_SYS_STATFS_H ++# include <sys/statfs.h> ++#endif + + /* + * If possible then don't depend on internal libc __SWORD_TYPE type. +diff --git a/lib/procfs.c b/lib/procfs.c +index 4d6d25b6d78eba56aada4cd9acacee4d2e69f656..0d58857c83b378cb1cb3baf00f88f992ff8dce1e 100644 +--- a/lib/procfs.c ++++ b/lib/procfs.c +@@ -6,9 +6,13 @@ + */ + #include <ctype.h> + #include <unistd.h> +-#include <sys/vfs.h> + #include <errno.h> + ++#ifdef HAVE_SYS_VFS_H ++# include <sys/vfs.h> ++# include "statfs_magic.h" ++#endif ++ + #include "c.h" + #include "pathnames.h" + #include "procfs.h" +@@ -16,7 +20,6 @@ + #include "all-io.h" + #include "debug.h" + #include "strutils.h" +-#include "statfs_magic.h" + + static void procfs_process_deinit_path(struct path_cxt *pc); + +@@ -356,6 +359,7 @@ int procfs_dirent_match_name(DIR *procfs, struct dirent *d, const char *name) + return 0; + } + ++#ifdef HAVE_SYS_VFS_H + /* checks if fd is file in a procfs; + * returns 1 if true, 0 if false or couldn't determine */ + int fd_is_procfs(int fd) +@@ -375,7 +379,14 @@ int fd_is_procfs(int fd) + } while (ret != 0); + + return st.f_type == STATFS_PROC_MAGIC; ++ return 0; + } ++#else ++int fd_is_procfs(int fd __attribute__((__unused__))) ++{ ++ return 0; ++} ++#endif + + static char *strdup_procfs_file(pid_t pid, const char *name) + { +diff --git a/misc-utils/hardlink.c b/misc-utils/hardlink.c +index dd55af12aab7903e6025d0a39ea020c2400300e8..08af2882c1359e41fba37377a08c9a18647b0f9f 100644 +--- 
a/misc-utils/hardlink.c ++++ b/misc-utils/hardlink.c +@@ -38,7 +38,7 @@ + #include <ctype.h> /* tolower() */ + #include <sys/ioctl.h> + +-#if defined(HAVE_LINUX_FIEMAP_H) ++#if defined(HAVE_LINUX_FIEMAP_H) && defined(HAVE_SYS_VFS_H) + # include <linux/fs.h> + # include <linux/fiemap.h> + # ifdef FICLONE diff --git a/reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch b/reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch deleted file mode 100644 index 94dcab5..0000000 --- a/reproduce/software/patches/valgrind-3.15.0-mpi-fix1.patch +++ /dev/null @@ -1,37 +0,0 @@ ---- - mpi/libmpiwrap.c | 12 +++++++++++- - 1 file changed, 11 insertions(+), 1 deletion(-) - Patch by Samuel Thibault: - https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=946329;msg=10 - and hacked further by Boud Roukema 2020-05-10. ---- a/mpi/libmpiwrap.c -+++ b/mpi/libmpiwrap.c -@@ -278,8 +278,12 @@ static void showTy ( FILE* f, MPI_Dataty - else if (ty == MPI_LONG_INT) fprintf(f,"LONG_INT"); - else if (ty == MPI_SHORT_INT) fprintf(f,"SHORT_INT"); - else if (ty == MPI_2INT) fprintf(f,"2INT"); -+# if defined(MPI_UB_ENABLED_IN_MPI1) - else if (ty == MPI_UB) fprintf(f,"UB"); -+# endif -+# if defined(MPI_LB_ENABLED_IN_MPI1) - else if (ty == MPI_LB) fprintf(f,"LB"); -+# endif - # if defined(MPI_WCHAR) - else if (ty == MPI_WCHAR) fprintf(f,"WCHAR"); - # endif -@@ -733,8 +737,14 @@ void walk_type ( void(*f)(void*,long), c - f(base + offsetof(Ty,loc), sizeof(int)); - return; - } -- if (ty == MPI_LB || ty == MPI_UB) -+#if defined(MPI_LB_ENABLED_IN_MPI1) -+ if (ty == MPI_LB) -+ return; /* have zero size, so nothing needs to be done */ -+#endif -+#if defined(MPI_UB_ENABLED_IN_MPI1) -+ if (ty == MPI_UB) - return; /* have zero size, so nothing needs to be done */ -+#endif - goto unhandled; - /*NOTREACHED*/ - } diff --git a/reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch b/reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch deleted file mode 100644 index 12b50a2..0000000 --- 
a/reproduce/software/patches/valgrind-3.15.0-mpi-fix2.patch +++ /dev/null @@ -1,23 +0,0 @@ -Index: valgrind-3.15.0/mpi/Makefile.am -=================================================================== - Patch by Samuel Thibault: - https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=946329;msg=22 ---- valgrind-3.15.0.orig/mpi/Makefile.am -+++ valgrind-3.15.0/mpi/Makefile.am -@@ -42,14 +42,14 @@ libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@ - libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_CPPFLAGS = -I$(top_srcdir)/include - libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_CFLAGS = \ - $(CFLAGS_MPI) $(MPI_FLAG_M3264_PRI) -Wno-deprecated-declarations --libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_LDFLAGS = $(LDFLAGS_MPI) -+libmpiwrap_@VGCONF_ARCH_PRI@_@VGCONF_OS@_so_LDADD = $(LDFLAGS_MPI) - endif - if BUILD_MPIWRAP_SEC - libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_SOURCES = libmpiwrap.c - libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_CPPFLAGS = -I$(top_srcdir)/include - libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_CFLAGS = \ - $(CFLAGS_MPI) $(MPI_FLAG_M3264_SEC) -Wno-deprecated-declarations --libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_LDFLAGS = $(LDFLAGS_MPI) -+libmpiwrap_@VGCONF_ARCH_SEC@_@VGCONF_OS@_so_LDADD = $(LDFLAGS_MPI) - endif - - #---------------------------------------------------------------------------- diff --git a/reproduce/software/patches/xz-5.2.5_src_liblzma_liblzma.map b/reproduce/software/patches/xz-5.2.5_src_liblzma_liblzma.map new file mode 100644 index 0000000..8df17a9 --- /dev/null +++ b/reproduce/software/patches/xz-5.2.5_src_liblzma_liblzma.map @@ -0,0 +1,115 @@ +XZ_5.0 { +global: + lzma_alone_decoder; + lzma_alone_encoder; + lzma_auto_decoder; + lzma_block_buffer_bound; + lzma_block_buffer_decode; + lzma_block_buffer_encode; + lzma_block_compressed_size; + lzma_block_decoder; + lzma_block_encoder; + lzma_block_header_decode; + lzma_block_header_encode; + lzma_block_header_size; + lzma_block_total_size; + lzma_block_unpadded_size; + lzma_check_is_supported; + 
lzma_check_size; + lzma_code; + lzma_crc32; + lzma_crc64; + lzma_easy_buffer_encode; + lzma_easy_decoder_memusage; + lzma_easy_encoder; + lzma_easy_encoder_memusage; + lzma_end; + lzma_filter_decoder_is_supported; + lzma_filter_encoder_is_supported; + lzma_filter_flags_decode; + lzma_filter_flags_encode; + lzma_filter_flags_size; + lzma_filters_copy; + lzma_filters_update; + lzma_get_check; + lzma_index_append; + lzma_index_block_count; + lzma_index_buffer_decode; + lzma_index_buffer_encode; + lzma_index_cat; + lzma_index_checks; + lzma_index_decoder; + lzma_index_dup; + lzma_index_encoder; + lzma_index_end; + lzma_index_file_size; + lzma_index_hash_append; + lzma_index_hash_decode; + lzma_index_hash_end; + lzma_index_hash_init; + lzma_index_hash_size; + lzma_index_init; + lzma_index_iter_init; + lzma_index_iter_locate; + lzma_index_iter_next; + lzma_index_iter_rewind; + lzma_index_memusage; + lzma_index_memused; + lzma_index_size; + lzma_index_stream_count; + lzma_index_stream_flags; + lzma_index_stream_padding; + lzma_index_stream_size; + lzma_index_total_size; + lzma_index_uncompressed_size; + lzma_lzma_preset; + lzma_memlimit_get; + lzma_memlimit_set; + lzma_memusage; + lzma_mf_is_supported; + lzma_mode_is_supported; + lzma_physmem; + lzma_properties_decode; + lzma_properties_encode; + lzma_properties_size; + lzma_raw_buffer_decode; + lzma_raw_buffer_encode; + lzma_raw_decoder; + lzma_raw_decoder_memusage; + lzma_raw_encoder; + lzma_raw_encoder_memusage; + lzma_stream_buffer_bound; + lzma_stream_buffer_decode; + lzma_stream_buffer_encode; + lzma_stream_decoder; + lzma_stream_encoder; + lzma_stream_flags_compare; + lzma_stream_footer_decode; + lzma_stream_footer_encode; + lzma_stream_header_decode; + lzma_stream_header_encode; + lzma_version_number; + lzma_version_string; + lzma_vli_decode; + lzma_vli_encode; + lzma_vli_size; +}; + +XZ_5.1.2alpha { +global: + lzma_stream_encoder_mt; + lzma_stream_encoder_mt_memusage; +} XZ_5.0; + +XZ_5.2.2 { +global: + 
lzma_block_uncomp_encode; + lzma_cputhreads; + lzma_get_progress; + +local: + *; +} XZ_5.1.2alpha; + +XZ_5.2 { +} XZ_5.2.2; diff --git a/reproduce/software/shell/apptainer-README.md b/reproduce/software/shell/apptainer-README.md new file mode 100644 index 0000000..a7826ec --- /dev/null +++ b/reproduce/software/shell/apptainer-README.md @@ -0,0 +1,71 @@ +# Maneage'd projects in Apptainer + +Copyright (C) 2025-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org>\ +Copyright (C) 2025-2025 Giacomo Lorenzetti <glorenzetti@cefca.es>\ +See the end of the file for license conditions. + +For an introduction on containers, see the "Building in containers" section +of the `README.md` file within the top-level directory of this +project. Here, we focus on Apptainer with a simple checklist on how to use +the `apptainer-run.sh` script that we have already prepared in this +directory for easy usage in a Maneage'd project. + + + + + +## Building your Maneage'd project in Apptainer + +Through the steps below, you will create an Apptainer image that will only +contain the software environment and keep the project source and built +analysis files (data and PDF) on your host operating system. This enables +you to keep the size of the image to a minimum (only containing the built +software environment) to easily move it from one computer to another. + + 1. Using your favorite text editor, create a `run.sh` in your top Maneage + directory (as described in the comments at the start of the + `apptainer.sh` script in this directory). Just add `--build-only` on + the first run so it doesn't go onto doing the analysis and just sets up + the software environment. Set the respective directory(s) based on your + filesystem (the software directory is optional). The `run.sh` file name + is already in `.gitignore` (because it contains local directories), so + Git will ignore it and it won't be committed by mistake. + + 2. Make the script executable with `chmod +x ./run.sh`, and run it with + `./run.sh`. 
+ + 3. Once the build finishes, the build directory (on your host) will + contain two Singularity Image Format (SIF) files listed below. You can + move them to any other (more permanent) positions in your filesystem or + to other computers as needed. + * `maneage-base.sif`: image containing the base operating system that + was used to build your project. You can safely delete this unless you + need to keep it for future builds without internet (you can give it + to the `--base-name` option of this script). If you want a different + name for this, put the same option in your `run.sh` script. + * `maneaged.sif`: image with the full software environment of your + project. This file is necessary for future runs of your project + within the container. + + 4. To execute your project, remove the `--build-only` and use `./run.sh` to + execute it. If you want to enter your Maneage'd project shell, add the + `--project-shell` option to the call inside `./run.sh`. + + + + + +## Copyright information + +This file is free software: you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free +Software Foundation, either version 3 of the License, or (at your option) +any later version. + +This file is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +more details. + +You should have received a copy of the GNU General Public License along +with this file. If not, see <https://www.gnu.org/licenses/>. 
diff --git a/reproduce/software/shell/apptainer.sh b/reproduce/software/shell/apptainer.sh new file mode 100755 index 0000000..c581ade --- /dev/null +++ b/reproduce/software/shell/apptainer.sh @@ -0,0 +1,456 @@ +#!/bin/sh +# +# Create an Apptainer container from an existing image of the built software +# environment, but with the source, data and build (analysis) directories +# directly within the host file system. This script is assumed to be run in +# the top project source directory (that has 'README.md' and +# 'paper.tex'). If not, use the '--source-dir' option to specify where the +# Maneage'd project source is located. +# +# Usage: +# +# - When you are at the top Maneage'd project directory, run this script +# like the example below. Just set the build directory location on your +# system. See the items below for optional values to optimize the +# process (avoid downloading for example). +# +# ./reproduce/software/shell/apptainer.sh \ +# --build-dir=/PATH/TO/BUILD/DIRECTORY +# +# - Non-mandatory options: +# +# - If you already have the input data that is necessary for your +# project, use the '--input-dir' option to specify its location +# on your host file system. Otherwise the necessary analysis +# files will be downloaded directly into the build +# directory. Note that this is only necessary when '--build-only' +# is not given. +# +# - If you already have the software tarballs that are +# necessary for your project, use the '--software-dir' option to +# specify its location on your host file system only when +# building the container. No problem if you don't have them, they +# will be downloaded during the configuration phase. +# +# - To avoid having to set them every time you want to start the +# apptainer environment, you can put this command (with the proper +# directories) into a 'run.sh' script in the top Maneage'd project +# source directory and simply execute that. 
The special name 'run.sh' +# is in Maneage's '.gitignore', so it will not be included in your +# git history by mistake. +# +# Known problems: +# +# Copyright (C) 2025-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2025-2025 Giacomo Lorenzetti <glorenzetti@cefca.es> +# +# This script is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. +# +# This script is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this script. If not, see <http://www.gnu.org/licenses/>. + + + + + +# Script settings +# --------------- +# Stop the script if there are any errors. +set -e + + + + + +# Default option values +jobs=0 +quiet=0 +source_dir= +build_only= +base_name="" +shm_size=20gb +scriptname="$0" +project_name="" +project_shell=0 +container_shell=0 +base_os=debian:stable-slim + +print_help() { + # Print the output. + cat <<EOF +Usage: $scriptname [OPTIONS] + +Top-level script to build and run a Maneage'd project within Apptainer. + + Host OS directories (to be mounted in the container): + -b, --build-dir=STR Dir. to build in (only analysis in host). + -i, --input-dir=STR Dir. of input datasets (optional). + -s, --software-dir=STR Directory of necessary software tarballs. + --source-dir=STR Directory of source code (default: 'pwd -P'). + + Apptainer images + --base-os=STR Base OS name (default: '$base_os'). + --base-name=STR Base OS apptainer image (a '.sif' file). + --project-name=STR Project's apptainer image (a '.sif' file). + + Interactive shell + --project-shell Open the project's shell within the container. 
+ --container-shell Open the container shell. + + Operating mode: + -q, --quiet Do not print informative statements. + -?, --help Give this help list. + -j, --jobs=INT Number of threads to use in each phase. + --build-only Just build the container, don't run it. + +Mandatory or optional arguments to long options are also mandatory or +optional for any corresponding short options. + +Maneage URL: https://maneage.org + +Report bugs to mohammad@akhlaghi.org +EOF +} + +on_off_option_error() { + if [ "x$2" = x ]; then + echo "$scriptname: '$1' doesn't take any values" + else + echo "$scriptname: '$1' (or '$2') doesn't take any values" + fi + exit 1 +} + +check_v() { + if [ x"$2" = x ]; then + printf "$scriptname: option '$1' requires an argument. " + printf "Try '$scriptname --help' for more information\n" + exit 1; + fi +} + +while [ $# -gt 0 ] +do + case $1 in + + # OS directories + -b|--build-dir) build_dir="$2"; check_v "$1" "$build_dir"; shift;shift;; + -b=*|--build-dir=*) build_dir="${1#*=}"; check_v "$1" "$build_dir"; shift;; + -b*) build_dir=$(echo "$1" | sed -e's/-b//'); check_v "$1" "$build_dir"; shift;; + -i|--input-dir) input_dir="$2"; check_v "$1" "$input_dir"; shift;shift;; + -i=*|--input-dir=*) input_dir="${1#*=}"; check_v "$1" "$input_dir"; shift;; + -i*) input_dir=$(echo "$1" | sed -e's/-i//'); check_v "$1" "$input_dir"; shift;; + -s|--software-dir) software_dir="$2"; check_v "$1" "$software_dir"; shift;shift;; + -s=*|--software-dir=*) software_dir="${1#*=}"; check_v "$1" "$software_dir"; shift;; + -s*) software_dir=$(echo "$1" | sed -e's/-s//'); check_v "$1" "$software_dir"; shift;; + --source-dir) source_dir="$2"; check_v "$1" "$source_dir"; shift;shift;; + --source-dir=*) source_dir="${1#*=}"; check_v "$1" "$source_dir"; shift;; + + # Container options. 
+ --base-name) base_name="$2"; check_v "$1" "$base_name"; shift;shift;; + --base-name=*) base_name="${1#*=}"; check_v "$1" "$base_name"; shift;; + --project-name) project_name="$2"; check_v "$1" "$project_name"; shift;shift;; + --project-name=*) project_name="${1#*=}"; check_v "$1" "$project_name"; shift;; + + # Interactive shell. + --project-shell) project_shell=1; shift;; + --project_shell=*) on_off_option_error --project-shell;; + --container-shell) container_shell=1; shift;; + --container_shell=*) on_off_option_error --container-shell;; + + # Operating mode + -q|--quiet) quiet=1; shift;; + -q*|--quiet=*) on_off_option_error --quiet;; + -j|--jobs) jobs="$2"; check_v "$1" "$jobs"; shift;shift;; + -j=*|--jobs=*) jobs="${1#*=}"; check_v "$1" "$jobs"; shift;; + -j*) jobs=$(echo "$1" | sed -e's/-j//'); check_v "$1" "$jobs"; shift;; + --build-only) build_only=1; shift;; + --build-only=*) on_off_option_error --build-only;; + --shm-size) shm_size="$2"; check_v "$1" "$shm_size"; shift;shift;; + --shm-size=*) shm_size="${1#*=}"; check_v "$1" "$shm_size"; shift;; + -'?'|--help) print_help; exit 0;; + -'?'*|--help=*) on_off_option_error --help -?;; + + # Unrecognized option: + -*) echo "$scriptname: unknown option '$1'"; exit 1;; + esac +done + + + + + +# Sanity checks +# ------------- +# +# Make sure that the build directory is given and that it exists. +if [ x$build_dir = x ]; then + printf "$scriptname: '--build-dir' not provided, this is the location " + printf "that all built analysis files will be kept on the host OS\n" + exit 1; +else + if ! [ -d $build_dir ]; then + printf "$scriptname: '$build_dir' (value to '--build-dir') doesn't " + printf "exist\n" + exit 1; + fi +fi + +# Set the default project and base-OS image names (inside the build +# directory). 
+if [ x"$base_name" = x ]; then base_name=$build_dir/maneage-base.sif; fi +if [ x"$project_name" = x ]; then project_name=$build_dir/maneaged.sif; fi + + + + + +# Directory preparations +# ---------------------- +# +# If the host operating system has '/dev/shm', then give Apptainer access +# to it also for improved speed in some scenarios (like configuration). +if [ -d /dev/shm ]; then + shm_mnt="--mount type=bind,src=/dev/shm,dst=/dev/shm"; +else shm_mnt=""; +fi + +# If the following directories do not exist within the build directory, +# create them to make sure the '--mount' commands always work. Ideally, +# the 'input' directory should not be under the 'build' +# directory, but if the user hasn't given it then they don't care about +# potentially deleting it later (Maneage will download the inputs), so put +# it in the build directory. +analysis_dir="$build_dir"/analysis +if ! [ -d $analysis_dir ]; then mkdir $analysis_dir; fi +analysis_dir_mnt="--mount type=bind,src=$analysis_dir,dst=/home/maneager/build/analysis" + +# If no '--source-dir' was given, set it to the output of 'pwd -P' (to get +# the direct path without potential symbolic links) in the running directory. +if [ x"$source_dir" = x ]; then source_dir=$(pwd -P); fi +source_dir_mnt="--mount type=bind,src=$source_dir,dst=/home/maneager/source" + +# Only when an input directory is given, we need the respective 'mount' +# option for the 'apptainer run' command. +input_dir_mnt="" +if ! [ x"$input_dir" = x ]; then + input_dir_mnt="--mount type=bind,src=$input_dir,dst=/home/maneager/input" +fi + +# If no '--jobs' has been specified, use the maximum available jobs to the +# operating system. Apptainer only works on GNU/Linux operating systems, so +# there is no need to account for reading the number of threads on macOS. 
+if [ x"$jobs" = x0 ]; then jobs=$(nproc); fi + +# Since the container is read-only and is run with the '--contain' option +# (which makes an empty '/tmp'), we need to make a dedicated directory for +# the container to be able to write to. This is necessary because some +# software (Biber in particular on the default branch) need to write there! +# See https://github.com/plk/biber/issues/494. We'll keep the directory on +# the host OS within the build directory, but as a hidden file (since it is +# not necessary in other types of build and ultimately only contains +# temporary files of programs that need it). +toptmp=$build_dir/.apptainer-tmp-$(whoami) +if ! [ -d $toptmp ]; then mkdir $toptmp; fi +chmod -R +w $toptmp/ # Some software remove writing flags on /tmp files. +if ! [ x"$( ls -A $toptmp )" = x ]; then rm -r "$toptmp"/*; fi + +# [APPTAINER-ONLY] Optional mounting option for the software directory. +software_dir_mnt="" +if ! [ x"$software_dir" = x ]; then + software_dir_mnt="--mount type=bind,src=$software_dir,dst=/home/maneager/tarballs-software" +fi + + + + + +# Maneage'd Apptainer SIF container +# --------------------------------- +# +# Build the base operating system using Maneage's './project configure' +# step. +if [ -f $project_name ]; then + if [ $quiet = 0 ]; then + printf "$scriptname: info: project's image ('$project_name') " + printf "already exists and will be used. If you want to build a " + printf "new project image, give a new name to '--project-name'. " + printf "To remove this message run with '--quiet'\n" + fi +else + + # Build the basic definition, with just Debian-slim with minimal + # necessary tools. + if [ -f $base_name ]; then + if [ $quiet = 0 ]; then + printf "$scriptname: info: base OS docker image ('$base_name') " + printf "already exists and will be used. If you want to build a " + printf "new base OS image, give a new name to '--base-name'. 
" + printf "To remove this message run with '--quiet'\n" + fi + else + + base_def=$build_dir/base.def + cat <<EOF > $base_def +Bootstrap: docker +From: $base_os + +%post + apt-get update && apt-get install -y gcc g++ wget +EOF + # Build the base operating system container and delete the + # temporary definition file. + apptainer build $base_name $base_def + rm $base_def + fi + + # Build the Maneage definition file. + # - About the '$jobs' variable: this definition file is temporarily + # built and deleted immediately after the SIF file is created. So + # instead of using Apptainer's more complex '{{ jobs }}' format to + # pass an argument, we simply write the value of the configure + # script's '--jobs' option as a shell variable here when we are + # building that file. + # - About the removal of Maneage'd tarballs: we are doing this so if + # Maneage has downloaded tarballs during the build they do not + # unnecessarily bloat the container. Even when the user has given a + # software tarball directory, they will all be symbolic links that + # aren't valid when the user runs the container (since we only + # mount the software tarballs at build time). 
+ intbuild=/home/maneager/build + maneage_def=$build_dir/maneage.def + cat <<EOF > $maneage_def +Bootstrap: localimage +From: $base_name + +%setup + mkdir -p \${APPTAINER_ROOTFS}/home/maneager/input + mkdir -p \${APPTAINER_ROOTFS}/home/maneager/source + mkdir -p \${APPTAINER_ROOTFS}/home/maneager/build/analysis + mkdir -p \${APPTAINER_ROOTFS}/home/maneager/tarballs-software + +%post + cd /home/maneager/source + ./project configure --jobs=$jobs \\ + --input-dir=/home/maneager/input \\ + --build-dir=$intbuild \\ + --software-dir=/home/maneager/tarballs-software + rm /home/maneager/build/software/tarballs/* + +%runscript + cd /home/maneager/source + if ./project configure --build-dir=$intbuild \\ + --existing-conf --no-pause \\ + --offline --quiet; then \\ + if [ x"\$maneage_apptainer_stat" = xshell ]; then \\ + ./project shell --build-dir=$intbuild; \\ + elif [ x"\$maneage_apptainer_stat" = xrun ]; then \\ + if [ x"\$maneage_jobs" = x ]; then \\ + ./project make --build-dir=$intbuild; \\ + else \\ + ./project make --build-dir=$intbuild --jobs=\$maneage_jobs; \\ + fi; \\ + else \\ + printf "$scriptname: '\$maneage_apptainer_stat' (value "; \\ + printf "to 'maneage_apptainer_stat' environment variable) "; \\ + printf "is not recognized: should be either 'shell' or 'run'"; \\ + exit 1; \\ + fi; \\ + else \\ + printf "$scriptname: configuration failed! This is probably "; \\ + printf "due to a mismatch between the software versions of "; \\ + printf "the container and the source that it is being "; \\ + printf "executed.\n"; \\ + exit 1; \\ + fi +EOF + + # Build the maneage container. The last two are arguments (where order + # matters). The first few are options where order does not matter (so + # we have sorted them by line length). + apptainer build \ + $shm_mnt \ + $input_dir_mnt \ + $source_dir_mnt \ + $analysis_dir_mnt \ + $software_dir_mnt \ + --ignore-fakeroot-command \ + \ + $project_name \ + $maneage_def + + # Clean up. 
+ rm $maneage_def +fi + +# If the user just wanted to build the base operating system, abort the +# script here. +if ! [ x"$build_only" = x ]; then + if [ $quiet = 0 ]; then + printf "$scriptname: info: Maneaged project has been configured " + printf "successfully in the '$project_name' image" + fi + exit 0 +fi + + + + + +# Run the Maneage'd container +# --------------------------- +# +# Set the high-level Apptainer operational mode. +if [ $container_shell = 1 ]; then + aopt="shell" +elif [ $project_shell = 1 ]; then + aopt="run --env maneage_apptainer_stat=shell" +else + aopt="run --env maneage_apptainer_stat=run --env maneage_jobs=$jobs" +fi + +# Build the hostname from the name of the SIF file of the project name. +hstname=$(echo "$project_name" \ + | awk 'BEGIN{FS="/"}{print $NF}' \ + | sed -e's|.sif$||') + +# Execute Apptainer: +# +# - We are not using '--unsquash' (to run within a sandbox) because it +# loads the full multi-gigabyte container into RAM (which we usually +# need for data processing). The container is read-only and we are +# using the following two options instead to ensure that we have no +# influence from outside the container. (description of each is from +# the Apptainer manual) +# --contain: use minimal /dev and empty other directories (e.g. /tmp +# and $HOME) instead of sharing filesystems from your host. +# --cleanenv: clean environment before running container". +# +# - We are not mounting '/dev/shm' since Apptainer prints a warning that +# it is already mounted (apparently does not need it at run time). +# +# --no-home and --home: the first ensures that the 'HOME' variable is +# different from the user's home on the host operating system, the +# second sets it to a directory we specify (to keep things like +# '.bash_history'). 
+apptainer $aopt \ + --no-home \ + --contain \ + --cleanenv \ + --home $toptmp \ + $input_dir_mnt \ + $source_dir_mnt \ + $analysis_dir_mnt \ + --workdir $toptmp \ + --hostname $hstname \ + --cwd /home/maneager/source \ + \ + $project_name diff --git a/reproduce/software/shell/bashrc.sh b/reproduce/software/shell/bashrc.sh index 23845d6..6bbd774 100755 --- a/reproduce/software/shell/bashrc.sh +++ b/reproduce/software/shell/bashrc.sh @@ -3,10 +3,10 @@ # To have better control over the environment of each analysis step (Make # recipe), besides having environment variables (directly included from # Make), it may also be useful to have a Bash startup file (this file). All -# of the Makefiles set this file as the `BASH_ENV' environment variable, so +# of the Makefiles set this file as the 'BASH_ENV' environment variable, so # it is loaded into all the Make recipes within the project. # -# The special `PROJECT_STATUS' environment variable is defined in every +# The special 'PROJECT_STATUS' environment variable is defined in every # top-level Makefile of the project. It defines the the state of the Make # that is calling this script. It can have three values: # @@ -28,8 +28,13 @@ # When doing the project's analysis: all software have known # versions. # +# shell +# ----- +# When the user has activated the interactive shell (with './project +# shell'). # -# Copyright (C) 2019-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# +# Copyright (C) 2019-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This script is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -43,3 +48,48 @@ # # You should have received a copy of the GNU General Public License # along with this script. If not, see <http://www.gnu.org/licenses/>. + + + + + +# Interactive mode settings. 
We don't want these within the pipeline +# because they are useless there (for example the introduction message or +# prompt) and can un-necessarily slow down the running jobs (recall that +# the shell is executed at the start of each recipe). +if [ x$PROJECT_STATUS = xshell ]; then + + # A small introductory message. + echo "----------------------------------------------------------------------" + echo "Welcome to the Maneage interactive shell for this project, running" + echo " $(sh --version | awk 'NR==1')" + echo + echo "This shell's home directory is the project's build directory:" + echo " HOME: $HOME" + echo + echo "This shell's startup file is in the project's source directory:" + echo " $PROJECT_RCFILE" + echo + echo "To return to your host shell, run the 'exit' command." + echo "----------------------------------------------------------------------" + + # To activate colors in generic commands. + alias ls='ls --color=auto' + alias grep='grep --color=auto' + + # Add the Git branch information to the command prompt only when Git is + # present. Also set the command-prompt color to purple for normal users + # and red when the root is running it. + if git --version &> /dev/null; then + parse_git_branch() { + git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/' + } + else + parse_git_branch() { echo &> /dev/null; } + fi + if [ x$(whoami) = xroot ]; then + export PS1="[\[\033[01;31m\]\u@\h \W\[\033[32m\]\$(parse_git_branch)\[\033[00m\]]# " + else + export PS1="[\[\033[01;35m\]maneage@\h \W\[\033[32m\]\$(parse_git_branch)\[\033[00m\]]$ " + fi +fi diff --git a/reproduce/software/shell/configure.sh b/reproduce/software/shell/configure.sh index 0f7278f..1771487 100755 --- a/reproduce/software/shell/configure.sh +++ b/reproduce/software/shell/configure.sh @@ -2,8 +2,9 @@ # # Necessary preparations/configurations for the reproducible project. 
# -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> -# Copyright (C) 2021 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2021-2025 Raul Infante-Sainz <infantesainz@gmail.com> +# Copyright (C) 2022-2025 Pedram Ashofteh Ardakani <pedramardakani@pm.me> # # This script is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -39,6 +40,14 @@ set -e # had the chance to implement it yet (please help if you can!). Until then, # please set them based on your project (if they differ from the core # branch). + +# If equals 1, a message will be printed, showing the nano-seconds since +# previous step: useful with '-e --offline --nopause --quiet' to find +# bottlenecks for speed optimization. Speed is important because this +# script is called automatically every time by the container scripts. +check_elapsed=0 + +# In case a fortran compiler is necessary to check. need_gfortran=0 @@ -51,14 +60,12 @@ need_gfortran=0 # These are defined to help make this script more readable. topdir="$(pwd)" optionaldir="/optional/path" -adir=reproduce/analysis/config cdir=reproduce/software/config -pconf=$cdir/LOCAL.conf -ptconf=$cdir/LOCAL_tmp.conf -poconf=$cdir/LOCAL_old.conf -depverfile=$cdir/versions.conf -depshafile=$cdir/checksums.conf + + + + @@ -72,14 +79,21 @@ depshafile=$cdir/checksums.conf # that their changes are not going to be permenant. create_file_with_notice () { - if echo "# IMPORTANT: file can be RE-WRITTEN after './project configure'" > "$1" + if printf "# IMPORTANT: " > "$1" then - echo "#" >> "$1" - echo "# This file was created during configuration" >> "$1" - echo "# ('./project configure'). Therefore, it is not under" >> "$1" - echo "# version control and any manual changes to it will be" >> "$1" - echo "# over-written if the project re-configured." 
>> "$1" - echo "#" >> "$1" + # These commands may look messy, but the produced comments in the + # file are the main goal and they are readable. (without having to + # break our source-code line length). + printf "file can be RE-WRITTEN after './project " >> "$1" + printf "configure'.\n" >> "$1" + printf "#\n" >> "$1" + printf "# This file was created during configuration " >> "$1" + printf "('./project configure').\n" >> "$1" + printf "# Therefore, it is not under version control " >> "$1" + printf "and any manual changes\n" >> "$1" + printf "# to it will be over-written when the " >> "$1" + printf "project is re-configured.\n" >> "$1" + printf "#\n" >> "$1" else echo; echo "Can't write to $1"; echo; exit 1 @@ -101,7 +115,7 @@ absolute_dir () if stat "$address" 1> /dev/null; then echo "$(cd "$(dirname "$1")" && pwd )/$(basename "$1")" else - exit 1; + echo "$optionaldir" fi } @@ -112,7 +126,7 @@ absolute_dir () # Check file permission handling (POSIX-compatibility) # ---------------------------------------------------- # -# Check if a `given' directory handles permissions as expected. +# Check if a 'given' directory handles permissions as expected. # # This is to prevent a known bug in the NTFS filesystem that prevents # proper installation of Perl, and probably some other packages. This @@ -120,15 +134,15 @@ absolute_dir () # file, and examines whether the given directory handles the file # permissions as expected. # -# Returns `0' if everything is fine, and `255' otherwise. Choosing `0' is -# to mimic the `$ echo $?' behavior, while choosing `255' is to prevent +# Returns '0' if everything is fine, and '255' otherwise. Choosing '0' is +# to mimic the '$ echo $?' behavior, while choosing '255' is to prevent # misunderstanding 0 and 1 as true and false. # # ===== CAUTION! ===== # # -# Since there is a `set -e' before running this function, the whole script -# stops and exits IF the `check_permission' (or any other function) returns -# anything OTHER than `0'! 
So, only use this function as a test. Here's a +# Since there is a 'set -e' before running this function, the whole script +# stops and exits IF the 'check_permission' (or any other function) returns +# anything OTHER than '0'! So, only use this function as a test. Here's a # minimal example: # # if $(check_permission $some_directory) ; then @@ -136,7 +150,7 @@ absolute_dir () # fi ; check_permission () { - # Make a `junk' file, activate its executable flag and record its + # Make a 'junk' file, activate its executable flag and record its # permissions generally. local junkfile="$1"/check_permission_tmp_file rm -f "$junkfile" @@ -190,7 +204,7 @@ free_space_warning() { fs_threshold=$1 fs_destpath="$2" - return $(df "$fs_destpath" \ + return $(df -P "$fs_destpath" \ | awk 'FNR==2 {if($4>'$fs_threshold') print 1; \ else print 0; }') } @@ -199,30 +213,113 @@ free_space_warning() -# See if we are on a Linux-based system -# -------------------------------------- +# Function to empty the temporary software building directory. This can +# either be a symbolic link (to RAM) or an actual directory, so we can't +# simply use 'rm -r' (because a symbolic link is not a directory for 'rm'). +empty_build_tmp() { + + # 'ls -A' does not print the '.' and '..' and the '-z' option of '[' + # checks if the string is empty or not. This allows us to only attempt + # deleting the directory's contents if it actually has anything inside + # of it. Otherwise, '*' will not expand and we'll get an 'rm' error + # complaining that '$tmpblddir/*' doesn't exist. We also don't want to + # use 'rm -rf $tmpblddir/*' because in case of a typo or while + # debugging (if '$tmpblddir' becomes an empty string), this can + # accidentally delete the whole root partition (or a least the '/home' + # partition of the user). + if ! 
[ x"$( ls -A $tmpblddir )" = x ]; then + rm -r "$tmpblddir"/* + fi + rm -r "$tmpblddir" +} + + + + + +# Function to report the elapsed time between steps (if it was activated +# above with 'check_elapsed'). +elapsed_time_from_prev_step() { + if [ $check_elapsed = 1 ]; then + chel_now=$(date +"%N"); + chel_delta=$(echo $chel_prev $chel_now \ + | awk '{ delta=($2-$1)/1e6; \ + if(delta>0) d=delta; else d=0; \ + print d}') + chel_dsum=$(echo $chel_dsum $chel_delta | awk '{print $1+$2}') + echo $chel_counter $chel_delta "$1" \ + | awk '{ printf "Step %02d: %-6.2f [millisec]; %s\n", \ + $1, $2, $3}' + chel_counter=$((chel_counter+1)) + chel_prev=$(date +"%N") + fi +} + + + + + + + + + + +# In already-built container +# -------------------------- +# +# We need to run './project configure' at the start of every run of Maneage +# within a container (with 'shell' or 'make'). This is because we need to +# ensure the versions of all software are correct. However, the container +# filesystem (where the build/software directory is located) should be run +# as read-only when doing the analysis. So we will not be able to run some +# of the tests that require writing files or are generally not relevant +# when the container is already built (we want the configure command to be +# as fast as possible). +# +# The project source in Maneage'd containers is '/home/maneager/source'. +built_container=0 +if [ "$topdir" = /home/maneager/source ] \ + && [ -f .build/software/config/hardware-parameters.tex ]; then + built_container=1; +fi + +# Initialize the elapsed time measurement parameters. +if [ $check_elapsed = 1 ]; then + chel_dsum=0.00 + chel_counter=1 + chel_prev=$(date +"%N") + chel_start=$(date +"%N") +fi + + + + +# Identify the running OS +# ----------------------- +# +# Some features are tailored to GNU/Linux systems, while the BSD-based +# behavior is different. Initially we only tested macOS (hence the name of +# the variable), but now FreeBSD is also being included in our tests. 
As # more systems get used, we need to tailor these kinds of things better. -kernelname=$(uname -s) -if [ x$kernelname = xLinux ]; then - on_mac_os=no - - # Don't forget to add the respective C++ compiler below (leave 'cc' in - # the end). - c_compiler_list="gcc clang cc" -elif [ x$kernelname = xDarwin ]; then - host_cc=1 - on_mac_os=yes - - # Don't forget to add the respective C++ compiler below (leave 'cc' in - # the end). - c_compiler_list="clang gcc cc" -else - on_mac_os=no - cat <<EOF +if [ $built_container = 0 ]; then + kernelname=$(uname -s) + if [ $pauseformsg = 1 ]; then pausesec=10; else pausesec=0; fi + if [ x$kernelname = xLinux ]; then + on_mac_os=no + + # Don't forget to add the respective C++ compiler below (leave 'cc' in + # the end). + c_compiler_list="gcc clang cc" + elif [ x$kernelname = xDarwin ]; then + host_cc=1 + on_mac_os=yes + + # Don't forget to add the respective C++ compiler below (leave 'cc' in + # the end). + c_compiler_list="clang gcc cc" + else + on_mac_os=no + cat <<EOF ______________________________________________________ !!!!!!! WARNING !!!!!!! @@ -233,17 +330,20 @@ web-form: https://savannah.nongnu.org/support/?func=additem&group=reproduce -The configuration will continue in 10 seconds... +The configuration will continue in $pausesec seconds. To avoid the +pause on such messages use the '--no-pause' option. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EOF - sleep 10 + sleep $pausesec + fi + elapsed_time_from_prev_step os_identify fi - # Collect CPU information # ----------------------- # @@ -254,33 +354,43 @@ fi # later recorded as a LaTeX macro to be put in the final paper, but it # could be used in a more systematic way to optimize/revise project # workflow and build. 
-hw_class=$(uname -m) -if [ x$kernelname = xLinux ]; then - byte_order=$(lscpu \ - | grep 'Byte Order' \ - | awk '{ \ - for(i=3;i<NF;++i) \ - printf "%s ", $i; \ - printf "%s", $NF}') - address_sizes=$(lscpu \ - | grep 'Address sizes' \ - | awk '{ \ - for(i=3;i<NF;++i) \ - printf "%s ", $i; \ - printf "%s", $NF}') -elif [ x$on_mac_os = xyes ]; then - hw_byteorder=$(sysctl -n hw.byteorder) - if [ x$hw_byteorder = x1234 ]; then byte_order="Little Endian"; - elif [ x$hw_byteorder = x4321 ]; then byte_order="Big Endian"; - fi - address_size_physical=$(sysctl -n machdep.cpu.address_bits.physical) - address_size_virtual=$(sysctl -n machdep.cpu.address_bits.virtual) - address_sizes="$address_size_physical bits physical, " - address_sizes+="$address_size_virtual bits virtual" -else - byte_order="unrecognized" - address_sizes="unrecognized" - cat <<EOF +if [ $built_container = 0 ]; then + if [ x$kernelname = xLinux ]; then + byte_order=$(lscpu \ + | grep 'Byte Order' \ + | awk '{ \ + for(i=3;i<NF;++i) \ + printf "%s ", $i; \ + printf "%s", $NF}') + address_sizes=$(lscpu \ + | grep 'Address sizes' \ + | awk '{ \ + for(i=3;i<NF;++i) \ + printf "%s ", $i; \ + printf "%s", $NF}') + elif [ x$on_mac_os = xyes ]; then + hw_byteorder=$(sysctl -n hw.byteorder) + if [ x$hw_byteorder = x1234 ]; then byte_order="Little Endian"; + elif [ x$hw_byteorder = x4321 ]; then byte_order="Big Endian"; + fi + + # On macOS, the way of obtaining the number of cores is different + # between Intel or Apple M1 CPUs. Here we distinguish between Apple + # M1 and others. 
+ maccputype=$(sysctl -n machdep.cpu.brand_string) + if [ x"$maccputype" = x"Apple M1" ]; then + address_size_physical=$(sysctl -n machdep.cpu.thread_count) + address_size_virtual=$(sysctl -n machdep.cpu.logical_per_package) + else + address_size_physical=$(sysctl -n machdep.cpu.address_bits.physical) + address_size_virtual=$(sysctl -n machdep.cpu.address_bits.virtual) + fi + address_sizes="$address_size_physical bits physical, " + address_sizes+="$address_size_virtual bits virtual" + else + byte_order="unrecognized" + address_sizes="unrecognized" + cat <<EOF ______________________________________________________ !!!!!!! WARNING !!!!!!! @@ -290,10 +400,15 @@ the necessary steps in the 'reproduce/software/shell/configure.sh' script https://savannah.nongnu.org/support/?func=additem&group=reproduce +The configuration will continue in $pausesec seconds. To avoid the +pause on such messages use the '--no-pause' option. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EOF - sleep 5 + sleep $pausesec + fi + elapsed_time_from_prev_step cpu-info fi @@ -308,8 +423,10 @@ fi # avoid these error it is highly recommended to install Xcode in the host # system. Here, it is checked that this is the case, and if not, warn the user # about not having Xcode already installed. -if [ x$on_mac_os = xyes ]; then - xcode=$(which xcodebuild) +if [ $built_container = 0 ] && [ x$on_mac_os = xyes ]; then + + # 'which' isn't in POSIX, so we are using 'command -v' instead. + xcode=$(command -v xcodebuild) if [ x$xcode != x ]; then xcode_version=$(xcodebuild -version | grep Xcode) echo " " @@ -329,12 +446,15 @@ web-form: https://savannah.nongnu.org/support/?func=additem&group=reproduce -The configuration will continue in 5 seconds ... +The configuration will continue in $pausesec seconds. To avoid the +pause on such messages use the '--no-pause' option. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
EOF - sleep 5 + sleep $pausesec fi + elapsed_time_from_prev_step compiler-of-mac-os fi @@ -347,14 +467,15 @@ fi # To build the software, we'll need some basic tools (the C/C++ compilers # in particular) to be present. has_compilers=no -for c in $c_compiler_list; do +if [ $built_container = 0 ]; then + for c in $c_compiler_list; do - # Set the respective C++ compiler. - if [ x$c = xcc ]; then cplus=c++; - elif [ x$c = xgcc ]; then cplus=g++; - elif [ x$c = xclang ]; then cplus=clang++; - else - cat <<EOF + # Set the respective C++ compiler. + if [ x$c = xcc ]; then cplus=c++; + elif [ x$c = xgcc ]; then cplus=g++; + elif [ x$c = xclang ]; then cplus=clang++; + else + cat <<EOF ______________________________________________________ !!!!!!! BUG !!!!!!! @@ -367,21 +488,21 @@ script (just above this error message), or contact us with this web-form: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EOF - exit 1 - fi + exit 1 + fi - # Check if they exist. - if type $c > /dev/null 2>/dev/null; then - export CC=$c; - if type $cplus > /dev/null 2>/dev/null; then - export CXX=$cplus - has_compilers=yes - break + # Check if they exist. + if type $c > /dev/null 2>/dev/null; then + export CC=$c; + if type $cplus > /dev/null 2>/dev/null; then + export CXX=$cplus + has_compilers=yes + break + fi fi - fi -done -if [ x$has_compilers = xno ]; then - cat <<EOF + done + if [ x$has_compilers = xno ]; then + cat <<EOF ______________________________________________________ !!!!!!! C/C++ Compiler NOT FOUND !!!!!!! @@ -404,49 +525,52 @@ Xcode install are recommended. There are known problems with GCC on macOS. !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EOF - exit 1 + exit 1 + fi + elapsed_time_from_prev_step compiler-present fi - -# Special directory for compiler testing -# -------------------------------------- -# -# This directory will be deleted when the compiler testing is finished. -compilertestdir=.compiler_test_dir_please_delete -if ! 
[ -d $compilertestdir ]; then mkdir $compilertestdir; fi - - - - - # Check C compiler # ---------------- # -# Here we check if the C compiler works properly. About the "no warning" -# variable ('nowarnings'): -# -# -Wno-nullability-completeness: on macOS Big Sur 11.2.3 and Xcode 12.4, -# hundreds of 'nullability-completeness' warnings are printed which can -# be very annoying and even hide important errors or warnings. It is -# also harmless for our test here, so it is generally added. -testprog=$compilertestdir/test +# We are checking the C compiler before asking for the directories to let +# the user fix lower-level problems before giving inputs. +compilertestdir=.compiler_test_dir_please_delete testsource=$compilertestdir/test.c -noccwarnings="-Wno-nullability-completeness" -echo; echo; echo "Checking host C compiler ('$CC')..."; -cat > $testsource <<EOF +testprog=$compilertestdir/test +if [ $built_container = 0 ]; then + + # Here we check if the C compiler works properly. We'll start by + # making a directory to keep the products. + if ! [ -d $compilertestdir ]; then mkdir $compilertestdir; fi + + # About the "no warning" variable ('nowarnings'): + # + # -Wno-nullability-completeness: on macOS Big Sur 11.2.3 and + # Xcode 12.4, hundreds of 'nullability-completeness' warnings + # are printed which can be very annoying and even hide + # important errors or warnings. It is also harmless for our + # test here, so it is generally added. 
+ if [ x$on_mac_os = xyes ]; then + noccwarnings="-Wno-nullability-completeness" + fi + if [ $quiet = 0 ]; then + echo; echo "Checking host C compiler ('$CC')..."; + fi + cat > $testsource <<EOF #include <stdio.h> #include <stdlib.h> -int main(void){printf("...C compiler works.\n"); - return EXIT_SUCCESS;} +int main(void){printf("Good!\n"); return EXIT_SUCCESS;} EOF -if $CC $noccwarnings $testsource -o$testprog && $testprog; then - rm $testsource $testprog -else - rm $testsource - cat <<EOF + if $CC $noccwarnings $testsource -o$testprog && $testprog > /dev/null; then + if [ $quiet = 0 ]; then echo "... yes"; fi + rm $testsource $testprog + else + rm $testsource + cat <<EOF ______________________________________________________ !!!!!!! C compiler doesn't work !!!!!!! @@ -465,21 +589,23 @@ https://savannah.nongnu.org/support/?func=additem&group=reproduce !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EOF - exit 1 + exit 1 + fi + elapsed_time_from_prev_step compiler-c-check fi - # See if we need the dynamic-linker (-ldl) # ---------------------------------------- # -# Some programs (like Wget) need dynamic loading (using `libdl'). On -# GNU/Linux systems, we'll need the `-ldl' flag to link such programs. But +# Some programs (like Wget) need dynamic loading (using 'libdl'). On +# GNU/Linux systems, we'll need the '-ldl' flag to link such programs. But # Mac OS doesn't need any explicit linking. So we'll check here to see if # it is present (thus necessary) or not. 
-cat > $testsource <<EOF +if [ $built_container = 0 ]; then + cat > $testsource <<EOF #include <stdio.h> #include <dlfcn.h> int @@ -488,17 +614,17 @@ main(void) { return 0; } EOF -if $CC $testsource -o$testprog 2>/dev/null > /dev/null; then - needs_ldl=no; -else - needs_ldl=yes; + if $CC $testsource -o$testprog 2>/dev/null > /dev/null; then + needs_ldl=no; + else + needs_ldl=yes; + fi + elapsed_time_from_prev_step compiler-needs-dynamic-linker fi - - # See if the C compiler can build static libraries # ------------------------------------------------ # @@ -508,48 +634,46 @@ fi # programs will go and find their necessary libraries on the host system. # # Another good advantage of shared libraries is that we can actually use -# the shared library tool of the system (`ldd' with GNU C Library) and see +# the shared library tool of the system ('ldd' with GNU C Library) and see # exactly where each linked library comes from. But in static building, # unless you follow the build closely, its not easy to see if the source of # the library came from the system or our build. static_build=no - - - - # Print warning if the host CC is to be used. -if [ x$host_cc = x1 ]; then +if [ $built_container = 0 ] && [ x$host_cc = x1 ]; then cat <<EOF ______________________________________________________ !!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!! The GNU Compiler Collection (GCC, including compilers for C, C++, Fortran -and etc) is currently not built on macOS systems for this project. To build -the project's necessary software on this system, we need to use your -system's C compiler. +and etc) is not going to be built for this project. Either it is a macOS, +or you have used '--host-cc'. -Project's configuration will continue in 5 seconds. -______________________________________________________ +The configuration will continue in $pausesec seconds. To avoid the +pause on such messages use the '--no-pause' option. + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
EOF - sleep 5 + sleep $pausesec fi + # Necessary C library element positions # ------------------------------------- # # On some systems (in particular Debian-based OSs), the static C library # and necessary headers in a non-standard place, and we can't build GCC. So -# we need to find them first. The `sys/cdefs.h' header is also in a +# we need to find them first. The 'sys/cdefs.h' header is also in a # similarly different location. sys_cpath="" sys_library_path="" -if [ x"$$on_mac_os" != xyes ]; then +if [ $built_container = 0 ] && [ x"$on_mac_os" != xyes ]; then # Get the GCC target name of the compiler, when its given, special # C libraries and headers are in a sub-directory of the host. @@ -567,6 +691,7 @@ if [ x"$$on_mac_os" != xyes ]; then # For a check: #echo "sys_library_path: $sys_library_path" #echo "sys_cpath: $sys_cpath" + elapsed_time_from_prev_step compiler-sys-cpath fi @@ -576,27 +701,30 @@ fi # See if a link-able static C library exists # ------------------------------------------ # -# A static C library and the `sys/cdefs.h' header are necessary for +# A static C library and the 'sys/cdefs.h' header are necessary for # building GCC. 
-if [ x"$host_cc" = x0 ]; then - echo; echo; echo "Checking if static C library is available..."; - cat > $testsource <<EOF +if [ $built_container = 0 ]; then + if [ x"$host_cc" = x0 ]; then + if [ $quiet = 0 ]; then + echo; echo "Checking if static C library is available..."; + fi + cat > $testsource <<EOF #include <stdio.h> #include <stdlib.h> #include <sys/cdefs.h> -int main(void){printf("...yes\n"); - return EXIT_SUCCESS;} +int main(void){printf("...yes\n"); return EXIT_SUCCESS;} EOF - cc_call="$CC $testsource $CPPFLAGS $LDFLAGS -o$testprog -static -lc" - if $cc_call && $testprog; then - gccwarning=0 - rm $testsource $testprog - else - echo; echo "Compilation command:"; echo "$cc_call" - rm $testsource - gccwarning=1 - host_cc=1 - cat <<EOF + cc_call="$CC $testsource $CPPFLAGS $LDFLAGS -o$testprog -static -lc" + if $cc_call && $testprog > /dev/null; then + gccwarning=0 + rm $testsource $testprog + if [ $quiet = 0 ]; then echo "... yes"; fi + else + echo; echo "Compilation command:"; echo "$cc_call" + rm $testsource + gccwarning=1 + host_cc=1 + cat <<EOF _______________________________________________________ !!!!!!!!!!!! Warning !!!!!!!!!!!! @@ -623,15 +751,14 @@ re-configure the project to fix this problem. $ export LDFLAGS="-L/PATH/TO/STATIC/LIBC \$LDFLAGS" $ export CPPFLAGS="-I/PATH/TO/SYS/CDEFS_H \$LDFLAGS" - _______________________________________________________ EOF + fi fi -fi -# Print a warning if GCC is not meant to be built. -if [ x"$gccwarning" = x1 ]; then + # Print a warning if GCC is not meant to be built. + if [ x"$gccwarning" = x1 ]; then cat <<EOF PLEASE SEE THE WARNINGS ABOVE. @@ -641,10 +768,13 @@ seconds and use your system's C compiler (it won't build a custom GCC). But please consider installing the necessary package(s) to complete your C compiler, then re-run './project configure'. -Project's configuration will continue in 5 seconds. +The configuration will continue in $pausesec seconds. 
To avoid the +pause on such messages use the '--no-pause' option. EOF - sleep 5 + sleep $pausesec + fi + elapsed_time_from_prev_step compiler-linkable-static fi @@ -658,7 +788,7 @@ fi # have a fortran compiler: we'll build it internally for high-level # programs with GCC. However, when the host C compiler is to be used, the # user needs to have a Fortran compiler available. -if [ $host_cc = 1 ]; then +if [ $built_container = 0 ] && [ $host_cc = 1 ]; then # If a Fortran compiler is necessary, see if 'gfortran' exists and can # be used. @@ -691,8 +821,9 @@ EOF # Then, see if the Fortran compiler works testsourcef=$compilertestdir/test.f echo; echo; echo "Checking host Fortran compiler..."; - echo " PRINT *, \"... Fortran Compiler works.\"" > $testsourcef - echo " END" >> $testsourcef + echo " PRINT *, \"... Fortran Compiler works.\"" \ + > $testsourcef + echo " END" >> $testsourcef if gfortran $testsourcef -o$testprog && $testprog; then rm $testsourcef $testprog else @@ -718,6 +849,68 @@ EOF exit 1 fi fi + elapsed_time_from_prev_step compiler-fortran +fi + + + + + +# See if the linker accepts -Wl,-rpath-link +# ----------------------------------------- +# +# '-rpath-link' is used to write the information of the linked shared +# library into the shared object (library or program). But some versions of +# LLVM's linker don't accept it an can cause problems. +# +# IMPORTANT NOTE: This test has to be done **AFTER** the definition of +# 'instdir', otherwise, it is going to be used as an empty string. +if [ $built_container = 0 ]; then + cat > $testsource <<EOF +#include <stdio.h> +#include <stdlib.h> +int main(void) {return EXIT_SUCCESS;} +EOF + if $CC $testsource -o$testprog -Wl,-rpath-link 2>/dev/null \ + > /dev/null; then + export rpath_command="-Wl,-rpath-link=$instdir/lib" + else + export rpath_command="" + fi + + # Delete the temporary directory for compiler checking. 
+ rm -f $testprog $testsource + rm -r $compilertestdir + elapsed_time_from_prev_step compiler-rpath +fi + + + + + +# Paths needed by the host compiler (only for 'basic.mk') +# ------------------------------------------------------- +# +# At the end of the basic build, we need to build GCC. But GCC will build +# in multiple phases, making its own simple compiler in order to build +# itself completely. The intermediate/simple compiler doesn't recognize +# some system specific locations like '/usr/lib/ARCHITECTURE' that some +# operating systems use. We thus need to tell the intermediate compiler +# where its necessary libraries and headers are. +if [ $built_container = 0 ]; then + if [ x"$sys_library_path" != x ]; then + if [ x"$LIBRARY_PATH" = x ]; then + export LIBRARY_PATH="$sys_library_path" + else + export LIBRARY_PATH="$LIBRARY_PATH:$sys_library_path" + fi + if [ x"$CPATH" = x ]; then + export CPATH="$sys_cpath" + else + export CPATH="$CPATH:$sys_cpath" + fi + fi + elapsed_time_from_prev_step compiler-paths fi @@ -729,7 +922,8 @@ fi # # Print some basic information so the user gets a feeling of what is going # on and is prepared on what will happen next. -cat <<EOF +if [ $quiet = 0 ]; then + cat <<EOF ----------------------------- Project's local configuration @@ -744,33 +938,29 @@ components from pre-defined webpages). It is STRONGLY recommended to read the description above each question before answering it. EOF +fi - -# What to do with possibly existing configuration file -# ---------------------------------------------------- +# Previous configuration +# ---------------------- # -# `LOCAL.conf' is the top-most local configuration for the project. If it -# already exists when this script is run, we'll make a copy of it as backup -# (for example the user might have ran `./project configure' by mistake). -printnotice=yes -rewritepconfig=yes -if [ -f $pconf ]; then +# 'LOCAL.conf' is the top-most local configuration for the project. 
At this +# point, if a LOCAL.conf exists within the '.build' symlink, we use it +# (instead of asking the user to interactively specify it). +rewritelconfig=yes +lconf=.build/software/config/LOCAL.conf +if [ -f $lconf ]; then if [ $existing_conf = 1 ]; then - printnotice=no - if [ -f $pconf ]; then rewritepconfig=no; fi + rewritelconfig=no; fi fi - - - # Make sure the group permissions satisfy the previous configuration (if it # exists and we don't want to re-write it). -if [ $rewritepconfig = no ]; then - oldgroupname=$(awk '/GROUP-NAME/ {print $3; exit 0}' $pconf) +if [ $rewritelconfig = no ]; then + oldgroupname=$(awk '/GROUP-NAME/ {print $3; exit 0}' $lconf) if [ "x$oldgroupname" = "x$maneage_group_name" ]; then just_a_place_holder_to_avoid_not_equal_test=1; else @@ -791,50 +981,9 @@ if [ $rewritepconfig = no ]; then echo " $confcommand"; echo exit 1 fi -fi - - - - - -# Identify the downloader tool -# ---------------------------- -# -# After this script finishes, we will have both Wget and cURL for -# downloading any necessary dataset during the processing. However, to -# complete the configuration, we may also need to download the source code -# of some necessary software packages (including the downloaders). So we -# need to check the host's available tool for downloading at this step. -if [ $rewritepconfig = yes ]; then - if type wget > /dev/null 2>/dev/null; then - name=$(which wget) - - # By default Wget keeps the remote file's timestamp, so we'll have - # to disable it manually. - downloader="$name --no-use-server-timestamps -O"; - elif type curl > /dev/null 2>/dev/null; then - name=$(which curl) - - # - cURL doesn't keep the remote file's timestamp by default. - # - With the `-L' option, we tell cURL to follow redirects. - downloader="$name -L -o" - else - cat <<EOF - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!! 
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - -Couldn't find GNU Wget, or cURL on this system. These programs are used for -downloading necessary programs and data if they aren't already present (in -directories that you can specify with this configure script). Therefore if -the necessary files are not present, the project will crash. - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -EOF - downloader="no-downloader-found" - fi; + # Report timing of this step if necessary. + elapsed_time_from_prev_step LOCAL-and-group-check fi @@ -844,7 +993,7 @@ fi # Build directory # --------------- currentdir="$(pwd)" -if [ $rewritepconfig = yes ]; then +if [ $rewritelconfig = yes ]; then cat <<EOF =============== @@ -855,33 +1004,45 @@ The project's "source" (this directory) and "build" directories are treated separately. This greatly helps in managing the many intermediate files that are created during the build. The intermediate build files don't need to be archived or backed up: you can always re-build them with the contents of -the source directory. The build directory also needs a relatively large -amount of free space (atleast serveral Giga-bytes), while the source -directory (all plain text) will usually be a mega-byte or less. +the source directory. The build directory also needs a fairly large amount +of free space (at least several gigabytes), while the source directory (all +plain text, ignoring the .git directory if you have it) will usually be a +megabyte or less. -'.build' (a symbolic link to the build directory) will also be created +The link '.build' (a symbolic link to the build directory) will be created during this configuration. It can help encourage you to set the actual -build directory in a very different address from this one (one that can be -deleted and has large volume), while having easy access to it from here. 
+build directory to a very different path to that of the source (the build +directory should be considered as a large volume directory of throwaway +space that can be casually deleted), while making it easy to access from +here without having to remember the particular path. --- CAUTION --- Do not choose any directory under the top source directory (this directory). The build directory cannot be a subdirectory of the source. --------------- +Build directory: + - Must be writable by running user. + - Not a sub-directory of the source directory. + - No meta-characters in name: SPACE ! ' @ # $ % ^ & * ( ) + ; + EOF bdir= junkname=pure-junk-974adfkj38 while [ x"$bdir" = x ] do - # Ask the user (if not already set on the command-line). + # Ask the user (if not already set on the command-line: 'build_dir' + # comes from the 'project' script). if [ x"$build_dir" = x ]; then - if read -p"Please enter the top build directory: " build_dir; then + if read -p"Please enter the top build directory: " build_dir; + then just_a_place_holder_to_avoid_not_equal_test=1; else - echo "ERROR: shell is in non-interactive-mode and no build directory specified." - echo "The build directory (described above) is mandatory, configuration can't continue." - echo "Please use '--build-dir' to specify a build directory non-interactively." + printf "ERROR: shell is in non-interactive-mode and no " + printf "build directory specified. The build directory " + printf "(described above) is mandatory, configuration " + printf "can't continue. Please use '--build-dir' to " + printf "specify a build directory non-interactively" exit 1 fi fi @@ -908,14 +1069,16 @@ EOF # directory. if ! [ x"$bdir" = x ]; then if echo "$bdir/" \ - | grep '^'"$currentdir" 2> /dev/null > /dev/null; then + | grep '^'"$currentdir/" 2> /dev/null > /dev/null; then # If it was newly created, it will be empty, so delete it. if ! 
[ "$(ls -A $bdir)" ]; then rm --dir "$bdir"; fi - # Inform the user that this is not acceptable and reset `bdir'. + # Inform the user that this is not acceptable and reset + # 'bdir'. bdir= - echo " ** The build-directory cannot be under the source-directory." + printf " ** The build-directory cannot be under the " + printf "source-directory." fi fi @@ -924,7 +1087,8 @@ EOF # building. if ! [ x"$bdir" = x ]; then hasmeta=0; - case $bdir in *['!'\@\#\$\%\^\&\*\(\)\+\;\ ]* ) hasmeta=1 ;; esac + case $bdir in *['!'\@\#\$\%\^\&\*\(\)\+\;\ ]* ) hasmeta=1 ;; + esac if [ $hasmeta = 1 ]; then # If it was newly created, it will be empty, so delete it. @@ -932,9 +1096,10 @@ EOF # Inform the user and set 'bdir' to empty again. bdir= - echo " ** Build directory should not contain meta-characters" - echo " ** (like SPACE, %, \$, !, ;, or parenthesis, among " - echo " ** others): they can interrup the build for some software." + printf " ** Build directory should not contain " + printf "meta-characters (like SPACE, %, \$, !, ;, or " + printf "parenthesis, among others): they can interrup " + printf "the build for some software." fi fi @@ -945,22 +1110,35 @@ EOF if ! $(check_permission "$bdir"); then # Unable to handle permissions well bdir= - echo " ** File permissions can't be modified in this directory" + printf " ** File permissions can not be modified in " + printf "this directory" else # Able to handle permissions, now check for 5GB free space # in the given partition (note that the number is in units # of 1024 bytes). If this is not the case, print a warning. if $(free_space_warning 5000000 "$bdir"); then - echo " !! LESS THAN 5GB FREE SPACE IN: $bdir" - echo " !! We recommend choosing another partition." - echo " !! Build will continue in 5 seconds..." - sleep 5 + cat <<EOF + +_______________________________________________________ +!!!!!!!!!!!! Warning !!!!!!!!!!!! + +Less than 5GB free space in '$bdir'. We recommend choosing another +partition. 
Note that the software environment alone will take roughly +4.5GB, so if your datasets are large, it will fill up very soon. + +The configuration will continue in $pausesec seconds. To avoid the +pause on such messages use the '--no-pause' option. + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +EOF + sleep $pausesec fi fi fi # If the build directory was good, the loop will stop, if not, - # reset `build_dir' to blank, so it continues asking for another + # reset 'build_dir' to blank, so it continues asking for another # directory and let the user know that they must select a new # directory. if [ x"$bdir" = x ]; then @@ -968,9 +1146,42 @@ EOF echo " ** Please select another directory." echo "" else + # Set the '.build' and '.local' symbolic links (and delete + # possibly existing symbolic links). These commands are also + # present in the top-level 'project' script, but they are only + # invoked when '--build-dir' is called. When it is not called + # (the user wants to insert the directories interactively: the + # scenario here), the links need to be created from + # scratch. Furthermore, in case the given directory to + # '--build-dir' has problems (fails to pass the sanity checks + # above), the symbolic links also need to be recreated. + rm -f .build .local + ln -s $bdir .build + ln -s $bdir/software/installed .local + + # Inform the user echo " -- Build directory set to ($instring): '$bdir'" fi done + + # Report timing if necessary + elapsed_time_from_prev_step build-dir + +# The directory should be extracted from the existing LOCAL.conf, not from +# the command-line or in interactive mode. +else + + # Read the build directory from existing configuration file. It is + # assumed that 'LOCAL.conf' is created by this script (above the + # 'else') and that all the sanity checks there have already been + # applied. We'll just check if it is empty or not. 
+ bdir=$(awk '$1=="BDIR" {print $3}' $lconf) + if [ x"$bdir" = x ]; then + printf "$scriptname: no value to 'BDIR' of '$lconf'. Please run " + printf "the project configuration again, but without " + printf "'--existing-conf' (or '-e')" + exit 1 + fi fi @@ -979,13 +1190,10 @@ fi # Input directory # --------------- -if [ x"$input_dir" = x ]; then - indir="$optionaldir" -else - indir="$input_dir" +if [ x"$input_dir" = x ]; then indir="$optionaldir" +else indir="$input_dir" fi -noninteractive_sleep=2 -if [ $rewritepconfig = yes ] && [ x"$input_dir" = x ]; then +if [ $rewritelconfig = yes ]; then cat <<EOF ---------------------------------- @@ -1012,35 +1220,61 @@ don't want to make duplicates, you can create symbolic links to them and put those symbolic links in the given top-level directory. EOF - # Read the input directory if interactive mode is enabled. - if read -p"(OPTIONAL) Input datasets directory ($indir): " inindir; then - just_a_place_holder_to_avoid_not_equal_test=1; - else - echo "WARNING: interactive-mode seems to be disabled!" - echo "If you have a local copy of the inputs, use '--input-dir'." - echo "... project configuration will continue in $noninteractive_sleep sec ..." - sleep $noninteractive_sleep + # In case an input directory is not given, ask the user interactively. + if [ x"$input_dir" = x ]; then + + # Read the input directory if interactive mode is enabled. + if read -p"(OPTIONAL) Input datasets directory ($indir): " \ + inindir; then + just_a_place_holder_to_avoid_not_equal_test=1; + else + cat <<EOF +______________________________________________________ +!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!! + +WARNING: interactive-mode seems to be disabled! If you have a local copy of +the inputs, use '--input-dir'. Otherwise, all the data will be downloaded. + +The configuration will continue in $pausesec seconds. To avoid the +pause on such messages use the '--no-pause' option. + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ +EOF + sleep $pausesec + fi + else # An input directory was given. + inindir="$input_dir" fi - # In case an input-directory is given, write it in 'indir'. + # If the given string is not empty, write it in 'indir'. if [ x$inindir != x ]; then indir="$(absolute_dir "$inindir")" echo " -- Using '$indir'" fi + + # Report timing if necessary. + elapsed_time_from_prev_step input-dir + +# The directory should be extracted from the existing LOCAL.conf, not from +# the command-line or in interactive mode; similar to 'bdir' above. +else + indir=$(awk '$1=="INDIR" {print $3}' $lconf) fi + # Dependency tarball directory # ---------------------------- -if [ x"$software_dir" = x ]; then - ddir=$optionaldir -else - ddir=$software_dir +if [ x"$software_dir" = x ]; then ddir=$optionaldir +else ddir=$software_dir fi -if [ $rewritepconfig = yes ] && [ x"$software_dir" = x ]; then +if [ $rewritelconfig = yes ]; then + + # Print information. cat <<EOF --------------------------------------- @@ -1056,14 +1290,32 @@ of a dependency, it is necessary to have an internet connection because the project will download the tarballs it needs automatically. EOF - # Read the software directory if interactive mode is enabled. - if read -p"(OPTIONAL) Directory of dependency tarballs ($ddir): " tmpddir; then - just_a_place_holder_to_avoid_not_equal_test=1; + + # Ask the user for the software directory if it is not given as an + # option. + if [ x"$software_dir" = x ]; then + if read -p"(OPTIONAL) Directory of dependency tarballs ($ddir): " \ + tmpddir; then + just_a_place_holder_to_avoid_not_equal_test=1; + else + cat <<EOF +______________________________________________________ +!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!! + +WARNING: interactive-mode seems to be disabled! If you have a local copy of +the software source tarballs, use '--software-dir'. Otherwise, all the +necessary tarballs will be downloaded. + +The configuration will continue in $pausesec seconds. 
To avoid the +pause on such messages use the '--no-pause' option. + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +EOF + sleep $pausesec + fi else - echo "WARNING: interactive-mode seems to be disabled!" - echo "If you have a local copy of the software source, use '--software-dir'." - echo "... project configuration will continue in $noninteractive_sleep sec ..." - sleep $noninteractive_sleep + tmpddir="$software_dir" fi # If given, write the software directory. @@ -1071,105 +1323,115 @@ EOF ddir="$(absolute_dir "$tmpddir")" echo " -- Using '$ddir'" fi + +# The directory should be extracted from the existing LOCAL.conf, not from +# the command-line or in interactive mode; similar to 'bdir' above. +else + indir=$(awk '$1=="DEPENDENCIES-DIR" {print $3}' $lconf) fi +elapsed_time_from_prev_step software-dir -# Write the parameters into the local configuration file. -if [ $rewritepconfig = yes ]; then +# Downloader +# ---------- +# +# After this script finishes, we will have both Wget and cURL for +# downloading any necessary dataset during the processing. However, to +# complete the configuration, we may also need to download the source code +# of some necessary software packages (including the downloaders). So we +# need to check the host's available tool for downloading at this step. +if [ $rewritelconfig = yes ]; then + if type wget > /dev/null 2>/dev/null; then - # Add commented notice. - create_file_with_notice $pconf + # 'which' isn't in POSIX, so we are using 'command -v' instead. + name=$(command -v wget) + + # See if the host wget has the '--no-use-server-timestamps' option + # (for example wget 1.12 doesn't have it). If not, we'll have to + # remove it. This won't affect the analysis of Maneage in anyway, + # its just to avoid re-downloading if the server timestamps are + # bad; at the worst case, it will just cause a re-download of an + # input software source code (for data inputs, we will use our own + # wget that has this option). 
+ tsname="no-use-server-timestamps" + tscheck=$(wget --help | grep $tsname || true) + if [ x"$tscheck" = x ]; then wgetts="" + else wgetts="--$tsname"; + fi - # Write the values. - sed -e's|@bdir[@]|'"$bdir"'|' \ - -e's|@indir[@]|'"$indir"'|' \ - -e's|@ddir[@]|'"$ddir"'|' \ - -e's|@sys_cpath[@]|'"$sys_cpath"'|' \ - -e's|@downloader[@]|'"$downloader"'|' \ - -e's|@groupname[@]|'"$maneage_group_name"'|' \ - $pconf.in >> $pconf -else - # Read the values from existing configuration file. Note that the build - # directory may have space characters. Even though we currently check - # against it, we hope to be able to remove this condition in the - # future. - inbdir=$(awk '$1=="BDIR" { for(i=3; i<NF; i++) \ - printf "%s ", $i; \ - printf "%s", $NF }' $pconf) - - # Read the software directory (same as 'inbdir' above about space). - ddir=$(awk '$1=="DEPENDENCIES-DIR" { for(i=3; i<NF; i++) \ - printf "%s ", $i; \ - printf "%s", $NF}' $pconf) - - # The downloader command may contain multiple elements, so we'll just - # change the (in memory) first and second tokens to empty space and - # write the full line (the original file is unchanged). - downloader=$(awk '$1=="DOWNLOADER" {$1=""; $2=""; print $0}' $pconf) - - # Make sure all necessary variables have a value - err=0 - verr=0 - novalue="" - if [ x"$inbdir" = x ]; then novalue="BDIR, "; fi - if [ x"$downloader" = x ]; then novalue="$novalue"DOWNLOADER; fi - if [ x"$novalue" != x ]; then verr=1; err=1; fi - - # Make sure `bdir' is an absolute path and it exists. - berr=0 - ierr=0 - bdir="$(absolute_dir "$inbdir")" - - if ! [ -d "$bdir" ]; then if ! mkdir "$bdir"; then berr=1; err=1; fi; fi - if [ $err = 1 ]; then - cat <<EOF + # By default Wget keeps the remote file's timestamp, so we'll have + # to disable it manually. 
+ downloader="$name $wgetts -O"; + elif type curl > /dev/null 2>/dev/null; then + name=$(command -v curl) -################################################################# -######## ERORR reading existing configuration file ############ -################################################################# -EOF - if [ $verr = 1 ]; then - cat <<EOF + # - cURL doesn't keep the remote file's timestamp by default. + # - With the '-L' option, we tell cURL to follow redirects. + downloader="$name -L -o" + else + cat <<EOF -These variables have no value: $novalue. -EOF - fi - if [ $berr = 1 ]; then - cat <<EOF +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -Couldn't create the build directory '$bdir' (value to 'BDIR') in -'$pconf'. -EOF - fi +Couldn't find GNU Wget, or cURL on this system. These programs are used for +downloading necessary programs and data if they aren't already present (in +directories that you can specify with this configure script). Therefore if +the necessary files are not present, the project will crash. - cat <<EOF +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -Please run the configure script again (accepting to re-write existing -configuration file) so all the values can be filled and checked. -################################################################# EOF + downloader="no-downloader-found" + fi; + +# The downloader should be extracted from the existing LOCAL.conf. +else + # The value will be a command (including white spaces), so we will read + # all the "fields" from the third to the end. + downloader=$(awk '$1=="DOWNLOADER" { for(i=3; i<NF; i++) \ + printf "%s ", $i; \ + printf "%s", $NF }' $lconf) + + if [ x"$downloader" = x ]; then + printf "$scriptname: no value to 'DOWNLOADER' of '$lconf'. 
" + printf "Please run the project configuration again, but " + printf "without '--existing-conf' (or '-e')" + exit 1 fi fi +elapsed_time_from_prev_step downloader -# Delete final configuration target -# --------------------------------- -# -# We only want to start running the project later if this script has -# completed successfully. To make sure it hasn't crashed in the middle -# (without the user noticing), in the end of this script we make a file and -# we'll delete it here (at the start). Therefore if the script crashed in -# the middle that file won't exist. -sdir="$bdir"/software -finaltarget="$sdir"/configuration-done.txt -if ! [ -d "$sdir" ]; then mkdir "$sdir"; fi -rm -f "$finaltarget" +# When no local configuration existed, write the parameters into the local +# configuration file. +sdir=$bdir/software +sconfdir=$sdir/config +if ! [ -d "$sdir" ]; then mkdir "$sdir"; fi +if ! [ -d "$sconfdir" ]; then mkdir "$sconfdir"; fi +if [ $rewritelconfig = yes ]; then + + # Put the basic comments at the top of the file. + create_file_with_notice $lconf + + # Write the values. + lconfin=$cdir/LOCAL.conf.in + sed -e's|@bdir[@]|'"$bdir"'|' \ + -e's|@indir[@]|'"$indir"'|' \ + -e's|@ddir[@]|'"$ddir"'|' \ + -e's|@sys_cpath[@]|'"$sys_cpath"'|' \ + -e's|@downloader[@]|'"$downloader"'|' \ + -e's|@groupname[@]|'"$maneage_group_name"'|' \ + $lconfin >> $lconf +fi +elapsed_time_from_prev_step LOCAL-write @@ -1182,102 +1444,57 @@ rm -f "$finaltarget" # avoid too many directory dependencies throughout the software and # analysis Makefiles (thus making them hard to read), we are just building # them here -# Software tarballs tardir="$sdir"/tarballs -if ! [ -d "$tardir" ]; then mkdir "$tardir"; fi - -# Installed software instdir="$sdir"/installed -if ! [ -d "$instdir" ]; then mkdir "$instdir"; fi +tmpblddir="$sdir"/build-tmp -# To record software versions and citation. +# Second-level directories. 
+instlibdir="$instdir"/lib +instbindir="$instdir"/bin verdir="$instdir"/version-info -if ! [ -d "$verdir" ]; then mkdir "$verdir"; fi - -# Program and library versions and citation. -ibidir="$verdir"/proglib -if ! [ -d "$ibidir" ]; then mkdir "$ibidir"; fi - -# Python module versions and citation. -ipydir="$verdir"/python -if ! [ -d "$ipydir" ]; then mkdir "$ipydir"; fi - -# Used software BibTeX entries. -ictdir="$verdir"/cite -if ! [ -d "$ictdir" ]; then mkdir "$ictdir"; fi -# TeXLive versions. +# Sub-directories of version-info itidir="$verdir"/tex -if ! [ -d "$itidir" ]; then mkdir "$itidir"; fi - - - - - -# Project's top-level built analysis directories -# ---------------------------------------------- - -# Top-level built analysis directories. -badir="$bdir"/analysis -if ! [ -d "$badir" ]; then mkdir "$badir"; fi - -# Top-level LaTeX. -texdir="$badir"/tex -if ! [ -d "$texdir" ]; then mkdir "$texdir"; fi - -# LaTeX macros. -mtexdir="$texdir"/macros -if ! [ -d "$mtexdir" ]; then mkdir "$mtexdir"; fi +ictdir="$verdir"/cite +ipydir="$verdir"/python +ibidir="$verdir"/proglib +ircrandir="$verdir"/r-cran +if [ $built_container = 0 ]; then + + # Top-level directories. + if ! [ -d "$tardir" ]; then mkdir "$tardir"; fi + if ! [ -d "$instdir" ]; then mkdir "$instdir"; fi + + # Second-level directories. + if ! [ -d "$verdir" ]; then mkdir "$verdir"; fi + if ! [ -d "$instbindir" ]; then mkdir "$instbindir"; fi + + # Sub-directories of version-info + if ! [ -d "$itidir" ]; then mkdir "$itidir"; fi + if ! [ -d "$ictdir" ]; then mkdir "$ictdir"; fi + if ! [ -d "$ipydir" ]; then mkdir "$ipydir"; fi + if ! [ -d "$ibidir" ]; then mkdir "$ibidir"; fi + if ! [ -d "$ircrandir" ]; then mkdir "$ircrandir"; fi + + # Some software install their libraries in '$(idir)/lib64'. But all + # other libraries are in '$(idir)/lib'. 
Since Maneage's build is only + # for a single architecture, we can set the '$(idir)/lib64' as a + # symbolic link to '$(idir)/lib' so all the libraries are always + # available in the same place. + if ! [ -d "$instlibdir" ]; then mkdir "$instlibdir"; fi + ln -fs "$instlibdir" "$instdir"/lib64 + + # Wrapper over Make as a single command so it does not default to + # '/bin/sh' during installation (needed by some programs like CMake). + makewshell="$instbindir/make-with-shell" + if ! [ -f "$makewshell" ]; then + echo "$instbindir/make SHELL=$instbindir/bash \$@" > $makewshell + chmod +x $makewshell + fi -# TeX build directory. If built in a group scenario, the TeX build -# directory must be separate for each member (so they can work on their -# relevant parts of the paper without conflicting with each other). -if [ "x$maneage_group_name" = x ]; then - texbdir="$texdir"/build -else - user=$(whoami) - texbdir="$texdir"/build-$user + # Report the execution time of this step. + elapsed_time_from_prev_step subdirectories-of-build fi -if ! [ -d "$texbdir" ]; then mkdir "$texbdir"; fi - -# TiKZ (for building figures within LaTeX). -tikzdir="$texbdir"/tikz -if ! [ -d "$tikzdir" ]; then mkdir "$tikzdir"; fi - -# If 'tex/build' and 'tex/tikz' are symbolic links then 'rm -f' will delete -# them and we can continue. However, when the project is being built from -# the tarball, these two are not symbolic links but actual directories with -# the necessary built-components to build the PDF in them. In this case, -# because 'tex/build' is a directory, 'rm -f' will fail, so we'll just -# rename the two directories (as backup) and let the project build the -# proper symbolic links afterwards. -if rm -f tex/build; then - rm -f tex/tikz -else - mv tex/tikz tex/tikz-from-tarball - mv tex/build tex/build-from-tarball -fi - -# Set the symbolic links for easy access to the top project build -# directories. 
Note that these are put in each user's source/cloned -# directory, not in the build directory (which can be shared between many -# users and thus may already exist). -# -# Note: if we don't delete them first, it can happen that an extra link -# will be created in each directory that points to its parent. So to be -# safe, we are deleting all the links on each re-configure of the -# project. Note that at this stage, we are using the host's 'ln', not our -# own, so its best not to assume anything (like 'ln -sf'). -rm -f .build .local - -ln -s "$bdir" .build -ln -s "$instdir" .local -ln -s "$texdir" tex/build -ln -s "$tikzdir" tex/tikz - -# --------- Delete for no Gnuastro --------- -rm -f .gnuastro -# ------------------------------------------ @@ -1291,67 +1508,104 @@ rm -f .gnuastro # HDDs/SSDs and improve speed, it is therefore better to build them in the # RAM when possible. The RAM of most systems today (>8GB) is large enough # for the parallel building of the software. - +# # Set the top-level shared memory location. Currently there is only one # standard location (for GNU/Linux OSs), so doing this check here and the # main job below may seem redundant. However, it is written separately from # the main code below because later, we expect to add more possible # mounting locations (for other OSs). -if [ -d /dev/shm ]; then shmdir=/dev/shm -else shmdir="" -fi +if [ $built_container = 0 ]; then + if [ -d /dev/shm ]; then shmdir=/dev/shm + else shmdir="" + fi -# If a shared memory mounted directory exists and has the necessary -# conditions, set that directory to build software. -if [ x"$shmdir" != x ]; then - - # Make sure it has enough space. - needed_space=2000000 - available_space=$(df "$shmdir" | awk 'NR==2{print $4}') - if [ $available_space -gt $needed_space ]; then - - # Set the Maneage-specific directory within the shared - # memory. We'll use the names of the two parent directories to the - # current/running directory, separated by a `-' instead of - # `/'. 
We'll then appended that with the user's name (in case - # multiple users may be working on similar project names). - # - # Maybe later, we can use something like `mktemp' to add random - # characters to this name and make it unique to every run (even for - # a single user). - dirname=$(pwd | sed -e's/\// /g' \ - | awk '{l=NF-1; printf("%s-%s", $l, $NF)}') - tbshmdir="$shmdir"/"$dirname"-$(whoami) - if ! [ -d "$tbshmdir" ]; then mkdir "$tbshmdir"; fi - - # Some systems may protect '/dev/shm' against the right to execute - # programs by ordinary users. We thus need to check that the device - # allows execution within this directory by this user. - shmexecfile="$tbshmdir"/shm-execution-check.sh - rm -f $shmexecfile # We also don't want any existing flags. - cat > "$shmexecfile" <<EOF + # If a shared memory mounted directory exists and has the necessary + # conditions, set that directory to build software. + if [ x"$shmdir" != x ]; then + + # Make sure it has enough space. + needed_space=2000000 + available_space=$(df "$shmdir" | awk 'NR==2{print $4}') + if [ $available_space -gt $needed_space ]; then + + # Set the Maneage-specific directory within the shared + # memory. We'll use the names of the two parent directories to + # the current/running directory, separated by a '-' instead of + # '/'. We'll then appended that with the user's name (in case + # multiple users may be working on similar project names). + # + # Maybe later, we can use something like 'mktemp' to add random + # characters to this name and make it unique to every run (even + # for a single user). + dirname=$(pwd | sed -e's/\// /g' \ + | awk '{l=NF-1; printf("%s-%s", $l, $NF)}') + tbshmdir="$shmdir"/"$dirname"-$(whoami) + + # Try to make the directory if it does not yet exist. A failed + # directory creation will be tested for a few lines later, when + # testing for the existence and executability of a test file. + if ! 
[ -d "$tbshmdir" ]; then (mkdir "$tbshmdir" || true); fi + + # Some systems may protect '/dev/shm' against the right to + # execute programs by ordinary users. We thus need to check + # that the device allows execution within this directory by + # this user. + shmexecfile="$tbshmdir"/shm-execution-check.sh + rm -f $shmexecfile # We also don't want any existing flags. + + # Create the file to be executed, but do not fail fatally if it + # cannot be created. We will check a few lines later if the + # file really exists. + (cat > "$shmexecfile" <<EOF || true) #!/bin/sh -printf "This file successfully executed.\n" +a=b EOF - # Make the file executable and see if it runs. If not, set - # 'tbshmdir' to an empty string so it is not used in later steps. - # In any case, delete the temporary file afterwards. - chmod u+x "$shmexecfile" - if ! "$shmexecfile" &> /dev/null; then tbshmdir=""; fi - rm "$shmexecfile" + + # If the file was successfully created, then make the file + # executable and see if it runs. If not, set 'tbshmdir' to an + # empty string so it is not used in later steps. In any case, + # delete the temporary file afterwards. + # + # We aren't adding '&> /dev/null' after the execution command + # because it can produce false failures randomly on some + # systems. + if [ -e "$shmexecfile" ]; then + + # Add the executable flag. + chmod +x "$shmexecfile" + + # The following line tries to execute the file. + if "$shmexecfile"; then + # Successful execution. The colon is a "no-op" (no + # operation) shell command. + : + else + tbshmdir="" + fi + rm "$shmexecfile" + else + tbshmdir="" + fi + fi + else + tbshmdir="" fi -else - tbshmdir="" -fi -# If a shared memory directory was created, set the software building -# directory to be a symbolic link to it. Otherwise, just build the -# temporary build directory under the project's build directory. 
-tmpblddir="$sdir"/build-tmp -rm -rf "$tmpblddir"/* "$tmpblddir" # If it is a link, we need to empty - # its contents first, then itself. -if [ x"$tbshmdir" = x ]; then mkdir "$tmpblddir"; -else ln -s "$tbshmdir" "$tmpblddir"; + # If a shared memory directory was created, set the software building + # directory to be a symbolic link to it. Otherwise, just build the + # temporary build directory under the project's build directory. + # + # If it is a link, we need to empty its contents first, then itself. + if [ -d "$tmpblddir" ]; then empty_build_tmp; fi + + # Now that we are sure it doesn't exist, we'll make it (either as a + # directory or as a symbolic link). + if [ x"$tbshmdir" = x ]; then mkdir "$tmpblddir"; + else ln -s "$tbshmdir" "$tmpblddir"; + fi + + # Report the time this step took. + elapsed_time_from_prev_step temporary-software-building-dir fi @@ -1363,8 +1617,7 @@ fi # # Everything is ready, let the user know that the building is going to # start. -if [ $printnotice = yes ]; then - tsec=10 +if [ $quiet = 0 ]; then cat <<EOF ------------------------- @@ -1379,20 +1632,20 @@ NOTE: the built software will NOT BE INSTALLED in standard places of your OS (so no root access is required). They are only for local usage by this project. -**TIP**: you can see which software are being installed at every moment -with the following command. See "Inspecting status" section of -'README-hacking.md' for more. In short, run it while the project is being -configured (in another terminal, but in this same directory: -'$currentdir'): +TIP: you can see which software are being installed at every moment with +the following command. See "Inspecting status" section of +'README-hacking.md' for more. In short, run it in another terminal while +the project is being configured. $ ./project --check-config -Project's configuration will continue in $tsec seconds. +Project's configuration will continue in $tsec seconds. 
To avoid the pause +on such messages use the '--no-pause' option. ------------------------- EOF - sleep $tsec + sleep $pausesec fi @@ -1408,83 +1661,73 @@ fi # - On BSD-based systems (for example FreeBSD and macOS), we have a # 'hw.ncpu' in the output of 'sysctl'. # - When none of the above work, just set the number of threads to 1. -if [ $jobs = 0 ]; then - if type nproc > /dev/null 2> /dev/null; then - numthreads=$(nproc --all); +# +# This check is also used in 'reproduce/software/shell/docker.sh'. +if [ $built_container = 0 ]; then + if [ $jobs = 0 ]; then + if type nproc > /dev/null 2> /dev/null; then + numthreads=$(nproc --all); + else + numthreads=$(sysctl -a | awk '/^hw\.ncpu/{print $2}') + if [ x"$numthreads" = x ]; then numthreads=1; fi + fi else - numthreads=$(sysctl -a | awk '/^hw\.ncpu/{print $2}') - if [ x"$numthreads" = x ]; then numthreads=1; fi + numthreads=$jobs fi -else - numthreads=$jobs + elapsed_time_from_prev_step num-threads fi - -# See if the linker accepts -Wl,-rpath-link -# ----------------------------------------- -# -# `-rpath-link' is used to write the information of the linked shared -# library into the shared object (library or program). But some versions of -# LLVM's linker don't accept it an can cause problems. +# Libraries necessary for the system's shell +# ------------------------------------------ # -# IMPORTANT NOTE: This test has to be done **AFTER** the definition of -# 'instdir', otherwise, it is going to be used as an empty string. 
-cat > $testsource <<EOF -#include <stdio.h> -#include <stdlib.h> -int main(void) {return EXIT_SUCCESS;} -EOF -if $CC $testsource -o$testprog -Wl,-rpath-link 2>/dev/null > /dev/null; then - export rpath_command="-Wl,-rpath-link=$instdir/lib" -else - export rpath_command="" -fi - - - - - -# Delete the compiler testing directory -# ------------------------------------- +# In some cases (mostly the programs that Maneage doesn't yet build by +# itself), the programs may call the system's shell, not Maneage's +# shell. After we close-off the system environment from Maneage, this will +# cause a crash! To avoid such cases, we need to find the locations of the +# libraries that the shell needs and temporarily add them to the library +# search path. # -# This directory was made above to make sure the necessary compilers can be -# run. -rm -f $testprog $testsource -rm -rf $compilertestdir - - - - - -# Paths needed by the host compiler (only for `basic.mk') -# ------------------------------------------------------- +# About the 'grep -v "(0x[^)]*)"' term (from bug 66847, see [1]): On some +# systems [2], the output of 'ldd /bin/sh' includes a line for the vDSO [3] +# that is different to the formats that are assumed, prior to this commit, +# by the algorithm in 'configure.sh' when evaluating the variable +# 'sys_library_sh_path'. This leads to a fatal syntax error in (at least) +# 'ncurses', because the option using 'sys_library_sh_path' contains an +# unquoted RAM address in parentheses. Even if the address were quoted, it +# would still be incorrect. This 'grep command excludes candidate host path +# strings that look like RAM addresses to address the problem. # -# At the end of the basic build, we need to build GCC. But GCC will build -# in multiple phases, making its own simple compiler in order to build -# itself completely. 
The intermediate/simple compiler doesn't recognize -# some system specific locations like `/usr/lib/ARCHITECTURE' that some -# operating systems use. We thus need to tell the intermediate compiler -# where its necessary libraries and headers are. -if [ x"$sys_library_path" != x ]; then - if [ x"$LIBRARY_PATH" = x ]; then - export LIBRARY_PATH="$sys_library_path" +# [1] https://savannah.nongnu.org/bugs/index.php?66847 +# [2] https://stackoverflow.com/questions/34428037/how-to-interpret-the-output-of-the-ldd-program +# [3] man vdso +if [ $built_container = 0 ]; then + if [ x"$on_mac_os" = xyes ]; then + sys_library_sh_path=$(otool -L /bin/sh \ + | awk '/\/lib/{print $1}' \ + | sed 's#/[^/]*$##' \ + | sort \ + | uniq \ + | awk '{if (NR==1) printf "%s", $1; \ + else printf ":%s", $1}') else - export LIBRARY_PATH="$LIBRARY_PATH:$sys_library_path" - fi - if [ x"$CPATH" = x ]; then - export CPATH="$sys_cpath" - else - export CPATH="$CPATH:$sys_cpath" + sys_library_sh_path=$(ldd /bin/sh \ + | awk '{if($3!="") print $3}' \ + | sed 's#/[^/]*$##' \ + | grep -v "(0x[^)]*)" \ + | sort \ + | uniq \ + | awk '{if (NR==1) printf "%s", $1; \ + else printf ":%s", $1}') fi + elapsed_time_from_prev_step sys-library-sh-path fi - # Find Zenodo URL for software downloading # ---------------------------------------- # @@ -1502,41 +1745,32 @@ fi # which will download the DOI-resolved webpage, and extract the Zenodo-URL # of the most recent version from there (using the 'coreutils' tarball as # an example, the directory part of the URL for all the other software are -# the same). This is not done if the option `--debug' is used. +# the same). This is not done if the options '--debug' or `--offline` are +# used. 
zenodourl="" user_backup_urls="" -zenodocheck=.build/software/zenodo-check.html -if [ x$debug = x ]; then - if $downloader $zenodocheck https://doi.org/10.5281/zenodo.3883409; then - zenodourl=$(sed -n -e'/coreutils/p' $zenodocheck \ - | sed -n -e'/http/p' \ - | tr ' ' '\n' \ - | grep http \ - | sed -e 's/href="//' -e 's|/coreutils| |' \ - | awk 'NR==1{print $1}') - fi +zenodocheck="$bdir"/software/zenodo-check.html +if [ $built_container = 0 ]; then + if [ x$debug = x ] && [ x$offline = x ]; then + if $downloader $zenodocheck \ + https://doi.org/10.5281/zenodo.3883409; then + zenodourl=$(sed -n -e'/coreutils/p' $zenodocheck \ + | sed -n -e'/http/p' \ + | tr ' ' '\n' \ + | grep http \ + | sed -e 's/href="//' -e 's|/coreutils| |' \ + | awk 'NR==1{print $1}') + fi + fi + rm -f $zenodocheck + + # Add the Zenodo URL to the user's given back software URLs. Since the + # user can specify 'user_backup_urls' (not yet implemented as an option + # in './project'), we'll give preference to their specified servers, + # then add the Zenodo URL afterwards. + user_backup_urls="$user_backup_urls $zenodourl" + elapsed_time_from_prev_step zenodo-url fi -rm -f $zenodocheck - -# Add the Zenodo URL to the user's given back software URLs. Since the user -# can specify 'user_backup_urls' (not yet implemented as an option in -# './project'), we'll give preference to their specified servers, then add -# the Zenodo URL afterwards. -user_backup_urls="$user_backup_urls $zenodourl" - - - - - -# Build core tools for project -# ---------------------------- -# -# Here we build the core tools that 'basic.mk' depends on: Lzip -# (compression program), GNU Make (that 'basic.mk' is written in), Dash -# (minimal Bash-like shell) and Flock (to lock files and enable serial -# download). 
-./reproduce/software/shell/pre-make-build.sh \ - "$bdir" "$ddir" "$downloader" "$user_backup_urls" @@ -1564,14 +1798,31 @@ fi -# Build other basic tools our own GNU Make -# ---------------------------------------- +# Core software +# ------------- +# +# Here we build the core tools that 'basic.mk' depends on: Lzip +# (compression program), GNU Make (that 'basic.mk' is written in), Dash +# (minimal Bash-like shell) and Flock (to lock files and enable serial +# operations where necessary: mostly in download). +export on_mac_os +if [ $quiet = 0 ]; then echo "Building/validating software: pre-make"; fi +./reproduce/software/shell/pre-make-build.sh \ + "$bdir" "$ddir" "$downloader" "$user_backup_urls" +elapsed_time_from_prev_step make-software-pre-make + + + + + +# Basic software +# -------------- # -# When building these software we don't have our own un-packing software, -# Bash, Make, or AWK. In this step, we'll install such low-level basic -# tools, but we have to be very portable (and use minimal features in all). -echo; echo "Building necessary software (if necessary)..." +# Having built the core tools, we are now ready to build GCC and all its +# dependencies (the "basic" software). +if [ $quiet = 0 ]; then echo "Building/validating software: basic"; fi .local/bin/make $keepgoing -f reproduce/software/make/basic.mk \ + sys_library_sh_path=$sys_library_sh_path \ user_backup_urls="$user_backup_urls" \ sys_library_path=$sys_library_path \ rpath_command=$rpath_command \ @@ -1581,35 +1832,35 @@ echo; echo "Building necessary software (if necessary)..." on_mac_os=$on_mac_os \ host_cc=$host_cc \ -j$numthreads +elapsed_time_from_prev_step make-software-basic -# All other software -# ------------------ +# High-level software +# ------------------- # -# We will be making all the dependencies before running the top-level -# Makefile. To make the job easier, we'll do it in a Makefile, not a -# script. 
Bash and Make were the tools we need to run Makefiles, so we had -# to build them in this script. But after this, we can rely on Makefiles. -if [ $jobs = 0 ]; then - numthreads=$(.local/bin/nproc --all) -else - numthreads=$jobs -fi +# Having our custom GCC in place, we can now build the high-level (science) +# software: we are using our custom-built 'env' to ensure that nothing from +# the host environment leaks into the high-level software environment. +if [ $quiet = 0 ]; then echo "Building/validating software: high-level"; fi .local/bin/env -i HOME=$bdir \ - .local/bin/make $keepgoing -f reproduce/software/make/high-level.mk \ - user_backup_urls="$user_backup_urls" \ - sys_library_path=$sys_library_path \ - rpath_command=$rpath_command \ - all_highlevel=$all_highlevel \ - static_build=$static_build \ - numthreads=$numthreads \ - on_mac_os=$on_mac_os \ - sys_cpath=$sys_cpath \ - host_cc=$host_cc \ - -j$numthreads + .local/bin/make $keepgoing \ + -f reproduce/software/make/high-level.mk \ + sys_library_sh_path=$sys_library_sh_path \ + user_backup_urls="$user_backup_urls" \ + sys_library_path=$sys_library_path \ + rpath_command=$rpath_command \ + all_highlevel=$all_highlevel \ + static_build=$static_build \ + numthreads=$numthreads \ + on_mac_os=$on_mac_os \ + sys_cpath=$sys_cpath \ + host_cc=$host_cc \ + offline=$offline \ + -j$numthreads +elapsed_time_from_prev_step make-software-high-level @@ -1624,17 +1875,17 @@ fi # will just stop at the stage when all the processing is complete and it is # only necessary to build the PDF. So we don't want to stop the project's # configuration and building if its not present. -if [ -f $itidir/texlive-ready-tlmgr ]; then - texlive_result=$(cat $itidir/texlive-ready-tlmgr) -else - texlive_result="NOT!" -fi -if [ x"$texlive_result" = x"NOT!" ]; then - cat <<EOF +if [ $built_container = 0 ]; then + if [ -f $itidir/texlive-ready-tlmgr ]; then + texlive_result=$(cat $itidir/texlive-ready-tlmgr) + else + texlive_result="NOT!" 
+ fi + if [ x"$texlive_result" = x"NOT!" ]; then + cat <<EOF -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!!!!!!!!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!!!!!!!! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +______________________________________________________ +!!!!!!!!!!!!!!! Warning !!!!!!!!!!!!!!!! TeX Live couldn't be installed during the configuration (probably because there were downloading problems). TeX Live is only necessary in making the @@ -1654,25 +1905,30 @@ and re-run configure: ./project configure -e -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +The configuration will continue in $pausesec seconds. To avoid the pause on +such messages use the '--no-pause' option. + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! EOF - sleep 10 # increase the chance that an interactive user reads this message + sleep $pausesec + fi + elapsed_time_from_prev_step check-tex-installation fi -# Citation of installed software +# Software information the paper +# ------------------------------ # # After everything is installed, we'll put all the names and versions in a # human-readable paragraph and also prepare the BibTeX citation for the # software. prepare_name_version () { - # First see if the (possible) `*' in the input arguments corresponds to + # First see if the (possible) '*' in the input arguments corresponds to # anything. Note that some of the given directories may be empty (no # software installed). hasfiles=0 @@ -1707,101 +1963,101 @@ prepare_name_version () fi } -# Import the context/sentences for placing between the list of software -# names during their acknowledgment. -. $cdir/software_acknowledge_context.sh - -# Report the different software in separate contexts (separating Python and -# TeX packages from the C/C++ programs and libraries). 
-proglibs=$(prepare_name_version $verdir/proglib/*) -pymodules=$(prepare_name_version $verdir/python/*) -texpkg=$(prepare_name_version $verdir/tex/texlive) +# Relevant files +pkgver=$sconfdir/dependencies.tex +pkgbib=$sconfdir/dependencies-bib.tex -# Acknowledge these software packages in a LaTeX paragraph. -pkgver=$mtexdir/dependencies.tex +# Build the software LaTeX source but only when not in a container. +if [ $built_container = 0 ]; then -# Add the text to the ${pkgver} file. -.local/bin/echo "$thank_software_introduce " > $pkgver -.local/bin/echo "$thank_progs_libs $proglibs. " >> $pkgver -if [ x"$pymodules" != x ]; then - .local/bin/echo "$thank_python $pymodules. " >> $pkgver -fi -.local/bin/echo "$thank_latex $texpkg. " >> $pkgver -.local/bin/echo "$thank_software_conclude" >> $pkgver - -# Prepare the BibTeX entries for the used software (if there are any). -hasentry=0 -bibfiles="$ictdir/*" -for f in $bibfiles; do if [ -f $f ]; then hasentry=1; break; fi; done; - -# Make sure we start with an empty output file. -pkgbib=$mtexdir/dependencies-bib.tex -echo "" > $pkgbib - -# Fill it in with all the BibTeX entries in this directory. We'll just -# avoid writing any comments (usually copyright notices) and also put an -# empty line after each file's contents to make the output more readable. -if [ $hasentry = 1 ]; then - for f in $bibfiles; do - awk '!/^%/{print} END{print ""}' $f >> $pkgbib - done -fi + # Import the context/sentences for placing between the list of software + # names during their acknowledgment. + . $cdir/software_acknowledge_context.sh + # Report the different software in separate contexts (separating Python + # and TeX packages from the C/C++ programs and libraries). + proglibs=$(prepare_name_version $verdir/proglib/*) + pymodules=$(prepare_name_version $verdir/python/*) + texpkg=$(prepare_name_version $verdir/tex/texlive) + # Acknowledge these software packages in a LaTeX paragraph. 
+ .local/bin/echo "$thank_software_introduce " > $pkgver + .local/bin/echo "$thank_progs_libs $proglibs. " >> $pkgver + if [ x"$pymodules" != x ]; then + .local/bin/echo "$thank_python $pymodules. " >> $pkgver + fi + .local/bin/echo "$thank_latex $texpkg. " >> $pkgver + .local/bin/echo "$thank_software_conclude" >> $pkgver + + # Prepare the BibTeX entries for the used software (if there are any). + hasentry=0 + bibfiles="$ictdir/*" + for f in $bibfiles; do if [ -f $f ]; then hasentry=1; break; fi; done; + + # Fill it in with all the BibTeX entries in this directory. We'll just + # avoid writing any comments (usually copyright notices) and also put an + # empty line after each file's contents to make the output more readable. + echo "" > $pkgbib # We don't want to inherit any pre-existing content. + if [ $hasentry = 1 ]; then + for f in $bibfiles; do + awk '!/^%/{print} END{print ""}' $f >> $pkgbib + done + fi - - -# Report machine architecture -# --------------------------- -# -# Report hardware -hwparam="$mtexdir/hardware-parameters.tex" - -# Add the text to the ${hwparam} file. Since harware class might include -# underscore, it must be replaced with '\_', otherwise pdftex would -# complain and break the build process when doing ./project make. -hw_class_fixed="$(echo $hw_class | sed -e 's/_/\\_/')" -.local/bin/echo "\\newcommand{\\machinearchitecture}{$hw_class_fixed}" > $hwparam -.local/bin/echo "\\newcommand{\\machinebyteorder}{$byte_order}" >> $hwparam -.local/bin/echo "\\newcommand{\\machineaddresssizes}{$address_sizes}" >> $hwparam + # Report the time that this operation took. + elapsed_time_from_prev_step tex-macros +fi -# Clean the temporary build directory -# --------------------------------- +# Report machine architecture (has to be final created file) +# ---------------------------------------------------------- # -# By the time the script reaches here the temporary software build -# directory should be empty, so just delete it. 
Note `tmpblddir' may be a -# symbolic link to shared memory. So, to work in any scenario, first delete -# the contents of the directory (if it has any), then delete `tmpblddir'. -.local/bin/rm -rf $tmpblddir/* $tmpblddir - - - - - -# Register successful completion -# ------------------------------ -echo `.local/bin/date` > $finaltarget - +# This is the final file that is created in the configuration phase: it is +# used by the high-level project script to verify that configuration has +# been completed. If any other files should be created in the final statges +# of configuration, be sure to add them before this. +# +# Since harware class might include underscore, it must be replaced with +# '\_', otherwise pdftex would complain and break the build process when +# doing ./project make. +if [ $built_container = 0 ]; then + hw_class=$(uname -m) + hwparam="$sconfdir/hardware-parameters.tex" + hw_class_fixed="$(echo $hw_class | sed -e 's/_/\\_/')" + .local/bin/echo "\\newcommand{\\machinearchitecture}{$hw_class_fixed}" \ + > $hwparam + .local/bin/echo "\\newcommand{\\machinebyteorder}{$byte_order}" \ + >> $hwparam + .local/bin/echo "\\newcommand{\\machineaddresssizes}{$address_sizes}" \ + >> $hwparam + elapsed_time_from_prev_step hardware-params +fi -# Final notice -# ------------ +# Clean up and final notice +# ------------------------- # -# The configuration is now complete, we can inform the user on the next -# step(s) to take. -if [ x$maneage_group_name = x ]; then - buildcommand="./project make -j8" -else - buildcommand="./project make --group=$maneage_group_name -j8" -fi -cat <<EOF +# The configuration is now complete. We just need to delete the temporary +# build directory and inform the user (if '--quiet' wasn't called) on the +# next step(s). +if [ -d $tmpblddir ]; then empty_build_tmp; fi +if [ $quiet = 0 ]; then + + # Suggest the command to use. 
+ if [ x$maneage_group_name = x ]; then + buildcommand="./project make -j8" + else + buildcommand="./project make --group=$maneage_group_name -j8" + fi + + # Print the message. + cat <<EOF ---------------- The project and its environment are configured with no errors. @@ -1819,3 +2075,10 @@ Please run the following command to start the project. $buildcommand EOF +fi + + +# Total time +if [ $check_elapsed = 1 ]; then + echo $chel_dsum | awk '{printf "Total: %-6.2f [millisec]\n", $1}' +fi diff --git a/reproduce/software/shell/docker-README.md b/reproduce/software/shell/docker-README.md new file mode 100644 index 0000000..d651e22 --- /dev/null +++ b/reproduce/software/shell/docker-README.md @@ -0,0 +1,201 @@ +# Maneage'd projects in Docker + +Copyright (C) 2021-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org>\ +See the end of the file for license conditions. + +For an introduction on containers, see the "Building in containers" section +of the `README.md` file within the top-level directory of this +project. Here, we focus on Docker with a simple checklist on how to use the +`docker.sh` script that we have already prepared in this directory for easy +usage in a Maneage'd project. + + + + + +## Building your Maneage'd project in Docker + +Through the steps below, you will create a Docker image that will only +contain the software environment and keep the project source and built +analysis files (data and PDF) on your host operating system. This enables +you to keep the size of the image to a minimum (only containing the built +software environment) to easily move it from one computer to another. + + 0. Add your user to the `docker` group: `usermod -aG docker + USERNAME`. This is only necessary once on an operating system. + + 1. Start the Docker daemon (root permissions required). If the operating + system uses systemd you can use the command below. 
If you want the + Docker daemon to be available after a reboot also (so you don't have to + restart it after turning off your computer), run this command again but + replacing `start` with `enable` (this is not recommended if you don't + regularly use Docker: it will slow the boot time of your OS). + + ```shell + systemctl start docker + ``` + + 2. Using your favorite text editor, create a `run.sh` in your top Maneage + directory (as described in the comments at the start of the `docker.sh` + script in this directory). Just activate `--build-only` on the first + run so it doesn't go onto doing the analysis and just sets up the + software environment. Set the respective directory(s) based on your + filesystem (the software directory is optional). The `run.sh` file name + is already in `.gitignore` (because it contains local directories), so + Git will ignore it and it won't be committed by mistake. + + 3. After the setup is complete, remove the `--build-only` and run the + command below to confirm that `maneage-base` (the OS of the container) + and `maneaged` (your project's full Maneage'd environment) images are + available. If you want different names for these images, add the + `--project-name` and `--base-name` options to the `docker.sh` call. + + ```shell + docker image list + ``` + + 4. You are now ready to do your analysis by removing the `--build-only` + option. + + + + + +## Script usage tips + +The `docker.sh` script introduced above has many options allowing certain +customizations that you can see when running it with the `--help` +option. The tips below are some of the more useful scenarios that we have +encountered so far. + +### Docker image in a single file + +In case you want to store the image as a single file as backup or to move +to another computer. For such cases, run the `docker.sh` script with the +`--image-file` option (for example `--image-file=myproj.tar.gz`). 
After +moving the file to the other system, run `docker.sh` with the same option. + +When the given file to `docker.sh` already exists, it will only be used for +loading the environment. When it doesn't exist, the script will save the +image into it. + + + + + +## Docker usage tips + +Below are some useful Docker usage scenarios that have proved to be +relevant for us in Maneage'd projects. + +### Saving and loading an image as a file + +Docker keeps its images in hard to access (by humans) location on the +operating system. Very much like Git, but with much less elegance: the +place is shared by all users and projects of the system. So they are not +easy to archive for usage on another system at a low-level. But it does +have an interface (`docker save`) to copy all the relevant files within an +image into a tar ball that you can archive externally. There is also a +separate interface to load the tarball back into docker (`docker load`). + +Both of these have been implemented as the `--image-file` option of the +`docker.sh` script. If you want to save your Maneage'd image into an image, +simply give the tarball name to this option. Alternatively, if you already +have a tarball and want to load it into Docker, give it to this option once +(until you "clean up", as explained below). In fact, docker images take a +lot of space and it is better to "clean up" regularly. And the only way you +can clean up safely is through saving your needed images as a file. + +### Cleaning up + +Docker has stored many large files in your operating system that can drain +valuable storage space. The storage of the cached files are usually orders +of magnitudes larger than what you see in `docker image list`! So after +doing your work, it is best to clean up all those files. If you feel you +may need the image later, you can save it in a single file as mentioned +above and delete all the un-necessary cached files. 
Afterwards, when you +load the image, only that image will be present with nothing extra. + +The easiest and most powerful way to clean up everything in Docker is the +two commands below. The first will close all open containers. The second +will remove all stopped containers, all networks not used by at least one +container, all images without at least one container associated to them, +and all build cache. + +```shell +docker ps -a -q | xargs docker rm +docker system prune -a +``` + +If you only want to delete the existing used images, run the command +below. But be careful that the cache is the largest storage consumer! So +the command above is the solution if your OS's root partition is close to +getting filled. + +```shell +docker images -a -q | xargs docker rmi -f +``` + + +### Preserving the state of an open container + +All interactive changes in a container will be deleted as soon as you exit +it. This is a very good feature of Docker in general! If you want to make +persistent changes, you should do it in the project's plain-text source and +commit them into your project's online Git repository. But in certain +situations, it is necessary to preserve the state of an interactive +container. To do this, you need to `commit` the container (and thus save it +as a Docker "image"). To do this, while the container is still running, +open another terminal and run these commands: + +```shell +# These two commands should be done in another terminal +docker container list + +# Get the 'XXXXXXX' of your desired container from the first column above. +# Give the new image a name by replacing 'NEW-IMAGE-NAME'. +docker commit XXXXXXX NEW-IMAGE-NAME +``` + + +### Interactive tests on built container + +If you later want to start a container with the built image and enter it in +interactive mode (for example for temporary tests), run the following +command. Just replace `NAME` with the same name you specified when building +the project. 
You can always exit the container with the `exit` command +(note that all your changes will be discarded once you exit, see below if +you want to preserve your changes after you exit). + +```shell +docker run -it NAME +``` + + +### Copying files from the Docker image to host operating system + +Except for the mounted directories, the Docker environment's file system is +indepenent of your host operating system. One easy way to copy files to and +from an open container is to use the `docker cp` command (very similar to +the shell's `cp` command). + +```shell +docker cp CONTAINER:/file/path/within/container /host/path/target +``` + + + +## Copyright information + +This file is free software: you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free +Software Foundation, either version 3 of the License, or (at your option) +any later version. + +This file is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +more details. + +You should have received a copy of the GNU General Public License along +with this file. If not, see <https://www.gnu.org/licenses/>. diff --git a/reproduce/software/shell/docker.sh b/reproduce/software/shell/docker.sh new file mode 100755 index 0000000..714c75f --- /dev/null +++ b/reproduce/software/shell/docker.sh @@ -0,0 +1,519 @@ +#!/bin/sh +# +# Create a Docker container from an existing image of the built software +# environment, but with the source, data and build (analysis) directories +# directly within the host file system. This script is assumed to be run in +# the top project source directory (that has 'README.md' and +# 'paper.tex'). If not, use the '--source-dir' option to specify where the +# Maneage'd project source is located. 
+# +# Usage: +# +# - When you are at the top Maneage'd project directory, run this script +# like the example below. Just set the build directory location on your +# system. See the items below for optional values to optimize the +# process (avoid downloading for exmaple). +# +# ./reproduce/software/shell/docker.sh --shm-size=20gb \ +# --build-dir=/PATH/TO/BUILD/DIRECTORY +# +# - Non-mandatory options: +# +# - If you already have the input data that is necessary for your +# project, use the '--input-dir' option to specify its location +# on your host file system. Otherwise the necessary analysis +# files will be downloaded directly into the build +# directory. Note that this is only necessary when '--build-only' +# is not given. +# +# - If you already have the necessary software tarballs that are +# necessary for your project, use the '--software-dir' option to +# specify its location on your host file system only when +# building the container. No problem if you don't have them, they +# will be downloaded during the configuration phase. +# +# - To avoid having to set them every time you want to start the +# apptainer environment, you can put this command (with the proper +# directories) into a 'run.sh' script in the top Maneage'd project +# source directory and simply execute that. The special name 'run.sh' +# is in Maneage's '.gitignore', so it will not be included in your +# git history by mistake. +# +# Known problems: +# +# - As of 2025-04-06 the log file containing the output of the 'docker +# build' command that configures the Maneage'd project does not keep +# all the output (which gets clipped by Docker). with a "[output +# clipped, log limit 2MiB reached]" message. We need to find a way to +# fix this (so nothing gets clipped: useful for debugging). 
+# +# Copyright (C) 2021-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# +# This script is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. +# +# This script is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this script. If not, see <http://www.gnu.org/licenses/>. + + + + + +# Script settings +# --------------- +# Stop the script if there are any errors. +set -e + + + + + +# Default option values +jobs=0 +quiet=0 +source_dir= +build_only= +image_file="" +shm_size=20gb +scriptname="$0" +project_shell=0 +container_shell=0 +project_name=maneaged +base_name=maneage-base +base_os=debian:stable-slim + +print_help() { + # Print the output. + cat <<EOF +Usage: $scriptname [OPTIONS] + +Top-level script to build and run a Maneage'd project within Docker. + + Host OS directories (to be mounted in the container): + -b, --build-dir=STR Dir. to build in (only analysis in host). + -i, --input-dir=STR Dir. of input datasets (optional). + -s, --software-dir=STR Directory of necessary software tarballs. + --source-dir=STR Directory of source code (default: 'pwd -P'). + + Docker images + --base-os=STR Base OS name (default: '$base_os'). + --base-name=STR Base OS docker image (default: $base_name). + --project-name=STR Project's docker image (default: $project_name). + --image-file=STR [Docker only] Load (if given file exists), or + save (if given file does not exist), the image. + For saving, the given name has to have an + '.tar.gz' suffix. + + Interactive shell + --project-shell Open the project's shell within the container. 
+ --container-shell Open the container shell. + + Operating mode: + -q, --quiet Do not print informative statements. + -?, --help Give this help list. + --shm-size=STR Passed to 'docker build' (default: $shm_size). + -j, --jobs=INT Number of threads to use in each phase. + --build-only Just build the container, don't run it. + +Mandatory or optional arguments to long options are also mandatory or +optional for any corresponding short options. + +Maneage URL: https://maneage.org + +Report bugs to mohammad@akhlaghi.org +EOF +} + +on_off_option_error() { + if [ "x$2" = x ]; then + echo "$scriptname: '$1' doesn't take any values" + else + echo "$scriptname: '$1' (or '$2') doesn't take any values" + fi + exit 1 +} + +check_v() { + if [ x"$2" = x ]; then + printf "$scriptname: option '$1' requires an argument. " + printf "Try '$scriptname --help' for more information\n" + exit 1; + fi +} + +while [ $# -gt 0 ] +do + case $1 in + + # OS directories + -b|--build-dir) build_dir="$2"; check_v "$1" "$build_dir"; shift;shift;; + -b=*|--build-dir=*) build_dir="${1#*=}"; check_v "$1" "$build_dir"; shift;; + -b*) build_dir=$(echo "$1" | sed -e's/-b//'); check_v "$1" "$build_dir"; shift;; + -i|--input-dir) input_dir="$2"; check_v "$1" "$input_dir"; shift;shift;; + -i=*|--input-dir=*) input_dir="${1#*=}"; check_v "$1" "$input_dir"; shift;; + -i*) input_dir=$(echo "$1" | sed -e's/-i//'); check_v "$1" "$input_dir"; shift;; + -s|--software-dir) software_dir="$2"; check_v "$1" "$software_dir"; shift;shift;; + -s=*|--software-dir=*) software_dir="${1#*=}"; check_v "$1" "$software_dir"; shift;; + -s*) software_dir=$(echo "$1" | sed -e's/-s//'); check_v "$1" "$software_dir"; shift;; + --source-dir) source_dir="$2"; check_v "$1" "$source_dir"; shift;shift;; + --source-dir=*) source_dir="${1#*=}"; check_v "$1" "$source_dir"; shift;; + + # Container options. 
+ --base-name) base_name="$2"; check_v "$1" "$base_name"; shift;shift;; + --base-name=*) base_name="${1#*=}"; check_v "$1" "$base_name"; shift;; + --project-name) project_name="$2"; check_v "$1" "$project_name"; shift;shift;; + --project-name=*) project_name="${1#*=}"; check_v "$1" "$project_name"; shift;; + + # Interactive shell. + --project-shell) project_shell=1; shift;; + --project_shell=*) on_off_option_error --project-shell;; + --container-shell) container_shell=1; shift;; + --container_shell=*) on_off_option_error --container-shell;; + + # Operating mode + -q|--quiet) quiet=1; shift;; + -q*|--quiet=*) on_off_option_error --quiet;; + -j|--jobs) jobs="$2"; check_v "$1" "$jobs"; shift;shift;; + -j=*|--jobs=*) jobs="${1#*=}"; check_v "$1" "$jobs"; shift;; + -j*) jobs=$(echo "$1" | sed -e's/-j//'); check_v "$1" "$jobs"; shift;; + --build-only) build_only=1; shift;; + --build-only=*) on_off_option_error --build-only;; + --shm-size) shm_size="$2"; check_v "$1" "$shm_size"; shift;shift;; + --shm-size=*) shm_size="${1#*=}"; check_v "$1" "$shm_size"; shift;; + -'?'|--help) print_help; exit 0;; + -'?'*|--help=*) on_off_option_error --help -?;; + + # Output file + --image-file) image_file="$2"; check_v "$1" "$image_file"; shift;shift;; + --image-file=*) image_file="${1#*=}"; check_v "$1" "$image_file"; shift;; + + # Unrecognized option: + -*) echo "$scriptname: unknown option '$1'"; exit 1;; + esac +done + + + + + +# Sanity checks +# ------------- +# +# Make sure that the build directory is given and that it exists. +if [ x$build_dir = x ]; then + printf "$scriptname: '--build-dir' not provided, this is the location " + printf "that all built analysis files will be kept on the host OS\n" + exit 1; +else + if ! [ -d $build_dir ]; then + printf "$scriptname: '$build_dir' (value to '--build-dir') doesn't " + printf "exist\n"; exit 1; + fi +fi + +# The temporary directory to place the Dockerfile. 
+tmp_dir="$build_dir"/temporary-docker-container-dir + + + + +# Directory preparations +# ---------------------- +# +# If the host operating system has '/dev/shm', then give Docker access +# to it also for improved speed in some scenarios (like configuration). +if [ -d /dev/shm ]; then shm_mnt="-v /dev/shm:/dev/shm"; +else shm_mnt=""; fi + +# If the following directories do not exist within the build directory, +# create them to make sure the '--mount' commands always work and +# that any file. Ideally, the 'input' directory should not be under the 'build' +# directory, but if the user hasn't given it then they don't care about +# potentially deleting it later (Maneage will download the inputs), so put +# it in the build directory. +analysis_dir="$build_dir"/analysis +if ! [ -d $analysis_dir ]; then mkdir $analysis_dir; fi + +# If no '--source-dir' was given, set it to the output of 'pwd -P' (to get +# the path without potential symbolic links) in the running directory. +if [ x"$source_dir" = x ]; then source_dir=$(pwd -P); fi + +# Only when an an input directory is given, we need the respective 'mount' +# option for the 'docker run' command. +input_dir_mnt="" +if ! [ x"$input_dir" = x ]; then + input_dir_mnt="-v $input_dir:/home/maneager/input" +fi + +# Number of threads to build software (taken from 'configure.sh'). +if [ x"$jobs" = x0 ]; then + if type nproc > /dev/null 2> /dev/null; then + numthreads=$(nproc --all); + else + numthreads=$(sysctl -a | awk '/^hw\.ncpu/{print $2}') + if [ x"$numthreads" = x ]; then numthreads=1; fi + fi +else + numthreads=$jobs +fi + +# Since the container is read-only and is run with the '--contain' option +# (which makes an empty '/tmp'), we need to make a dedicated directory for +# the container to be able to write to. This is necessary because some +# software (Biber in particular on the default branch or Ghostscript) need +# to write there! See https://github.com/plk/biber/issues/494. 
We'll keep +# the directory on the host OS within the build directory, but as a hidden +# file (since it is not necessary in other types of build and ultimately +# only contains temporary files of programs that need it). +toptmp=$build_dir/.docker-tmp-$(whoami) +if ! [ -d $toptmp ]; then mkdir $toptmp; fi +chmod -R +w $toptmp/ # Some software remove writing flags on /tmp files. +if ! [ x"$( ls -A $toptmp )" = x ]; then rm -r "$toptmp"/*; fi + +# [DOCKER-ONLY] Make sure the user is a member of the 'docker' group. This +# is needed only for Linux, given that other systems uses other strategies. +# (See: https://stackoverflow.com/a/70385997) +kernelname=$(uname -s) +if [ x$kernelname = xLinux ]; then + glist=$(groups $(whoami) | awk '/docker/') + if [ x"$glist" = x ]; then + printf "$scriptname: you are not a member of the 'docker' group " + printf "You can run the following command as root to fix this: " + printf "'usermod -aG docker $(whoami)'\n" + exit 1 + fi +fi + +# [DOCKER-ONLY] Function to check the temporary directory for building the +# base operating system docker image. It is necessary that this directory +# be empty because Docker will inherit the sub-directories of the directory +# that the Dockerfile is located in. +tmp_dir_check () { + if [ -d $tmp_dir ]; then + printf "$scriptname: '$tmp_dir' already exists, please " + printf "delete it and re-run this script. This is a temporary " + printf "directory only necessary when building a Docker image " + printf "and gets deleted automatically after a successful " + printf "build. The fact that it remains hints at a problem " + printf "in a previous attempt to build a Docker image\n" + exit 1 + else + mkdir $tmp_dir + fi +} + + + + + +# Base operating system +# --------------------- +# +# If the base image does not exist, then create it. If it does, inform the +# user that it will be used. 
+if docker image list | grep $base_name &> /dev/null; then + if [ $quiet = 0 ]; then + printf "$scriptname: info: base OS docker image ('$base_name') " + printf "already exists and will be used. If you want to build a " + printf "new base OS image, give a new name to '--base-name'. " + printf "To remove this message run with '--quiet'\n" + fi +else + + # In case an image file is given, load the environment from that (no + # need to build the environment from scratch). + if ! [ x"$image_file" = x ] && [ -f "$image_file" ]; then + docker load --input $image_file + else + + # Build the temporary directory. + tmp_dir_check + + # Build the Dockerfile. + uid=$(id -u) + cat <<EOF > $tmp_dir/Dockerfile +FROM $base_os +RUN useradd -ms /bin/sh --uid $uid maneager; \\ + printf '123\n123' | passwd maneager; \\ + printf '456\n456' | passwd root +RUN apt update; apt install -y gcc g++ wget; echo 'export PS1="[\[\033[01;31m\]\u@\h \W\[\033[32m\]\[\033[00m\]]# "' >> ~/.bashrc +USER maneager +WORKDIR /home/maneager +RUN mkdir build; mkdir build/analysis; echo 'export PS1="[\[\033[01;35m\]\u@\h \W\[\033[32m\]\[\033[00m\]]$ "' >> ~/.bashrc +EOF + + # Build the base-OS container and delete the temporary directory. + curdir="$(pwd)" + cd $tmp_dir + docker build ./ \ + -t $base_name \ + --shm-size=$shm_size + cd "$curdir" + rm -rf $tmp_dir + fi +fi + + + + + +# Maneage software configuration +# ------------------------------ +# +# Having the base operating system in place, we can now construct the +# project's docker file. +intbuild=/home/maneager/build +if docker image list | grep $project_name &> /dev/null; then + if [ $quiet = 0 ]; then + printf "$scriptname: info: project's image ('$project_name') " + printf "already exists and will be used. If you want to build a " + printf "new project image, give a new name to '--project-name'. " + printf "To remove this message run with '--quiet'\n" + fi +else + + # Build the temporary directory. 
+ tmp_dir_check + df=$tmp_dir/Dockerfile + + # The only way to mount a directory inside the Docker build environment + # is the 'RUN --mount' command. But Docker doesn't recognize things + # like symbolic links. So we need to copy the project's source under + # this temporary directory. + sdir=source + mkdir $tmp_dir/$sdir + dsr=/home/maneager/source-raw + cp -r $source_dir/* $source_dir/.git $tmp_dir/$sdir + + # Start constructing the Dockerfile. + # + # Note on the printf's '\x5C\n' part: this will print out as a + # backslash at the end of the line to allow easy human readability of + # the Dockerfile (necessary for debugging!). + echo "FROM $base_name" > $df + printf "RUN --mount=type=bind,source=$sdir,target=$dsr \x5C\n" >> $df + + # If a software directory was given, copy it and add its line. + tsdir=tarballs-software + dts=/home/maneager/tarballs-software + if ! [ x"$software_dir" = x ]; then + + # Make the directory to host the software and copy the contents + # that the user gave there. + mkdir $tmp_dir/$tsdir + cp -r "$software_dir"/* $tmp_dir/$tsdir/ + printf " --mount=type=bind,source=$tsdir,target=$dts \x5C\n" >> $df + fi + + # Construct the rest of the 'RUN' command. + printf " cp -r $dsr /home/maneager/source; \x5C\n" >> $df + printf " cd /home/maneager/source; \x5C\n" >> $df + printf " ./project configure --jobs=$jobs \x5C\n" >> $df + printf " --build-dir=$intbuild \x5C\n" >> $df + printf " --input-dir=/home/maneager/input \x5C\n" >> $df + printf " --software-dir=$dts; \x5C\n" >> $df + + # We are deleting the '.build/software/tarballs' directory because this + # directory is not relevant for the analysis of the project. But in + # case any tarball was downloaded, it will consume space within the + # container. + printf " rm -rf .build/software/tarballs; \x5C\n" >> $df + + # We are deleting the source directory becaues later (at 'docker run' + # time), the 'source' will be mounted directly from the host operating + # system. 
+ printf " cd /home/maneager; \x5C\n" >> $df + printf " rm -rf source\n" >> $df + + # Build the Maneage container and delete the temporary directory. The + # '--progress plain' option is for Docker to print all the outputs + # (otherwise, it will only print a very small part!). + cd $tmp_dir + docker build ./ -t $project_name \ + --progress=plain \ + --shm-size=$shm_size \ + --no-cache \ + 2>&1 | tee build.log + cd .. + rm -rf $tmp_dir +fi + +# If the user wants to save the container (into a file that does not +# exist), do it here. If the file exists, it will only be used for creating +# the container in the previous stages. +if ! [ x"$image_file" = x ] && ! [ -f "$image_file" ]; then + + # Save the image into a tarball + tarname=$(echo $image_file | sed -e's|.gz$||') + if [ $quiet = 0 ]; then + printf "$scriptname: info: saving docker image to '$tarname'" + fi + docker save -o $tarname $project_name + + # Compress the saved image + if [ $quiet = 0 ]; then + printf "$scriptname: info: compressing to '$image_file' (can " + printf "take +10 minutes, but volume decreases by more than half!)" + fi + gzip --best $tarname +fi + +# If the user just wanted to build the base operating system, abort the +# script here. +if ! [ x"$build_only" = x ]; then + if [ $quiet = 0 ]; then + printf "$scriptname: info: Maneaged project has been configured " + printf "successfully in the '$project_name' image" + fi + exit 0 +fi + + + + + +# Run the analysis within the Maneage'd container +# ----------------------------------------------- +# +# The startup command of the container is managed though the 'shellopt' +# variable that starts here. 
+shellopt="" +sobase="/bin/bash -c 'cd source; " +sobase="$sobase ./project configure --build-dir=$intbuild " +sobase="$sobase --existing-conf --no-pause --offline --quiet && " +sobase="$sobase ./project MODE --build-dir=$intbuild" +if [ $container_shell = 1 ] || [ $project_shell = 1 ]; then + + # The interactive flag is necessary for both these scenarios. + interactiveopt="-it" + + # With '--project-shell' we need 'shellopt', the MODE just needs to be + # set to 'shell'. + if [ $project_shell = 1 ]; then + shellopt="$(echo $sobase | sed -e's|MODE|shell|');'" + fi + +# No interactive shell requested, just run the project. +else + interactiveopt="" + shellopt="$(echo $sobase | sed -e's|MODE|make|') --jobs=$jobs;'" +fi + +# Execute Docker. The 'eval' is because the 'shellopt' variable contains a +# single-quote that the shell should "evaluate". +eval docker run --read-only \ + -v "$analysis_dir":/home/maneager/build/analysis \ + -v "$source_dir":/home/maneager/source \ + -v $toptmp:/tmp \ + $input_dir_mnt \ + $shm_mnt \ + $interactiveopt \ + $project_name \ + $shellopt diff --git a/reproduce/software/shell/git-post-checkout b/reproduce/software/shell/git-post-checkout index 7a90108..7b521a3 100755 --- a/reproduce/software/shell/git-post-checkout +++ b/reproduce/software/shell/git-post-checkout @@ -1,14 +1,14 @@ #!@BINDIR@/bash # # The example hook script to store the metadata information of version -# controlled files (with each commit) using the `metastore' program. +# controlled files (with each commit) using the 'metastore' program. # # Copyright (C) 2016 Przemyslaw Pawelczyk <przemoc@gmail.com> -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # -# This script is taken from the `examples/hooks/pre-commit' file of the -# `metastore' package (installed within the project, with an MIT license -# for copyright). 
We have just changed the name of the `MSFILE' and also +# This script is taken from the 'examples/hooks/pre-commit' file of the +# 'metastore' package (installed within the project, with an MIT license +# for copyright). We have just changed the name of the 'MSFILE' and also # set special characters for the installation location of meta-store so our # own installation is found by Git. # diff --git a/reproduce/software/shell/git-pre-commit b/reproduce/software/shell/git-pre-commit index 85d3474..7b98ad0 100755 --- a/reproduce/software/shell/git-pre-commit +++ b/reproduce/software/shell/git-pre-commit @@ -1,10 +1,10 @@ #!@BINDIR@/bash # # The example hook script to store the metadata information of version -# controlled files (with each commit) using the `metastore' program. +# controlled files (with each commit) using the 'metastore' program. # # Copyright (C) 2016 Przemyslaw Pawelczyk <przemoc@gmail.com> -# Copyright (C) 2018-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2018-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # WARNING: # @@ -17,9 +17,9 @@ # git reset HEAD -- .metadata # git checkout HEAD -- .metadata # -# This script is taken from the `examples/hooks/pre-commit' file of the -# `metastore' package (installed within the project, with an MIT license -# for copyright). Here, the name of the `MSFILE' and also set special +# This script is taken from the 'examples/hooks/pre-commit' file of the +# 'metastore' package (installed within the project, with an MIT license +# for copyright). Here, the name of the 'MSFILE' and also set special # characters for the installation location of meta-store so our own # installation is found by Git. 
# diff --git a/reproduce/software/shell/pre-make-build.sh b/reproduce/software/shell/pre-make-build.sh index a033963..172bdb6 100755 --- a/reproduce/software/shell/pre-make-build.sh +++ b/reproduce/software/shell/pre-make-build.sh @@ -2,7 +2,7 @@ # # Very basic tools necessary to start Maneage's default building. # -# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2020-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This script is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -48,7 +48,7 @@ instdir="$sdir"/installed tmpblddir="$sdir"/build-tmp confdir=reproduce/software/config ibidir="$instdir"/version-info/proglib -downloadwrapper=reproduce/analysis/bash/download-multi-try +downloadwrapper=reproduce/analysis/bash/download-multi-try.sh # Derived directories bindir="$instdir"/bin @@ -123,7 +123,8 @@ download_tarball() { # Make sure this is the correct tarball. if type sha512sum > /dev/null 2> /dev/null; then checksum=$(sha512sum "$ucname" | awk '{print $1}') - expectedchecksum=$(awk '/^'$progname'-checksum/{print $3}' "$checksumsfile") + expectedchecksum=$(awk '/^'$progname'-checksum/{print $3}' \ + "$checksumsfile") if [ x$checksum = x$expectedchecksum ]; then mv "$ucname" "$maneagetar" else echo "ERROR: Non-matching checksum: $tarball" @@ -134,14 +135,6 @@ download_tarball() { else mv "$ucname" "$maneagetar" fi fi - - # If the tarball is newer than the (possibly existing) program (the version - # has changed), then delete the program. - if [ -f "$ibidir/$progname" ]; then - if [ "$maneagetar" -nt "$ibidir/$progname" ]; then - rm "$ibidir/$progname" - fi - fi } @@ -156,6 +149,9 @@ build_program() { # Options configoptions=$1 + # Inform the user. + echo; echo "Pre-make building of $progname"; echo + # Go into the temporary building directory. 
cd "$tmpblddir" unpackdir="$progname"-"$version" @@ -174,17 +170,34 @@ build_program() { fi # Unpack the tarball and go into it. - tar xf "$intar" + tar xf "$intar" --no-same-owner --no-same-permissions if [ x$intarrm = x1 ]; then rm "$intar"; fi cd "$unpackdir" # build the project, either with Make and either without it. if [ x$progname = xlzip ]; then - ./configure --build --check --installdir="$instdir/bin" $configoptions + ./configure --build --check --installdir="$instdir/bin" \ + $configoptions else # All others accept the configure script. ./configure --prefix="$instdir" $configoptions + # In Flock 0.4.0 there is a crash that can be fixed by simply + # replacing '%1u' with '%ld' on GNU/Linux and '%d' on macOS. This + # has been reported to flock maintainers: + # https://github.com/discoteq/flock/issues/33 + if [ x$progname = xflock ]; then + case $on_mac_os in + yes) sed -e's/\%1u/\%d/' src/flock.c > src/flock-new.c;; + no) sed -e's/\%1u/\%ld/' src/flock.c > src/flock-new.c;; + *) + printf "pre-make-build.sh: '$on_mac_os' " + printf "unrecognized value for on_mac_os" + exit 1;; + esac + mv src/flock-new.c src/flock.c + fi + # To build GNU Make, we don't want to assume the existance of a # Make program, so we use its 'build.sh' script and its own built # 'make' program to install itself. @@ -192,15 +205,20 @@ build_program() { /bin/sh build.sh ./make install else - make + make V=1 make install fi fi - # Clean up the source directory + # Clean up the source directory and write the LaTeX name of the + # software (if necessary: otherwise, just make an empty file). cd "$topdir" rm -rf "$tmpblddir/$unpackdir" - echo "$progname_tex $version" > "$ibidir/$progname" + if [ x"$progname_tex" = x ]; then + echo "" > "$texfile" + else + echo "$progname_tex $version" > "$texfile" + fi fi } @@ -217,12 +235,12 @@ build_program() { # (without compression it is just ~400Kb). So we use its '.tar' file and # won't rely on the host's compression tools at all. 
progname="lzip" -progname_tex="Lzip" +progname_tex="" # Lzip is re-built after GCC (empty to avoid repetition) url=$(awk '/^'$progname'-url/{print $3}' $urlfile) version=$(awk '/^'$progname'-version/{print $3}' "$versionsfile") tarball=$progname-$version.tar -download_tarball -build_program +texfile="$ibidir/$progname-$version-pre-make" +if ! [ -f $texfile ]; then download_tarball; build_program; fi @@ -235,13 +253,23 @@ build_program # '--disable-dependency-tracking' configure-time option is necessary so # Make doesn't check for an existing 'make' implementation (recall that we # aren't assuming any 'make' on the host). +# +# If GNU Guile is already present on the host system, Make will try to link +# with it, and this will cause dependency problems later. So we have +# disabled Guile. If a project needs the Guile extensions of Make, we need +# to add a build rule for Guile in Maneage, with a special Guile-enabled +# Make that has a different executable name (using the '--program-prefix=' +# configure option) from the "default" make (which is this one!). progname="make" -progname_tex="GNU Make" +progname_tex="" # Make re-built after GCC (empty string to avoid repetition) url=$(awk '/^'$progname'-url/{print $3}' $urlfile) version=$(awk '/^'$progname'-version/{print $3}' $versionsfile) tarball=$progname-$version.tar.lz -download_tarball -build_program --disable-dependency-tracking +texfile="$ibidir/$progname-$version-pre-make" +if ! [ -f $texfile ]; then + download_tarball + build_program "--disable-dependency-tracking --without-guile" +fi @@ -258,13 +286,11 @@ progname_tex="Dash" url=$(awk '/^'$progname'-url/{print $3}' $urlfile) version=$(awk '/^'$progname'-version/{print $3}' $versionsfile) tarball=$progname-$version.tar.lz -download_tarball -build_program +texfile="$ibidir/$progname-$version" +if ! [ -f $texfile ]; then download_tarball; build_program; fi # If the 'sh' symbolic link isn't set yet, set it to point to Dash. 
-if [ -f $bindir/sh ]; then just_a_place_holder=1 -else ln -sf $bindir/dash $bindir/sh; -fi +if ! [ -f $bindir/sh ]; then ln -sf $bindir/dash $bindir/sh; fi @@ -274,11 +300,11 @@ fi # ----- # # Flock (or file-lock) is necessary to serialize operations when -# necessary. GNU/Linux machines have it as part of their `util-linux' +# necessary. GNU/Linux machines have it as part of their 'util-linux' # programs. But to be consistent in non-GNU/Linux systems, we will be using # our own build. # -# The reason that `flock' is built here is that generally the building of +# The reason that 'flock' is built here is that generally the building of # software is done in parallel, but we need it to serialize the download # process of the software tarballs to avoid network complications when too # many simultaneous download commands are called. @@ -287,12 +313,5 @@ progname_tex="Discoteq flock" url=$(awk '/^'$progname'-url/{print $3}' $urlfile) version=$(awk '/^'$progname'-version/{print $3}' $versionsfile) tarball=$progname-$version.tar.lz -download_tarball -build_program - - - - - -# Finish this script successfully -exit 0 +texfile="$ibidir/$progname-$version" +if ! [ -f $texfile ]; then download_tarball; build_program; fi diff --git a/reproduce/software/shell/prep-source.sh b/reproduce/software/shell/prep-source.sh new file mode 100755 index 0000000..dcdc472 --- /dev/null +++ b/reproduce/software/shell/prep-source.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env sh +# +# Necessary corrections in the un-packed source of programs to make them +# portable (for example to not use '/bin/sh'). 
+# +# Usage: Run in top source directory (will work on all files within the +# directory that it is run in): +# ./prep-source.sh /FULL/ADDRESS/TO/DESIRED/BIN +# +# Copyright (C) 2024-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# +# This script is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. +# +# This script is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this script. If not, see <http://www.gnu.org/licenses/>. + + + + + +# Abort the script in case of an error +set -e + + + + +# Read the first argument. +bindir="$1" +if [ x"$bindir" = x ]; then + printf "$0: no argument (location of the 'bin/' directory " + printf "containing the 'bash' executable)\n" + exit 1 +elif ! [ -d "$bindir" ]; then + printf "$0: the directory given as the first argument ('$bindir')" + printf "does not exist" +fi + + + + + +# Find all the files that contain the '/bin/sh' string and correct them to +# Maneage's own Bash. We are using 'while read' to read the file names line +# by line. This is necessary to account for file names that include the 'SPACE' +# character (happens in CMake for example!). +# +# Note that dates are important in the source directory (files depend on +# each other), so we should read the original date and re-set it after making +# the change. We are +# also not using GNU SED's '-i' ('--in-place') option because the host OS +# may not have GNU SED. 
+# +# Actual situation which prompted the addition of this step: a Maneage'd +# project (with GNU Bash 5.1.8 and Readline 8.1.1) was being built on a +# system where '/bin/sh' was GNU Bash 5.2.26 and had Readline 8.2.010. The +# newer version of Bash needed the newer Readline library function(s) that +# were not available in Maneage's Readline library. Therefore, as soon as +# the basic software were built and Maneage entered the creation of +# high-level software (where we completely close-off the host environment), +# Maneage crashed with the following '/bin/sh' error: +# +# /bin/sh: symbol lookup error: /bin/sh: undefined symbol: rl_trim_arg_from_keyseq +# +# This led to the discovery that through '/bin/sh' the host operating +# system was leaking into our closed Maneage environment which needs to be +# closed. This needs a source-level correction because '/bin/sh' is +# hard-coded in the source code of almost all programs (their build +# scripts); and in special programs like GNU Make, GNU M4 or CMake it is +# actually hardcoded in the source code (not just build scripts). +if [ -f "$bindir/bash" ]; then shpath="$bindir"/bash +else shpath="$bindir"/dash +fi +grep -I -r -e'/bin/sh' $(pwd)/* \ + | sed -e's|:|\t|' \ + | awk 'BEGIN{FS="\t"}{print $1}' \ + | sort \ + | uniq \ + | while read filename; do \ + tmp="$filename".tmp; \ + origtime="$(date -R -r "$filename")"; \ + origperm=$(stat -c '%a' "$filename"); \ + sed -e's|/bin/sh|'"$shpath"'|g' "$filename" > "$tmp"; \ + mv "$tmp" "$filename"; \ + chmod $origperm "$filename"; \ + touch -d"$origtime" "$filename"; \ + echo "Corrected /bin/sh in $filename"; \ + done diff --git a/reproduce/software/shell/run-parts.in b/reproduce/software/shell/run-parts.in index 7e649b1..053f5f3 100755 --- a/reproduce/software/shell/run-parts.in +++ b/reproduce/software/shell/run-parts.in @@ -10,8 +10,8 @@ # However, it didn't have a copyright statement. So one is being added # here. # -# Copyright (C) 2021 Authors mentioned above. 
-# Copyright (C) 2020-2021 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2021-2025 Authors mentioned above. +# Copyright (C) 2020-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> # # This script is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/reproduce/software/shell/tarball-prepare.sh b/reproduce/software/shell/tarball-prepare.sh new file mode 100755 index 0000000..8e30931 --- /dev/null +++ b/reproduce/software/shell/tarball-prepare.sh @@ -0,0 +1,268 @@ +#!/bin/bash + +# Script to convert all files (tarballs in any format; just recognized +# by 'tar') within an 'odir' to a unified '.tar.lz' format. +# +# The inputs are assumed to be formatted with 'NAME_VERSION', and only for +# the names, we are currently assuming '.tar.*' (for the 'sed' +# command). Please modify/generalize accordingly. +# +# It will unpack the source in a certain empty directory with the +# 'tmpunpack' suffix, and rename the top directory to the requested format +# of NAME-VERSION also. So irrespective of the name of the top original +# tarball directory, the resulting tarball's top directory will have a name +# formatting of NAME-VERSION. +# +# Discussion: https://savannah.nongnu.org/task/?15699 +# +# Copyright (C) 2022-2025 Mohammad Akhlaghi <mohammad@akhlaghi.org> +# Copyright (C) 2022-2025 Pedram Ashofteh Ardakani <pedramardakani@pm.me> +# Copyright (C) 2025-2025 Giacomo Lorenzetti <glorenzetti@cefca.es> +# +# This script is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. +# +# This script is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this script. If not, see <http://www.gnu.org/licenses/>. + + + + + +# Abort the script in case of an error. +set -e + + + + + +# Default arguments +odir= +idir= +quiet= +basedir=$PWD +scriptname=$0 + + +# The --help output +print_help() { + cat <<EOF +Usage: $scriptname [OPTIONS] + +Low-level script to create maneage-standard tarballs. + -o, --output-dir Target directory to write the packed tarballs. + Current: $odir + -i, --input-dir Directory containing original tarballs. + Current: $idir + -q, --quiet Suppress logging information. Only print the + final packed file and its sha512sum. +Maneage URL: https://maneage.org + +Report bugs: https://savannah.nongnu.org/bugs/?group=reproduce +EOF +} + + + + + +# Functions to check option values and complain if necessary. +on_off_option_error() { + if [ x"$2" = x ]; then + echo "$scriptname: '$1' doesn't take any values" + else + echo "$scriptname: '$1' (or '$2') doesn't take any values" + fi + exit 1 +} + +check_v() { + if [ x"$2" = x ]; then + cat <<EOF +$scriptname: option '$1' requires an argument. Try '$scriptname --help' for more information +EOF + exit 1; + fi +} + +option_given_and_valid() { + dirname="$1" + optionlong="$2" + optionshort="$3" + if [ x"$dirname" = x ]; then + cat <<EOF +$scriptname: no '--$optionlong' (or '-$optionshort') given: use this for identifying the directory containing the input tarballs +EOF + exit 1 + else + dirname=$(echo "$dirname" | sed 's|/$||'); # Remove possible trailing slash + if [ ! 
-d "$dirname" ]; then + cat <<EOF +$scriptname: '$dirname' that is given to '--$optionlong' (or '-$optionshort') couldn't be opened +EOF + exit 1 + else + outdir=$(realpath $dirname) + fi + fi + ogvout=$outdir +} + + + + + +# Parse the arguments +while [ $# -gt 0 ] +do + case $1 in + # Input and Output directories + -i|--input-dir) idir="$2"; check_v "$1" "$idir"; shift;shift;; + -i=*|--input-dir=*) idir="${1#*=}"; check_v "$1" "$idir"; shift;; + -i*) idir=$(echo "$1" | sed -e's/-i//'); check_v "$1" "$idir"; shift;; + -o|--output-dir) odir="$2"; check_v "$1" "$odir"; shift;shift;; + -o=*|--output-dir=*) odir="${1#*=}"; check_v "$1" "$odir"; shift;; + -o*) odir=$(echo "$1" | sed -e's/-o//'); check_v "$1" "$odir"; shift;; + + # Operating mode options + -?|--help) print_help; exit 0;; + -'?'*|--help=*) on_off_option_error --help -?;; + -q|--quiet) quiet=1; shift;; + -q*|--quiet=*) on_off_option_error --quiet -q;; + *) echo "$scriptname: unknown option '$1'"; exit 1;; + esac +done + + + + + +# Basic sanity checks +# +# Make sure the input and output directories are given. Also extract +# the absolute path to input and output directories and remove any +# possible trailing '/'. Working with a relative path is a great +# source of confusion and unwanted side-effects like moving/removing +# files by accident. +option_given_and_valid "$idir" "input-dir" "i" && idir=$ogvout +option_given_and_valid "$odir" "output-dir" "o" && odir=$ogvout + + + + + +# Unpack and pack all files in the '$idir' +# ---------------------------------------- +allfiles=$(ls $idir | sort) + +# Let user know number of tarballs if its not in quiet mode +if [ -z $quiet ]; then + nfiles=$(ls $idir | wc -l) + echo "Found $nfiles file(s) in '$idir/'" +fi + +# Process all files +for f in $allfiles; do + + # Extract the name and version (while replacing any possible '_' with + # '-' because some software separate name and version with '_'). 
+ name=$(echo $(basename $f) \ + | sed -e 's/.tar.*//' -e's/_/-/') + + # Extract the software's name from the tarball name (needed later + # for software-specific checks). + sname=$(echo $name | sed -e's|-| |' | awk '{print $1}') + + # Lzip will not be available to unpack Lzip itself, so just use Tar. + if [[ $name =~ ^lzip ]]; then + outname=$name.tar + else + outname=$name.tar.lz + fi + + # Skip previously packed files + if [ -f $odir/$outname ]; then + + # Print the info message if not in quiet mode + if [ -z $quiet ]; then + echo "$scriptname: $odir/$outname: already present in output directory" + fi + + # skip this file + continue + else + + # Print the info message if not in quiet mode + if [ -z $quiet ]; then + echo "$scriptname: processing '$idir/$f'" + fi + fi + + # Create a temporary directory name + tmpdir=$odir/$name-tmpunpack + + # If the temporary directory exists, delete it and build an empty + # directory to work on (to avoid potential conflicts). + if [ -d $tmpdir ]; then rm -rf $tmpdir; fi + mkdir $tmpdir + + + + + + # Move into the temporary directory + # --------------------------------- + # + # The default output directory for all the following commands: $tmpdir + cd $tmpdir + + # Unpack + tar -xf $idir/$f + + # Make sure the unpacked tarball is contained within a directory with + # the clean program name + if [ ! -d "$name" ]; then + mv * $name/ + fi + + # Put the current date on all the files because some packagers will not + # add dates to their release tarballs, resulting in dates of the + # Unix-time zero'th second (1970-01-01 at 00:00:00)! + # -print0 is needed for those tarballs that has paths with spaces. For + # the same reason it's needed also `xargs -0`. (`xargs` is needed also + # for large tarballs such as gcc's) + find "$name"/ -type f -print0 | xargs -0 touch + + # Some software source files (like Boost; https://www.boost.io) + # have names that are too long for the more robust 'ustar' + # format. 
So we should use 'pax' for them. + if [ $sname = boost ]; then tarformat=pax + else tarformat=ustar + fi + + # Pack with recommended options + tar -c -H$tarformat --owner=root --group=root \ + -f $name.tar $name/ + + # Lzip will not be available when unpacking Lzip, so we just use Tar. + if [[ ! $name =~ ^lzip ]]; then + lzip -9 $name.tar + fi + + # Move the compressed file from the temporary directory to the target + # output directory + mv $outname $odir/ + + # Print the sha512sum along with the filename for a quick reference + echo $(sha512sum $odir/$outname) + + # Clean up the temporary directory + rm -rf $tmpdir +done |